[ 505.507045] env[63028]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'linux_bridge' {{(pid=63028) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 505.507408] env[63028]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'noop' {{(pid=63028) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 505.507526] env[63028]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'ovs' {{(pid=63028) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 505.507876] env[63028]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs
[ 505.603598] env[63028]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=63028) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:349}}
[ 505.614725] env[63028]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.011s {{(pid=63028) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:372}}
[ 505.656720] env[63028]: INFO oslo_service.periodic_task [-] Skipping periodic task _heal_instance_info_cache because its interval is negative
[ 506.214575] env[63028]: INFO nova.virt.driver [None req-6b744931-94f2-4813-ad15-6042da18216c None None] Loading compute driver 'vmwareapi.VMwareVCDriver'
[ 506.283565] env[63028]: DEBUG oslo_concurrency.lockutils [-] Acquiring lock "oslo_vmware_api_lock" by "oslo_vmware.api.VMwareAPISession._create_session" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 506.283723] env[63028]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 506.283808] env[63028]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=63028) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}}
[ 509.474014] env[63028]: DEBUG oslo_vmware.service [-] Invoking ServiceInstance.RetrieveServiceContent with opID=oslo.vmware-28c6786f-566f-47d9-b9f8-f98c1ac0dc71 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 509.490152] env[63028]: DEBUG oslo_vmware.api [-] Logging into host: vc1.osci.c.eu-de-1.cloud.sap. {{(pid=63028) _create_session /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:242}}
[ 509.490337] env[63028]: DEBUG oslo_vmware.service [-] Invoking SessionManager.Login with opID=oslo.vmware-3ded80a1-e705-4a70-a820-ad2c1e27c012 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 509.522671] env[63028]: INFO oslo_vmware.api [-] Successfully established new session; session ID is 70145.
[ 509.522799] env[63028]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" "released" by "oslo_vmware.api.VMwareAPISession._create_session" :: held 3.239s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 509.523382] env[63028]: INFO nova.virt.vmwareapi.driver [None req-6b744931-94f2-4813-ad15-6042da18216c None None] VMware vCenter version: 7.0.3
[ 509.526730] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7cdd647-9ed2-4adf-94dd-476475d60bb7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 509.548856] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-834d3b4a-32c0-4734-b1bc-afb0697dd02b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 509.554703] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76c4b87e-72a7-4d3a-b6aa-9cb22797b89b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 509.561300] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfd8630a-4e8d-4a42-95b2-22931fd77358 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 509.574112] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fd37853-cd7c-4741-855c-c9dd0dacbcfc {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 509.611215] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2a6291b-446e-46ad-91d7-abccae578424 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 509.642368] env[63028]: DEBUG oslo_vmware.service [-] Invoking ExtensionManager.FindExtension with opID=oslo.vmware-ae3a672e-9c53-43d1-8b51-715bbed51c2b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 509.648283] env[63028]: DEBUG nova.virt.vmwareapi.driver [None req-6b744931-94f2-4813-ad15-6042da18216c None None] Extension org.openstack.compute already exists. {{(pid=63028) _register_openstack_extension /opt/stack/nova/nova/virt/vmwareapi/driver.py:228}}
[ 509.650979] env[63028]: INFO nova.compute.provider_config [None req-6b744931-94f2-4813-ad15-6042da18216c None None] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access.
[ 510.154644] env[63028]: DEBUG nova.context [None req-6b744931-94f2-4813-ad15-6042da18216c None None] Found 2 cells: 00000000-0000-0000-0000-000000000000(cell0),4df5a562-0618-4fe1-8a9f-22e44a727f30(cell1) {{(pid=63028) load_cells /opt/stack/nova/nova/context.py:464}}
[ 510.157727] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] Acquiring lock "00000000-0000-0000-0000-000000000000" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 510.157950] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] Lock "00000000-0000-0000-0000-000000000000" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 510.158744] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] Lock "00000000-0000-0000-0000-000000000000" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 510.159186] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] Acquiring lock "4df5a562-0618-4fe1-8a9f-22e44a727f30" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 510.159376] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] Lock "4df5a562-0618-4fe1-8a9f-22e44a727f30" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 510.160510] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] Lock "4df5a562-0618-4fe1-8a9f-22e44a727f30" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 510.180536] env[63028]: INFO dbcounter [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] Registered counter for database nova_cell0
[ 510.188372] env[63028]: INFO dbcounter [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] Registered counter for database nova_cell1
[ 510.191655] env[63028]: DEBUG oslo_db.sqlalchemy.engines [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=63028) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:395}}
[ 510.192013] env[63028]: DEBUG oslo_db.sqlalchemy.engines [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=63028) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:395}}
[ 510.197199] env[63028]: ERROR nova.db.main.api [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 272, in main
[ 510.197199] env[63028]: result = function(*args, **kwargs)
[ 510.197199] env[63028]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 510.197199] env[63028]: return func(*args, **kwargs)
[ 510.197199] env[63028]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 510.197199] env[63028]: result = fn(*args, **kwargs)
[ 510.197199] env[63028]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 510.197199] env[63028]: return f(*args, **kwargs)
[ 510.197199] env[63028]: File "/opt/stack/nova/nova/objects/service.py", line 556, in _db_service_get_minimum_version
[ 510.197199] env[63028]: return db.service_get_minimum_version(context, binaries)
[ 510.197199] env[63028]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 510.197199] env[63028]: _check_db_access()
[ 510.197199] env[63028]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 510.197199] env[63028]: stacktrace = ''.join(traceback.format_stack())
[ 510.197199] env[63028]:
[ 510.197971] env[63028]: ERROR nova.db.main.api [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 272, in main
[ 510.197971] env[63028]: result = function(*args, **kwargs)
[ 510.197971] env[63028]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 510.197971] env[63028]: return func(*args, **kwargs)
[ 510.197971] env[63028]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 510.197971] env[63028]: result = fn(*args, **kwargs)
[ 510.197971] env[63028]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 510.197971] env[63028]: return f(*args, **kwargs)
[ 510.197971] env[63028]: File "/opt/stack/nova/nova/objects/service.py", line 556, in _db_service_get_minimum_version
[ 510.197971] env[63028]: return db.service_get_minimum_version(context, binaries)
[ 510.197971] env[63028]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 510.197971] env[63028]: _check_db_access()
[ 510.197971] env[63028]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 510.197971] env[63028]: stacktrace = ''.join(traceback.format_stack())
[ 510.197971] env[63028]:
[ 510.198396] env[63028]: WARNING nova.objects.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] Failed to get minimum service version for cell 00000000-0000-0000-0000-000000000000
[ 510.198504] env[63028]: WARNING nova.objects.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] Failed to get minimum service version for cell 4df5a562-0618-4fe1-8a9f-22e44a727f30
[ 510.198921] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] Acquiring lock "singleton_lock" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 510.199094] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] Acquired lock "singleton_lock" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [
510.199336] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] Releasing lock "singleton_lock" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 510.199654] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] Full set of CONF: {{(pid=63028) _wait_for_exit_or_signal /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/service.py:363}} [ 510.199796] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] ******************************************************************************** {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2804}} [ 510.199925] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] Configuration options gathered from: {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2805}} [ 510.200075] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] command line args: ['--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-cpu-common.conf', '--config-file', '/etc/nova/nova-cpu-1.conf'] {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2806}} [ 510.200276] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2807}} [ 510.200405] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] ================================================================================ {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2809}} [ 510.200609] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] allow_resize_to_same_host = True {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.200778] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] arq_binding_timeout = 300 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.200910] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] backdoor_port = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.201055] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] backdoor_socket = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.201228] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] block_device_allocate_retries = 60 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.201393] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] block_device_allocate_retries_interval = 3 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.201563] env[63028]: DEBUG 
oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] cert = self.pem {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.201730] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] compute_driver = vmwareapi.VMwareVCDriver {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.201896] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] compute_monitors = [] {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.202077] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] config_dir = [] {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.202252] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] config_drive_format = iso9660 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.202389] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] config_file = ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.202554] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] config_source = [] {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.202722] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] console_host = devstack {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.202886] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] control_exchange = nova {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.203055] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] cpu_allocation_ratio = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.203216] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] daemon = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.203387] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] debug = True {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.203542] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] default_access_ip_network_name = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.203706] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] default_availability_zone = nova {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.203862] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] default_ephemeral_format = 
None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.204031] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] default_green_pool_size = 1000 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.204278] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] default_log_levels = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO'] {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.204447] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] default_schedule_zone = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.204608] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] disk_allocation_ratio = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.204766] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] enable_new_services = True {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.204940] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] enabled_apis = ['osapi_compute'] {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.205122] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] enabled_ssl_apis = [] {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.205285] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] flat_injected = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.205444] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] force_config_drive = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.205601] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] force_raw_images = True {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.205768] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] graceful_shutdown_timeout = 5 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.205929] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] heal_instance_info_cache_interval = -1 {{(pid=63028) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.206166] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] host = cpu-1 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.206352] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] initial_cpu_allocation_ratio = 4.0 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.206518] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] initial_disk_allocation_ratio = 1.0 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.206683] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] initial_ram_allocation_ratio = 1.0 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.206909] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] injected_network_template = /opt/stack/nova/nova/virt/interfaces.template {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.207089] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] instance_build_timeout = 0 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.207259] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] instance_delete_interval = 300 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.207427] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] instance_format = [instance: %(uuid)s] {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.207591] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] instance_name_template = instance-%08x {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.207751] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] instance_usage_audit = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.207916] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] instance_usage_audit_period = month {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.208090] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] instance_uuid_format = [instance: %(uuid)s] {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.208256] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] instances_path = /opt/stack/data/nova/instances {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.208423] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] internal_service_availability_zone = internal {{(pid=63028) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.208580] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] key = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.208739] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] live_migration_retry_count = 30 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.208905] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] log_color = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.209079] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] log_config_append = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.209247] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] log_date_format = %Y-%m-%d %H:%M:%S {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.209406] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] log_dir = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.209563] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] log_file = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.209691] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] log_options = True {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.209852] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] log_rotate_interval = 1 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.210028] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] log_rotate_interval_type = days {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.210198] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] log_rotation_type = none {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.210335] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] logging_context_format_string = %(color)s%(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(project_name)s %(user_name)s%(color)s] %(instance)s%(color)s%(message)s {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.210461] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] logging_debug_format_suffix = {{(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d}} {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.210627] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] 
logging_default_format_string = %(color)s%(levelname)s %(name)s [-%(color)s] %(instance)s%(color)s%(message)s {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.210789] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] logging_exception_prefix = ERROR %(name)s %(instance)s {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.210916] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] logging_user_identity_format = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.211091] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] long_rpc_timeout = 1800 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.211254] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] max_concurrent_builds = 10 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.211413] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] max_concurrent_live_migrations = 1 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.211569] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] max_concurrent_snapshots = 5 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.211726] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] max_local_block_devices = 3 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.211883] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] max_logfile_count = 30 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.212051] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] max_logfile_size_mb = 200 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.212213] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] maximum_instance_delete_attempts = 5 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.212381] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] metadata_listen = 0.0.0.0 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.212547] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] metadata_listen_port = 8775 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.212714] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] metadata_workers = 2 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.212875] env[63028]: DEBUG oslo_service.service 
[None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] migrate_max_retries = -1 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.213050] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] mkisofs_cmd = genisoimage {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.213259] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] my_block_storage_ip = 10.180.1.21 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.213394] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] my_ip = 10.180.1.21 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.213595] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] my_shared_fs_storage_ip = 10.180.1.21 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.213758] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] network_allocate_retries = 0 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.213933] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] non_inheritable_image_properties = ['cache_in_nova', 'bittorrent'] {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.214114] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] osapi_compute_listen = 0.0.0.0 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.214278] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] osapi_compute_listen_port = 8774 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.214445] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] osapi_compute_unique_server_name_scope = {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.214645] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] osapi_compute_workers = 2 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.214769] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] password_length = 12 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.214926] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] periodic_enable = True {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.215098] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] periodic_fuzzy_delay = 60 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.215266] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] pointer_model = usbtablet 
{{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.215430] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] preallocate_images = none {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.215587] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] publish_errors = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.215716] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] pybasedir = /opt/stack/nova {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.215870] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] ram_allocation_ratio = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.216037] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] rate_limit_burst = 0 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.216203] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] rate_limit_except_level = CRITICAL {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.216362] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] rate_limit_interval = 0 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.216517] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] reboot_timeout = 0 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.216672] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] reclaim_instance_interval = 0 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.216825] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] record = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.216987] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] reimage_timeout_per_gb = 60 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.217167] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] report_interval = 120 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.217324] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] rescue_timeout = 0 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.217482] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] reserved_host_cpus = 0 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.217637] env[63028]: DEBUG oslo_service.service [None 
req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] reserved_host_disk_mb = 0 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.217793] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] reserved_host_memory_mb = 512 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.217954] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] reserved_huge_pages = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.218132] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] resize_confirm_window = 0 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.218291] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] resize_fs_using_block_device = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.218447] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] resume_guests_state_on_host_boot = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.218610] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] rootwrap_config = /etc/nova/rootwrap.conf {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.218769] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] rpc_response_timeout = 60 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.218923] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] run_external_periodic_tasks = True {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.219102] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] running_deleted_instance_action = reap {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.219265] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] running_deleted_instance_poll_interval = 1800 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.219427] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] running_deleted_instance_timeout = 0 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.219584] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] scheduler_instance_sync_interval = 120 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.219749] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] service_down_time = 720 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.219913] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] 
servicegroup_driver = db {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.220078] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] shell_completion = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.220240] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] shelved_offload_time = 0 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.220401] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] shelved_poll_interval = 3600 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.220564] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] shutdown_timeout = 0 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.220719] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] source_is_ipv6 = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.220876] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] ssl_only = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.221137] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] state_path = /opt/stack/data/n-cpu-1 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.221311] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] sync_power_state_interval = 600 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.221476] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] sync_power_state_pool_size = 1000 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.221644] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] syslog_log_facility = LOG_USER {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.221800] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] tempdir = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.221959] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] timeout_nbd = 10 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.222139] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] transport_url = **** {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.222303] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] update_resources_interval = 0 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.222464] env[63028]: DEBUG 
oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] use_cow_images = True {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.222622] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] use_journal = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.222776] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] use_json = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.222930] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] use_rootwrap_daemon = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.223094] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] use_stderr = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.223252] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] use_syslog = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.223406] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] vcpu_pin_set = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.223571] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] vif_plugging_is_fatal = True {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.223735] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] vif_plugging_timeout = 300 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.223898] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] virt_mkfs = [] {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.224069] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] volume_usage_poll_interval = 0 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.224231] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] watch_log_file = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.224395] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] web = /usr/share/spice-html5 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 510.224574] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] os_brick.lock_path = /opt/stack/data/n-cpu-1 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.224740] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] os_brick.wait_mpath_device_attempts = 4 {{(pid=63028) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.224896] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] os_brick.wait_mpath_device_interval = 1 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.225078] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] oslo_concurrency.disable_process_locking = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.225976] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] oslo_concurrency.lock_path = /opt/stack/data/n-cpu-1 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.226191] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] oslo_messaging_metrics.metrics_buffer_size = 1000 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.226369] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] oslo_messaging_metrics.metrics_enabled = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.226546] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] oslo_messaging_metrics.metrics_process_name = {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.226720] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.226889] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.227091] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] api.auth_strategy = keystone {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.227284] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] api.compute_link_prefix = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.227459] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.227635] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] api.dhcp_domain = novalocal {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.227806] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] api.enable_instance_password = True {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.227970] 
env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] api.glance_link_prefix = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.228155] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] api.instance_list_cells_batch_fixed_size = 100 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.228326] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] api.instance_list_cells_batch_strategy = distributed {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.228493] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] api.instance_list_per_project_cells = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.228655] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] api.list_records_by_skipping_down_cells = True {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.228819] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] api.local_metadata_per_cell = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.228986] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] api.max_limit = 1000 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.229170] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] api.metadata_cache_expiration = 15 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.229350] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] api.neutron_default_tenant_id = default {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.229519] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] api.response_validation = warn {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.229689] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] api.use_neutron_default_nets = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.229858] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] api.vendordata_dynamic_connect_timeout = 5 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.230033] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] api.vendordata_dynamic_failure_fatal = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.230211] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] api.vendordata_dynamic_read_timeout = 5 {{(pid=63028) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.230386] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] api.vendordata_dynamic_ssl_certfile = {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.230556] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] api.vendordata_dynamic_targets = [] {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.230719] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] api.vendordata_jsonfile_path = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.230898] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] api.vendordata_providers = ['StaticJSON'] {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.231101] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] cache.backend = dogpile.cache.memcached {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.231274] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] cache.backend_argument = **** {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.231438] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] cache.backend_expiration_time = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.231608] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] cache.config_prefix = cache.oslo {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.231777] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] cache.dead_timeout = 60.0 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.231941] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] cache.debug_cache_backend = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.232118] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] cache.enable_retry_client = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.232283] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] cache.enable_socket_keepalive = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.232456] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] cache.enabled = True {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.232623] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] cache.enforce_fips_mode = False {{(pid=63028) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.232789] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] cache.expiration_time = 600 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.232951] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] cache.hashclient_retry_attempts = 2 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.233139] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] cache.hashclient_retry_delay = 1.0 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.233310] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] cache.memcache_dead_retry = 300 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.233469] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] cache.memcache_password = **** {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.233632] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] cache.memcache_pool_connection_get_timeout = 10 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.233791] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] cache.memcache_pool_flush_on_reconnect = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.233953] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] cache.memcache_pool_maxsize = 10 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.234129] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] cache.memcache_pool_unused_timeout = 60 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.234291] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] cache.memcache_sasl_enabled = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.234468] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] cache.memcache_servers = ['localhost:11211'] {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.234637] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] cache.memcache_socket_timeout = 1.0 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.234835] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] cache.memcache_username = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.234965] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] cache.proxies = [] {{(pid=63028) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.235144] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] cache.redis_db = 0 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.235307] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] cache.redis_password = **** {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.235479] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] cache.redis_sentinel_service_name = mymaster {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.235655] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] cache.redis_sentinels = ['localhost:26379'] {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.235825] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] cache.redis_server = localhost:6379 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.235992] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] cache.redis_socket_timeout = 1.0 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.236171] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] cache.redis_username = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.236340] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] cache.retry_attempts = 2 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.236505] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] cache.retry_delay = 0.0 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.236668] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] cache.socket_keepalive_count = 1 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.236829] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] cache.socket_keepalive_idle = 1 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.236992] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] cache.socket_keepalive_interval = 1 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.237175] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] cache.tls_allowed_ciphers = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.237324] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] cache.tls_cafile = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.237484] 
env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] cache.tls_certfile = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.237654] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] cache.tls_enabled = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.237836] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] cache.tls_keyfile = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.237973] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] cinder.auth_section = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.238163] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] cinder.auth_type = password {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.238328] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] cinder.cafile = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.238502] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] cinder.catalog_info = volumev3::publicURL {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.238664] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] cinder.certfile = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.238827] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] cinder.collect_timing = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.238990] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] cinder.cross_az_attach = True {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.239167] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] cinder.debug = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.239331] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] cinder.endpoint_template = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.239494] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] cinder.http_retries = 3 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.239656] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] cinder.insecure = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.239813] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] cinder.keyfile = None {{(pid=63028) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.239984] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] cinder.os_region_name = RegionOne {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.240160] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] cinder.split_loggers = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.240323] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] cinder.timeout = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.240497] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] compute.consecutive_build_service_disable_threshold = 10 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.240657] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] compute.cpu_dedicated_set = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.240814] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] compute.cpu_shared_set = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.240977] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] compute.image_type_exclude_list = [] {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.241155] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] compute.live_migration_wait_for_vif_plug = True {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.241320] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] compute.max_concurrent_disk_ops = 0 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.241482] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] compute.max_disk_devices_to_attach = -1 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.241641] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] compute.packing_host_numa_cells_allocation_strategy = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.241806] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] compute.provider_config_location = /etc/nova/provider_config/ {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.241969] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] compute.resource_provider_association_refresh = 300 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.242142] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] 
compute.sharing_providers_max_uuids_per_request = 200 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.242307] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] compute.shutdown_retry_interval = 10 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.242484] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] compute.vmdk_allowed_types = ['streamOptimized', 'monolithicSparse'] {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.242659] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] conductor.workers = 2 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.242834] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] console.allowed_origins = [] {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.242996] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] console.ssl_ciphers = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.243179] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] console.ssl_minimum_version = default {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.243347] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] consoleauth.enforce_session_timeout = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.243515] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] consoleauth.token_ttl = 600 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.243684] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] cyborg.cafile = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.243841] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] cyborg.certfile = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.244009] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] cyborg.collect_timing = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.244178] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] cyborg.connect_retries = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.244339] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] cyborg.connect_retry_delay = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.244497] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] cyborg.endpoint_override = None 
{{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.244657] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] cyborg.insecure = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.244814] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] cyborg.keyfile = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.244975] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] cyborg.max_version = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.245148] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] cyborg.min_version = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.245306] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] cyborg.region_name = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.245465] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] cyborg.retriable_status_codes = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.245620] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] cyborg.service_name = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.245785] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] cyborg.service_type = accelerator {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.245944] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] cyborg.split_loggers = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.246116] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] cyborg.status_code_retries = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.246276] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] cyborg.status_code_retry_delay = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.246438] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] cyborg.timeout = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.246614] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] cyborg.valid_interfaces = ['internal', 'public'] {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.246774] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] cyborg.version = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 
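The option dump above (and continuing below) is produced by oslo.config's ConfigOpts.log_opt_values, the cfg.py:2824 call site cited in every record: at service start-up it walks each registered option group and logs "group.option = value" at DEBUG. A minimal, self-contained sketch of that mechanism follows; the option names and group are made up for illustration, while Nova registers its real options (api.*, cache.*, cinder.*, compute.*, ...) through the same API.

    import logging
    from oslo_config import cfg

    logging.basicConfig(level=logging.DEBUG)
    LOG = logging.getLogger(__name__)

    CONF = cfg.CONF
    # Hypothetical options for illustration only.
    CONF.register_opts(
        [
            cfg.StrOpt('backend', default='dogpile.cache.memcached'),
            cfg.BoolOpt('enabled', default=True),
            cfg.IntOpt('expiration_time', default=600),
        ],
        group='demo_cache',
    )

    CONF(args=[], project='demo')            # parse (no CLI args, no config files)
    CONF.log_opt_values(LOG, logging.DEBUG)  # logs each option as "demo_cache.<name> = <value>"
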
510.246942] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] database.asyncio_connection = **** {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.247113] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] database.asyncio_slave_connection = **** {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.247287] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] database.backend = sqlalchemy {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.247454] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] database.connection = **** {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.247618] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] database.connection_debug = 0 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.247786] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] database.connection_parameters = {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.247948] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] database.connection_recycle_time = 3600 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.248124] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] database.connection_trace = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.248287] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] database.db_inc_retry_interval = True {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.248451] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] database.db_max_retries = 20 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.248611] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] database.db_max_retry_interval = 10 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.248775] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] database.db_retry_interval = 1 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.248936] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] database.max_overflow = 50 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.249112] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] database.max_pool_size = 5 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.249276] env[63028]: DEBUG oslo_service.service [None 
req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] database.max_retries = 10 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.249445] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] database.mysql_sql_mode = TRADITIONAL {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.249605] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] database.mysql_wsrep_sync_wait = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.249761] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] database.pool_timeout = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.249923] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] database.retry_interval = 10 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.250094] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] database.slave_connection = **** {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.250262] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] database.sqlite_synchronous = True {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.250421] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] database.use_db_reconnect = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.250587] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] api_database.asyncio_connection = **** {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.250746] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] api_database.asyncio_slave_connection = **** {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.250914] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] api_database.backend = sqlalchemy {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.251091] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] api_database.connection = **** {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.251258] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] api_database.connection_debug = 0 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.251428] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] api_database.connection_parameters = {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.251591] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None 
None] api_database.connection_recycle_time = 3600 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.251752] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] api_database.connection_trace = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.251915] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] api_database.db_inc_retry_interval = True {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.252092] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] api_database.db_max_retries = 20 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.252258] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] api_database.db_max_retry_interval = 10 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.252421] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] api_database.db_retry_interval = 1 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.252584] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] api_database.max_overflow = 50 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.252747] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] api_database.max_pool_size = 5 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.252908] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] api_database.max_retries = 10 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.253097] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] api_database.mysql_sql_mode = TRADITIONAL {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.253263] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] api_database.mysql_wsrep_sync_wait = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.253423] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] api_database.pool_timeout = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.253584] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] api_database.retry_interval = 10 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.253744] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] api_database.slave_connection = **** {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.253906] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] 
api_database.sqlite_synchronous = True {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.254098] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] devices.enabled_mdev_types = [] {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.254281] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] ephemeral_storage_encryption.cipher = aes-xts-plain64 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.254455] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] ephemeral_storage_encryption.default_format = luks {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.254618] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] ephemeral_storage_encryption.enabled = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.254783] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] ephemeral_storage_encryption.key_size = 512 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.254953] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] glance.api_servers = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.255133] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] glance.cafile = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.255300] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] glance.certfile = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.255466] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] glance.collect_timing = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.255627] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] glance.connect_retries = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.255788] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] glance.connect_retry_delay = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.255950] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] glance.debug = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.256129] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] glance.default_trusted_certificate_ids = [] {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.256292] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] glance.enable_certificate_validation 
= False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.256454] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] glance.enable_rbd_download = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.256613] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] glance.endpoint_override = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.256779] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] glance.insecure = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.256940] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] glance.keyfile = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.257114] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] glance.max_version = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.257283] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] glance.min_version = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.257445] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] glance.num_retries = 3 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.257614] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] glance.rbd_ceph_conf = {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.257779] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] glance.rbd_connect_timeout = 5 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.257948] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] glance.rbd_pool = {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.258129] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] glance.rbd_user = {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.258300] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] glance.region_name = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.258461] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] glance.retriable_status_codes = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.258621] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] glance.service_name = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.258788] env[63028]: DEBUG 
oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] glance.service_type = image {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.258951] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] glance.split_loggers = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.259123] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] glance.status_code_retries = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.259287] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] glance.status_code_retry_delay = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.259448] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] glance.timeout = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.259628] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] glance.valid_interfaces = ['internal', 'public'] {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.259792] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] glance.verify_glance_signatures = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.259955] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] glance.version = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.260139] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] guestfs.debug = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.260310] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] manila.auth_section = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.260476] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] manila.auth_type = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.260637] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] manila.cafile = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.260796] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] manila.certfile = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.260959] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] manila.collect_timing = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.261133] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] manila.connect_retries = None 
{{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.261295] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] manila.connect_retry_delay = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.261457] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] manila.endpoint_override = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.261620] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] manila.insecure = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.261778] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] manila.keyfile = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.261937] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] manila.max_version = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.262107] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] manila.min_version = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.262278] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] manila.region_name = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.262436] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] manila.retriable_status_codes = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.262596] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] manila.service_name = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.262766] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] manila.service_type = shared-file-system {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.262932] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] manila.share_apply_policy_timeout = 10 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.263109] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] manila.split_loggers = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.263272] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] manila.status_code_retries = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.263437] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] manila.status_code_retry_delay = None {{(pid=63028) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.263598] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] manila.timeout = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.263783] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] manila.valid_interfaces = ['internal', 'public'] {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.263944] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] manila.version = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.264126] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] mks.enabled = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.264482] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] mks.mksproxy_base_url = http://127.0.0.1:6090/ {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.264674] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] image_cache.manager_interval = 2400 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.264847] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] image_cache.precache_concurrency = 1 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.265029] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] image_cache.remove_unused_base_images = True {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.265209] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] image_cache.remove_unused_original_minimum_age_seconds = 86400 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.265381] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] image_cache.remove_unused_resized_minimum_age_seconds = 3600 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.265557] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] image_cache.subdirectory_name = _base {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.265731] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] ironic.api_max_retries = 60 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.265895] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] ironic.api_retry_interval = 2 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.266076] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] ironic.auth_section = None {{(pid=63028) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.266247] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] ironic.auth_type = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.266411] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] ironic.cafile = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.266570] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] ironic.certfile = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.266733] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] ironic.collect_timing = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.266895] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] ironic.conductor_group = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.267065] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] ironic.connect_retries = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.267238] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] ironic.connect_retry_delay = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.267395] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] ironic.endpoint_override = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.267560] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] ironic.insecure = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.267720] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] ironic.keyfile = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.267881] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] ironic.max_version = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.268053] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] ironic.min_version = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.268227] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] ironic.peer_list = [] {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.268392] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] ironic.region_name = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.268552] env[63028]: DEBUG oslo_service.service [None 
req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] ironic.retriable_status_codes = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.268718] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] ironic.serial_console_state_timeout = 10 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.268880] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] ironic.service_name = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.269063] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] ironic.service_type = baremetal {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.269228] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] ironic.shard = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.269394] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] ironic.split_loggers = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.269561] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] ironic.status_code_retries = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.269719] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] ironic.status_code_retry_delay = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.269876] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] ironic.timeout = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.270070] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] ironic.valid_interfaces = ['internal', 'public'] {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.270238] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] ironic.version = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.270426] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] key_manager.backend = nova.keymgr.conf_key_mgr.ConfKeyManager {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.270601] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] key_manager.fixed_key = **** {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.270786] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] barbican.auth_endpoint = http://localhost/identity/v3 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.270949] env[63028]: DEBUG oslo_service.service [None 
req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] barbican.barbican_api_version = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.271122] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] barbican.barbican_endpoint = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.271296] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] barbican.barbican_endpoint_type = public {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.271457] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] barbican.barbican_region_name = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.271616] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] barbican.cafile = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.271776] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] barbican.certfile = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.271939] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] barbican.collect_timing = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.272115] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] barbican.insecure = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.272275] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] barbican.keyfile = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.272443] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] barbican.number_of_retries = 60 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.272606] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] barbican.retry_delay = 1 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.272772] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] barbican.send_service_user_token = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.272934] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] barbican.split_loggers = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.273103] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] barbican.timeout = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.273272] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] barbican.verify_ssl = True {{(pid=63028) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.273431] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] barbican.verify_ssl_path = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.273598] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] barbican_service_user.auth_section = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.273762] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] barbican_service_user.auth_type = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.273920] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] barbican_service_user.cafile = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.274089] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] barbican_service_user.certfile = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.274257] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] barbican_service_user.collect_timing = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.274422] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] barbican_service_user.insecure = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.274579] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] barbican_service_user.keyfile = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.274742] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] barbican_service_user.split_loggers = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.274901] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] barbican_service_user.timeout = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.275076] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] vault.approle_role_id = **** {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.275239] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] vault.approle_secret_id = **** {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.275409] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] vault.kv_mountpoint = secret {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.275572] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] vault.kv_path = None {{(pid=63028) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.275739] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] vault.kv_version = 2 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.275895] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] vault.namespace = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.276064] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] vault.root_token_id = **** {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.276225] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] vault.ssl_ca_crt_file = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.276393] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] vault.timeout = 60.0 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.276554] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] vault.use_ssl = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.276726] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] vault.vault_url = http://127.0.0.1:8200 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.276895] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] keystone.cafile = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.277068] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] keystone.certfile = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.277243] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] keystone.collect_timing = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.277403] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] keystone.connect_retries = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.277563] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] keystone.connect_retry_delay = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.277723] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] keystone.endpoint_override = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.277883] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] keystone.insecure = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.278053] env[63028]: DEBUG oslo_service.service [None 
req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] keystone.keyfile = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.278217] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] keystone.max_version = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.278375] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] keystone.min_version = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.278535] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] keystone.region_name = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.278695] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] keystone.retriable_status_codes = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.278855] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] keystone.service_name = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.279034] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] keystone.service_type = identity {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.279202] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] keystone.split_loggers = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.279365] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] keystone.status_code_retries = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.279530] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] keystone.status_code_retry_delay = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.279682] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] keystone.timeout = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.279861] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] keystone.valid_interfaces = ['internal', 'public'] {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.280029] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] keystone.version = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.280225] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] libvirt.ceph_mount_options = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.280527] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] libvirt.ceph_mount_point_base 
= /opt/stack/data/n-cpu-1/mnt {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.280708] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] libvirt.connection_uri = {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.280872] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] libvirt.cpu_mode = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.281051] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] libvirt.cpu_model_extra_flags = [] {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.281230] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] libvirt.cpu_models = [] {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.281410] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] libvirt.cpu_power_governor_high = performance {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.281583] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] libvirt.cpu_power_governor_low = powersave {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.281747] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] libvirt.cpu_power_management = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.281918] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.282103] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] libvirt.device_detach_attempts = 8 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.282279] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] libvirt.device_detach_timeout = 20 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.282453] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] libvirt.disk_cachemodes = [] {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.282615] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] libvirt.disk_prefix = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.282781] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] libvirt.enabled_perf_events = [] {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.282947] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] libvirt.file_backed_memory = 0 {{(pid=63028) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.283130] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] libvirt.gid_maps = [] {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.283294] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] libvirt.hw_disk_discard = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.283456] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] libvirt.hw_machine_type = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.283626] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] libvirt.images_rbd_ceph_conf = {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.283791] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] libvirt.images_rbd_glance_copy_poll_interval = 15 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.283953] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] libvirt.images_rbd_glance_copy_timeout = 600 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.284138] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] libvirt.images_rbd_glance_store_name = {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.284309] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] libvirt.images_rbd_pool = rbd {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.284482] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] libvirt.images_type = default {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.284644] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] libvirt.images_volume_group = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.284808] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] libvirt.inject_key = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.284972] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] libvirt.inject_partition = -2 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.285151] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] libvirt.inject_password = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.285329] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] libvirt.iscsi_iface = None {{(pid=63028) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.285478] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] libvirt.iser_use_multipath = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.285643] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] libvirt.live_migration_bandwidth = 0 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.285806] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] libvirt.live_migration_completion_timeout = 800 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.285969] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] libvirt.live_migration_downtime = 500 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.286148] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] libvirt.live_migration_downtime_delay = 75 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.286313] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] libvirt.live_migration_downtime_steps = 10 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.286481] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] libvirt.live_migration_inbound_addr = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.286644] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] libvirt.live_migration_permit_auto_converge = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.286808] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] libvirt.live_migration_permit_post_copy = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.286969] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] libvirt.live_migration_scheme = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.287166] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] libvirt.live_migration_timeout_action = abort {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.287325] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] libvirt.live_migration_tunnelled = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.287485] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] libvirt.live_migration_uri = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.287646] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] 
libvirt.live_migration_with_native_tls = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.287804] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] libvirt.max_queues = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.287963] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] libvirt.mem_stats_period_seconds = 10 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.288223] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] libvirt.migration_inbound_addr = 10.180.1.21 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.288393] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] libvirt.nfs_mount_options = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.288693] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] libvirt.nfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.288875] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] libvirt.num_aoe_discover_tries = 3 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.289057] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] libvirt.num_iser_scan_tries = 5 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.289226] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] libvirt.num_memory_encrypted_guests = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.289396] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] libvirt.num_nvme_discover_tries = 5 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.289562] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] libvirt.num_pcie_ports = 0 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.289729] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] libvirt.num_volume_scan_tries = 5 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.289895] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] libvirt.pmem_namespaces = [] {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.290070] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] libvirt.quobyte_client_cfg = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.290375] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] 
libvirt.quobyte_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.290557] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] libvirt.rbd_connect_timeout = 5 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.290728] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] libvirt.rbd_destroy_volume_retries = 12 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.290895] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] libvirt.rbd_destroy_volume_retry_interval = 5 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.291070] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] libvirt.rbd_secret_uuid = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.291236] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] libvirt.rbd_user = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.291402] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] libvirt.realtime_scheduler_priority = 1 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.291575] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] libvirt.remote_filesystem_transport = ssh {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.291735] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] libvirt.rescue_image_id = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.291893] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] libvirt.rescue_kernel_id = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.292063] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] libvirt.rescue_ramdisk_id = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.292241] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] libvirt.rng_dev_path = /dev/urandom {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.292406] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] libvirt.rx_queue_size = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.292574] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] libvirt.smbfs_mount_options = {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.292874] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] 
libvirt.smbfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.293065] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] libvirt.snapshot_compression = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.293236] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] libvirt.snapshot_image_format = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.293469] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] libvirt.snapshots_directory = /opt/stack/data/nova/instances/snapshots {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.293637] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] libvirt.sparse_logical_volumes = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.293801] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] libvirt.swtpm_enabled = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.293969] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] libvirt.swtpm_group = tss {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.294151] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] libvirt.swtpm_user = tss {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.294321] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] libvirt.sysinfo_serial = unique {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.294487] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] libvirt.tb_cache_size = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.294647] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] libvirt.tx_queue_size = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.294813] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] libvirt.uid_maps = [] {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.294978] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] libvirt.use_virtio_for_bridges = True {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.295164] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] libvirt.virt_type = kvm {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.295338] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] libvirt.volume_clear = zero 
{{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.295505] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] libvirt.volume_clear_size = 0 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.295669] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] libvirt.volume_use_multipath = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.295826] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] libvirt.vzstorage_cache_path = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.295997] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] libvirt.vzstorage_log_path = /var/log/vstorage/%(cluster_name)s/nova.log.gz {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.296182] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] libvirt.vzstorage_mount_group = qemu {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.296348] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] libvirt.vzstorage_mount_opts = [] {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.296514] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] libvirt.vzstorage_mount_perms = 0770 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.296811] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] libvirt.vzstorage_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.296990] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] libvirt.vzstorage_mount_user = stack {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.297193] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] libvirt.wait_soft_reboot_seconds = 120 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.297364] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] neutron.auth_section = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.297541] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] neutron.auth_type = password {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.297705] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] neutron.cafile = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.297865] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] neutron.certfile = None 
{{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.298044] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] neutron.collect_timing = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.298216] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] neutron.connect_retries = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.298379] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] neutron.connect_retry_delay = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.298549] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] neutron.default_floating_pool = public {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.298708] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] neutron.endpoint_override = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.298872] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] neutron.extension_sync_interval = 600 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.299046] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] neutron.http_retries = 3 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.299214] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] neutron.insecure = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.299375] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] neutron.keyfile = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.299540] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] neutron.max_version = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.299714] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] neutron.metadata_proxy_shared_secret = **** {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.299877] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] neutron.min_version = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.300054] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] neutron.ovs_bridge = br-int {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.300225] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] neutron.physnets = [] {{(pid=63028) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.300397] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] neutron.region_name = RegionOne {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.300559] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] neutron.retriable_status_codes = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.300725] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] neutron.service_metadata_proxy = True {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.300883] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] neutron.service_name = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.301064] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] neutron.service_type = network {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.301230] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] neutron.split_loggers = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.301391] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] neutron.status_code_retries = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.301550] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] neutron.status_code_retry_delay = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.301709] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] neutron.timeout = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.301890] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] neutron.valid_interfaces = ['internal', 'public'] {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.302065] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] neutron.version = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.302240] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] notifications.bdms_in_notifications = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.302419] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] notifications.default_level = INFO {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.302584] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] notifications.include_share_mapping = False {{(pid=63028) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.302756] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] notifications.notification_format = unversioned {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.302918] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] notifications.notify_on_state_change = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.303103] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] notifications.versioned_notifications_topics = ['versioned_notifications'] {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.303284] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] pci.alias = [] {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.303458] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] pci.device_spec = [] {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.303623] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] pci.report_in_placement = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.303793] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] placement.auth_section = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.303965] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] placement.auth_type = password {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.304146] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] placement.auth_url = http://10.180.1.21/identity {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.304307] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] placement.cafile = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.304469] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] placement.certfile = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.304630] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] placement.collect_timing = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.304787] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] placement.connect_retries = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.304945] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] placement.connect_retry_delay = None {{(pid=63028) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.305116] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] placement.default_domain_id = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.305280] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] placement.default_domain_name = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.305441] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] placement.domain_id = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.305601] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] placement.domain_name = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.305760] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] placement.endpoint_override = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.305920] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] placement.insecure = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.306091] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] placement.keyfile = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.306253] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] placement.max_version = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.306412] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] placement.min_version = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.306579] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] placement.password = **** {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.306738] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] placement.project_domain_id = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.306903] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] placement.project_domain_name = Default {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.307081] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] placement.project_id = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.307261] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] placement.project_name = service {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.307432] 
env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] placement.region_name = RegionOne {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.307595] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] placement.retriable_status_codes = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.307756] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] placement.service_name = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.307923] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] placement.service_type = placement {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.308110] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] placement.split_loggers = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.308277] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] placement.status_code_retries = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.308439] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] placement.status_code_retry_delay = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.308599] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] placement.system_scope = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.308758] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] placement.timeout = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.308916] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] placement.trust_id = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.309086] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] placement.user_domain_id = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.309257] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] placement.user_domain_name = Default {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.309420] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] placement.user_id = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.309594] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] placement.username = nova {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.309774] env[63028]: DEBUG oslo_service.service [None 
req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] placement.valid_interfaces = ['internal', 'public'] {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.309935] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] placement.version = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.310127] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] quota.cores = 20 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.310297] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] quota.count_usage_from_placement = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.310473] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] quota.driver = nova.quota.DbQuotaDriver {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.310643] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] quota.injected_file_content_bytes = 10240 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.310812] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] quota.injected_file_path_length = 255 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.310982] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] quota.injected_files = 5 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.311165] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] quota.instances = 10 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.311336] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] quota.key_pairs = 100 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.311503] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] quota.metadata_items = 128 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.311668] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] quota.ram = 51200 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.311830] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] quota.recheck_quota = True {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.311997] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] quota.server_group_members = 10 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.312180] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] quota.server_groups = 10 {{(pid=63028) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.312390] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] quota.unified_limits_resource_list = ['servers'] {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.312563] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] quota.unified_limits_resource_strategy = require {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.312743] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] scheduler.discover_hosts_in_cells_interval = -1 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.312909] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] scheduler.enable_isolated_aggregate_filtering = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.313086] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] scheduler.image_metadata_prefilter = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.313255] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] scheduler.limit_tenants_to_placement_aggregate = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.313425] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] scheduler.max_attempts = 3 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.313590] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] scheduler.max_placement_results = 1000 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.313755] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] scheduler.placement_aggregate_required_for_tenants = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.313917] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] scheduler.query_placement_for_image_type_support = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.314097] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] scheduler.query_placement_for_routed_network_aggregates = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.314278] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] scheduler.workers = 2 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.314452] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] filter_scheduler.aggregate_image_properties_isolation_namespace = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.314623] 
env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] filter_scheduler.aggregate_image_properties_isolation_separator = . {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.314806] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.314975] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] filter_scheduler.build_failure_weight_multiplier = 1000000.0 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.315157] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] filter_scheduler.cpu_weight_multiplier = 1.0 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.315325] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.315492] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] filter_scheduler.disk_weight_multiplier = 1.0 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.315678] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter', 'SameHostFilter', 'DifferentHostFilter'] {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.315848] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] filter_scheduler.host_subset_size = 1 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.316027] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] filter_scheduler.hypervisor_version_weight_multiplier = 1.0 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.316192] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] filter_scheduler.image_properties_default_architecture = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.316365] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] filter_scheduler.io_ops_weight_multiplier = -1.0 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.316534] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] filter_scheduler.isolated_hosts = [] {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.316700] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] filter_scheduler.isolated_images = [] 
{{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.316879] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] filter_scheduler.max_instances_per_host = 50 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.317061] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] filter_scheduler.max_io_ops_per_host = 8 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.317239] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] filter_scheduler.num_instances_weight_multiplier = 0.0 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.317402] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] filter_scheduler.pci_in_placement = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.317568] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] filter_scheduler.pci_weight_multiplier = 1.0 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.317734] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] filter_scheduler.ram_weight_multiplier = 1.0 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.317897] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.318072] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] filter_scheduler.shuffle_best_same_weighed_hosts = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.318243] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] filter_scheduler.soft_affinity_weight_multiplier = 1.0 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.318411] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.318576] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] filter_scheduler.track_instance_changes = True {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.318756] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.318927] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] metrics.required = True {{(pid=63028) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.319107] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] metrics.weight_multiplier = 1.0 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.319278] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] metrics.weight_of_unavailable = -10000.0 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.319446] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] metrics.weight_setting = [] {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.319759] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] serial_console.base_url = ws://127.0.0.1:6083/ {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.319935] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] serial_console.enabled = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.320126] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] serial_console.port_range = 10000:20000 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.320302] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] serial_console.proxyclient_address = 127.0.0.1 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.320473] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] serial_console.serialproxy_host = 0.0.0.0 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.320642] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] serial_console.serialproxy_port = 6083 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.320811] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] service_user.auth_section = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.320986] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] service_user.auth_type = password {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.321163] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] service_user.cafile = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.321328] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] service_user.certfile = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.321494] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] service_user.collect_timing = False {{(pid=63028) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.321654] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] service_user.insecure = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.321811] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] service_user.keyfile = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.321981] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] service_user.send_service_user_token = True {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.322168] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] service_user.split_loggers = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.322335] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] service_user.timeout = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.322506] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] spice.agent_enabled = True {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.322670] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] spice.enabled = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.322976] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] spice.html5proxy_base_url = http://127.0.0.1:6082/spice_auto.html {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.323193] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] spice.html5proxy_host = 0.0.0.0 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.323370] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] spice.html5proxy_port = 6082 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.323534] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] spice.image_compression = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.323694] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] spice.jpeg_compression = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.323854] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] spice.playback_compression = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.324028] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] spice.require_secure = False {{(pid=63028) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.324206] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] spice.server_listen = 127.0.0.1 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.324378] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] spice.server_proxyclient_address = 127.0.0.1 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.324538] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] spice.streaming_mode = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.324696] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] spice.zlib_compression = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.324861] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] upgrade_levels.baseapi = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.325044] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] upgrade_levels.compute = auto {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.325212] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] upgrade_levels.conductor = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.325373] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] upgrade_levels.scheduler = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.325542] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] vendordata_dynamic_auth.auth_section = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.325717] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] vendordata_dynamic_auth.auth_type = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.325864] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] vendordata_dynamic_auth.cafile = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.326033] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] vendordata_dynamic_auth.certfile = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.326201] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] vendordata_dynamic_auth.collect_timing = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.326367] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] vendordata_dynamic_auth.insecure = False {{(pid=63028) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.326525] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] vendordata_dynamic_auth.keyfile = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.326688] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] vendordata_dynamic_auth.split_loggers = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.326847] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] vendordata_dynamic_auth.timeout = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.327033] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] vmware.api_retry_count = 10 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.327231] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] vmware.ca_file = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.327380] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] vmware.cache_prefix = devstack-image-cache {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.327552] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] vmware.cluster_name = testcl1 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.327720] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] vmware.connection_pool_size = 10 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.327880] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] vmware.console_delay_seconds = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.328062] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] vmware.datastore_regex = ^datastore.* {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.328279] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.328454] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] vmware.host_password = **** {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.328622] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] vmware.host_port = 443 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.328789] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] vmware.host_username = administrator@vsphere.local {{(pid=63028) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.328957] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] vmware.insecure = True {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.329134] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] vmware.integration_bridge = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.329306] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] vmware.maximum_objects = 100 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.329497] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] vmware.pbm_default_policy = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.329630] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] vmware.pbm_enabled = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.329787] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] vmware.pbm_wsdl_location = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.329956] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] vmware.serial_log_dir = /opt/vmware/vspc {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.330132] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] vmware.serial_port_proxy_uri = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.330292] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] vmware.serial_port_service_uri = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.330465] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] vmware.task_poll_interval = 0.5 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.330640] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] vmware.use_linked_clone = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.330810] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] vmware.vnc_keymap = en-us {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.330978] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] vmware.vnc_port = 5900 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.331162] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] vmware.vnc_port_total = 10000 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.331348] 
env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] vnc.auth_schemes = ['none'] {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.331522] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] vnc.enabled = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.331819] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] vnc.novncproxy_base_url = http://127.0.0.1:6080/vnc_auto.html {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.332012] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] vnc.novncproxy_host = 0.0.0.0 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.332194] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] vnc.novncproxy_port = 6080 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.332397] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] vnc.server_listen = 127.0.0.1 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.332582] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] vnc.server_proxyclient_address = 127.0.0.1 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.332746] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] vnc.vencrypt_ca_certs = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.332906] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] vnc.vencrypt_client_cert = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.333079] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] vnc.vencrypt_client_key = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.333257] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] workarounds.disable_compute_service_check_for_ffu = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.333423] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] workarounds.disable_deep_image_inspection = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.333586] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] workarounds.disable_fallback_pcpu_query = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.333748] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] workarounds.disable_group_policy_check_upcall = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 
510.333908] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] workarounds.disable_libvirt_livesnapshot = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.334086] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] workarounds.disable_rootwrap = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.334251] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] workarounds.enable_numa_live_migration = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.334417] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] workarounds.enable_qemu_monitor_announce_self = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.334578] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.334741] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] workarounds.handle_virt_lifecycle_events = True {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.334903] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] workarounds.libvirt_disable_apic = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.335076] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] workarounds.never_download_image_if_on_rbd = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.335247] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] workarounds.qemu_monitor_announce_self_count = 3 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.335409] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] workarounds.qemu_monitor_announce_self_interval = 1 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.335572] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] workarounds.reserve_disk_resource_for_image_cache = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.335735] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] workarounds.skip_cpu_compare_at_startup = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.335946] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] workarounds.skip_cpu_compare_on_dest = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.336071] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None 
None] workarounds.skip_hypervisor_version_check_on_lm = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.336235] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] workarounds.skip_reserve_in_use_ironic_nodes = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.336398] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] workarounds.unified_limits_count_pcpu_as_vcpu = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.336563] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.336748] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] wsgi.api_paste_config = /etc/nova/api-paste.ini {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.336919] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] wsgi.client_socket_timeout = 900 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.337097] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] wsgi.default_pool_size = 1000 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.337273] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] wsgi.keep_alive = True {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.337444] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] wsgi.max_header_line = 16384 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.337606] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] wsgi.secure_proxy_ssl_header = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.337768] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] wsgi.ssl_ca_file = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.337932] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] wsgi.ssl_cert_file = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.338107] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] wsgi.ssl_key_file = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.338282] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] wsgi.tcp_keepidle = 600 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.338464] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] 
wsgi.wsgi_log_format = %(client_ip)s "%(request_line)s" status: %(status_code)s len: %(body_length)s time: %(wall_seconds).7f {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.338630] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] zvm.ca_file = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.338792] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] zvm.cloud_connector_url = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.339111] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] zvm.image_tmp_path = /opt/stack/data/n-cpu-1/images {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.339294] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] zvm.reachable_timeout = 300 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.339500] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] oslo_versionedobjects.fatal_exception_format_errors = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.339646] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.339821] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] profiler.connection_string = messaging:// {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.339988] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] profiler.enabled = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.340173] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] profiler.es_doc_type = notification {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.340340] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] profiler.es_scroll_size = 10000 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.340506] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] profiler.es_scroll_time = 2m {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.340668] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] profiler.filter_error_trace = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.340835] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] profiler.hmac_keys = **** {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.341010] env[63028]: DEBUG 
oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] profiler.sentinel_service_name = mymaster {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.341183] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] profiler.socket_timeout = 0.1 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.341348] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] profiler.trace_requests = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.341509] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] profiler.trace_sqlalchemy = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.341684] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] profiler_jaeger.process_tags = {} {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.341845] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] profiler_jaeger.service_name_prefix = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.342013] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] profiler_otlp.service_name_prefix = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.342198] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] remote_debug.host = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.342362] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] remote_debug.port = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.342537] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] oslo_messaging_rabbit.amqp_auto_delete = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.342701] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] oslo_messaging_rabbit.amqp_durable_queues = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.342869] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] oslo_messaging_rabbit.conn_pool_min_size = 2 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.343043] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] oslo_messaging_rabbit.conn_pool_ttl = 1200 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.343214] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] oslo_messaging_rabbit.direct_mandatory_flag = True {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.343379] 
env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] oslo_messaging_rabbit.enable_cancel_on_failover = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.343540] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] oslo_messaging_rabbit.heartbeat_in_pthread = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.343702] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] oslo_messaging_rabbit.heartbeat_rate = 3 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.343863] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.344047] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] oslo_messaging_rabbit.hostname = devstack {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.344214] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] oslo_messaging_rabbit.kombu_compression = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.344386] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] oslo_messaging_rabbit.kombu_failover_strategy = round-robin {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.344554] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.344721] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.344892] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] oslo_messaging_rabbit.processname = nova-compute {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.345067] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] oslo_messaging_rabbit.rabbit_ha_queues = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.345239] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] oslo_messaging_rabbit.rabbit_interval_max = 30 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.345413] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.345578] env[63028]: DEBUG oslo_service.service [None 
req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.345741] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.345928] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.346083] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.346249] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] oslo_messaging_rabbit.rabbit_quorum_queue = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.346413] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] oslo_messaging_rabbit.rabbit_retry_backoff = 2 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.346575] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] oslo_messaging_rabbit.rabbit_retry_interval = 1 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.346736] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] oslo_messaging_rabbit.rabbit_stream_fanout = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.346896] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.347067] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] oslo_messaging_rabbit.rabbit_transient_quorum_queue = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.347240] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] oslo_messaging_rabbit.rpc_conn_pool_size = 30 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.347401] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] oslo_messaging_rabbit.ssl = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.347567] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] oslo_messaging_rabbit.ssl_ca_file = {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.347733] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] oslo_messaging_rabbit.ssl_cert_file = 
{{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.347893] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] oslo_messaging_rabbit.ssl_enforce_fips_mode = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.348068] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] oslo_messaging_rabbit.ssl_key_file = {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.348241] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] oslo_messaging_rabbit.ssl_version = {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.348404] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] oslo_messaging_rabbit.use_queue_manager = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.348586] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] oslo_messaging_notifications.driver = ['messagingv2'] {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.348752] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] oslo_messaging_notifications.retry = -1 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.348927] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] oslo_messaging_notifications.topics = ['notifications'] {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.349109] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] oslo_messaging_notifications.transport_url = **** {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.349283] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] oslo_limit.auth_section = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.349452] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] oslo_limit.auth_type = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.349612] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] oslo_limit.cafile = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.349771] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] oslo_limit.certfile = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.349933] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] oslo_limit.collect_timing = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.350103] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] 
oslo_limit.connect_retries = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.350267] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] oslo_limit.connect_retry_delay = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.350426] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] oslo_limit.endpoint_id = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.350596] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] oslo_limit.endpoint_interface = publicURL {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.350751] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] oslo_limit.endpoint_override = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.350905] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] oslo_limit.endpoint_region_name = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.351073] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] oslo_limit.endpoint_service_name = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.351236] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] oslo_limit.endpoint_service_type = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.351399] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] oslo_limit.insecure = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.351555] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] oslo_limit.keyfile = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.351712] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] oslo_limit.max_version = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.351869] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] oslo_limit.min_version = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.352035] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] oslo_limit.region_name = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.352197] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] oslo_limit.retriable_status_codes = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.352355] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] oslo_limit.service_name = None {{(pid=63028) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.352509] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] oslo_limit.service_type = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.352669] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] oslo_limit.split_loggers = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.352828] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] oslo_limit.status_code_retries = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.352987] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] oslo_limit.status_code_retry_delay = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.353162] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] oslo_limit.timeout = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.353318] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] oslo_limit.valid_interfaces = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.353477] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] oslo_limit.version = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.353643] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] oslo_reports.file_event_handler = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.353807] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] oslo_reports.file_event_handler_interval = 1 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.353965] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] oslo_reports.log_dir = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.354152] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] vif_plug_linux_bridge_privileged.capabilities = [12] {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.354315] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] vif_plug_linux_bridge_privileged.group = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.354475] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] vif_plug_linux_bridge_privileged.helper_command = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.354642] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] vif_plug_linux_bridge_privileged.logger_name = 
oslo_privsep.daemon {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.354806] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] vif_plug_linux_bridge_privileged.thread_pool_size = 8 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.354966] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] vif_plug_linux_bridge_privileged.user = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.355149] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] vif_plug_ovs_privileged.capabilities = [12, 1] {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.355309] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] vif_plug_ovs_privileged.group = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.355470] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] vif_plug_ovs_privileged.helper_command = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.355635] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.355798] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] vif_plug_ovs_privileged.thread_pool_size = 8 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.355956] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] vif_plug_ovs_privileged.user = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.356141] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] os_vif_linux_bridge.flat_interface = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.356321] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] os_vif_linux_bridge.forward_bridge_interface = ['all'] {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.356497] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] os_vif_linux_bridge.iptables_bottom_regex = {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.356668] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] os_vif_linux_bridge.iptables_drop_action = DROP {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.356839] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] os_vif_linux_bridge.iptables_top_regex = {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.357021] 
env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] os_vif_linux_bridge.network_device_mtu = 1500 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.357198] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] os_vif_linux_bridge.use_ipv6 = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.357364] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] os_vif_linux_bridge.vlan_interface = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.357548] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] os_vif_ovs.default_qos_type = linux-noop {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.357717] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] os_vif_ovs.isolate_vif = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.357890] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] os_vif_ovs.network_device_mtu = 1500 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.358073] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] os_vif_ovs.ovs_vsctl_timeout = 120 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.358250] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.358419] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] os_vif_ovs.ovsdb_interface = native {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.358583] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] os_vif_ovs.per_port_bridge = False {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.358752] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] privsep_osbrick.capabilities = [21] {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.358911] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] privsep_osbrick.group = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.359080] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] privsep_osbrick.helper_command = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.359251] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] privsep_osbrick.logger_name = os_brick.privileged {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 
510.359414] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] privsep_osbrick.thread_pool_size = 8 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.359572] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] privsep_osbrick.user = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.359743] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] nova_sys_admin.capabilities = [0, 1, 2, 3, 12, 21] {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.359898] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] nova_sys_admin.group = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.360067] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] nova_sys_admin.helper_command = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.360235] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] nova_sys_admin.logger_name = oslo_privsep.daemon {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.360401] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] nova_sys_admin.thread_pool_size = 8 {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.360561] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] nova_sys_admin.user = None {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 510.360690] env[63028]: DEBUG oslo_service.service [None req-c6b0a95b-0867-40a8-9dea-6390ae07432c None None] ******************************************************************************** {{(pid=63028) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2828}} [ 510.361125] env[63028]: INFO nova.service [-] Starting compute node (version 0.0.1) [ 510.865838] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-2595f497-eaea-49b6-8d45-48516f284a40 None None] Getting list of instances from cluster (obj){ [ 510.865838] env[63028]: value = "domain-c8" [ 510.865838] env[63028]: _type = "ClusterComputeResource" [ 510.865838] env[63028]: } {{(pid=63028) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 510.866411] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4948964d-a63c-4d2a-83a3-daf7cc693c60 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 510.875170] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-2595f497-eaea-49b6-8d45-48516f284a40 None None] Got total of 0 instances {{(pid=63028) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 510.875753] env[63028]: WARNING nova.virt.vmwareapi.driver [None req-2595f497-eaea-49b6-8d45-48516f284a40 None None] The vmwareapi driver is not tested by the OpenStack project nor does it have clear maintainer(s) and thus its quality can not be ensured. 
It should be considered experimental and may be removed in a future release. If you are using the driver in production please let us know via the openstack-discuss mailing list. [ 510.876239] env[63028]: INFO nova.virt.node [None req-2595f497-eaea-49b6-8d45-48516f284a40 None None] Generated node identity 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 [ 510.876477] env[63028]: INFO nova.virt.node [None req-2595f497-eaea-49b6-8d45-48516f284a40 None None] Wrote node identity 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 to /opt/stack/data/n-cpu-1/compute_id [ 511.378858] env[63028]: WARNING nova.compute.manager [None req-2595f497-eaea-49b6-8d45-48516f284a40 None None] Compute nodes ['399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2'] for host cpu-1 were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning. [ 512.384651] env[63028]: INFO nova.compute.manager [None req-2595f497-eaea-49b6-8d45-48516f284a40 None None] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host [ 513.390983] env[63028]: WARNING nova.compute.manager [None req-2595f497-eaea-49b6-8d45-48516f284a40 None None] No compute node record found for host cpu-1. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cpu-1 could not be found. [ 513.391369] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2595f497-eaea-49b6-8d45-48516f284a40 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 513.391491] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2595f497-eaea-49b6-8d45-48516f284a40 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 513.391640] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2595f497-eaea-49b6-8d45-48516f284a40 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 513.391792] env[63028]: DEBUG nova.compute.resource_tracker [None req-2595f497-eaea-49b6-8d45-48516f284a40 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63028) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 513.392726] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-976fd7d8-5e17-49ad-998c-eba568e60876 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 513.400918] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7f2476a-fb8d-46c9-b13a-a98be84b8a12 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 513.414583] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f1f700e-a315-41e4-960c-f469c1d398bc {{(pid=63028) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 513.420979] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e0aaab3-b5db-4378-ab7d-6f1c10b0c672 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 513.449703] env[63028]: DEBUG nova.compute.resource_tracker [None req-2595f497-eaea-49b6-8d45-48516f284a40 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181147MB free_disk=111GB free_vcpus=48 pci_devices=None {{(pid=63028) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 513.449842] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2595f497-eaea-49b6-8d45-48516f284a40 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 513.450050] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2595f497-eaea-49b6-8d45-48516f284a40 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 513.952764] env[63028]: WARNING nova.compute.resource_tracker [None req-2595f497-eaea-49b6-8d45-48516f284a40 None None] No compute node record for cpu-1:399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 could not be found. [ 514.456848] env[63028]: INFO nova.compute.resource_tracker [None req-2595f497-eaea-49b6-8d45-48516f284a40 None None] Compute node record created for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 with uuid: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 [ 515.964768] env[63028]: DEBUG nova.compute.resource_tracker [None req-2595f497-eaea-49b6-8d45-48516f284a40 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63028) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 515.965150] env[63028]: DEBUG nova.compute.resource_tracker [None req-2595f497-eaea-49b6-8d45-48516f284a40 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63028) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 516.119663] env[63028]: INFO nova.scheduler.client.report [None req-2595f497-eaea-49b6-8d45-48516f284a40 None None] [req-84a4b4df-f2b2-40ff-900e-e655833f7805] Created resource provider record via placement API for resource provider with UUID 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 and name domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28. 
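At this point the resource tracker has generated the node identity, created the matching resource provider record in Placement, and is about to push the inventory it derived from the vSphere cluster stats; the VCPU/MEMORY_MB/DISK_GB figures appear in the entries that follow. As a minimal illustration of that step (not part of this log), the sketch below reads the same inventory back from the Placement REST API. The endpoint URL and token are placeholders I am assuming for the example; only the provider UUID is taken from the entries above.

    # Illustrative sketch only: read back the inventory nova-compute reported
    # for this node. PLACEMENT_URL and TOKEN are placeholders; the provider
    # UUID is the node identity generated earlier in this log.
    import requests

    PLACEMENT_URL = "http://placement.example.test/placement"  # placeholder endpoint
    TOKEN = "<keystone-token>"                                  # placeholder auth token
    PROVIDER = "399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2"           # from the log above

    resp = requests.get(
        f"{PLACEMENT_URL}/resource_providers/{PROVIDER}/inventories",
        headers={"X-Auth-Token": TOKEN,
                 "OpenStack-API-Version": "placement 1.10"},
    )
    resp.raise_for_status()
    inventories = resp.json()["inventories"]
    # Expected to match the resource tracker's figures logged below:
    #   VCPU: total=48, max_unit=16, allocation_ratio=4.0
    #   MEMORY_MB: total=196590, reserved=512
    #   DISK_GB: total=400, max_unit=111
    print(inventories["VCPU"], inventories["MEMORY_MB"], inventories["DISK_GB"])

If the values returned by Placement diverge from the inventory updates logged below, the generation-conflict retries in the scheduler report client are usually the first place to look.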
[ 516.137429] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fc525d0-27d6-438d-a351-964b30f05999 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 516.145320] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f791b37-7505-47b4-832c-bbd16d466ca9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 516.175541] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-810be72f-b46e-4704-ac9e-a226b5c8ddc0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 516.182633] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fff6add-f58c-4383-803d-1a0994322163 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 516.195482] env[63028]: DEBUG nova.compute.provider_tree [None req-2595f497-eaea-49b6-8d45-48516f284a40 None None] Updating inventory in ProviderTree for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 516.731469] env[63028]: DEBUG nova.scheduler.client.report [None req-2595f497-eaea-49b6-8d45-48516f284a40 None None] Updated inventory for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 with generation 0 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 516.731703] env[63028]: DEBUG nova.compute.provider_tree [None req-2595f497-eaea-49b6-8d45-48516f284a40 None None] Updating resource provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 generation from 0 to 1 during operation: update_inventory {{(pid=63028) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 516.732015] env[63028]: DEBUG nova.compute.provider_tree [None req-2595f497-eaea-49b6-8d45-48516f284a40 None None] Updating inventory in ProviderTree for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 516.778359] env[63028]: DEBUG nova.compute.provider_tree [None req-2595f497-eaea-49b6-8d45-48516f284a40 None None] Updating 
resource provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 generation from 1 to 2 during operation: update_traits {{(pid=63028) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 517.282961] env[63028]: DEBUG nova.compute.resource_tracker [None req-2595f497-eaea-49b6-8d45-48516f284a40 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63028) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 517.283290] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2595f497-eaea-49b6-8d45-48516f284a40 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.833s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 517.283386] env[63028]: DEBUG nova.service [None req-2595f497-eaea-49b6-8d45-48516f284a40 None None] Creating RPC server for service compute {{(pid=63028) start /opt/stack/nova/nova/service.py:186}} [ 517.297958] env[63028]: DEBUG nova.service [None req-2595f497-eaea-49b6-8d45-48516f284a40 None None] Join ServiceGroup membership for this service compute {{(pid=63028) start /opt/stack/nova/nova/service.py:203}} [ 517.297958] env[63028]: DEBUG nova.servicegroup.drivers.db [None req-2595f497-eaea-49b6-8d45-48516f284a40 None None] DB_Driver: join new ServiceGroup member cpu-1 to the compute group, service = {{(pid=63028) join /opt/stack/nova/nova/servicegroup/drivers/db.py:44}} [ 549.671688] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Acquiring lock "03a19e41-1146-4560-8d93-16a23aa952da" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 549.672013] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Lock "03a19e41-1146-4560-8d93-16a23aa952da" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 550.177298] env[63028]: DEBUG nova.compute.manager [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] [instance: 03a19e41-1146-4560-8d93-16a23aa952da] Starting instance... 
{{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 550.716135] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 550.716135] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 550.716135] env[63028]: INFO nova.compute.claims [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] [instance: 03a19e41-1146-4560-8d93-16a23aa952da] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 551.763561] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00e0adf2-f317-426e-a2bc-c718a8de2e85 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.771656] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3c3d9f5-1883-41ce-bd8d-7a0088cb2f3b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.801722] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4281ef7f-76f6-4420-bf4e-f85a05610ec6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.809819] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62d0f0d4-801c-4429-b424-71bc69eed29c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.823845] env[63028]: DEBUG nova.compute.provider_tree [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 552.327666] env[63028]: DEBUG nova.scheduler.client.report [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 552.837895] env[63028]: DEBUG oslo_concurrency.lockutils [None 
req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.124s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 552.838579] env[63028]: DEBUG nova.compute.manager [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] [instance: 03a19e41-1146-4560-8d93-16a23aa952da] Start building networks asynchronously for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 553.346072] env[63028]: DEBUG nova.compute.utils [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 553.348541] env[63028]: DEBUG nova.compute.manager [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] [instance: 03a19e41-1146-4560-8d93-16a23aa952da] Not allocating networking since 'none' was specified. {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 553.849694] env[63028]: DEBUG nova.compute.manager [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] [instance: 03a19e41-1146-4560-8d93-16a23aa952da] Start building block device mappings for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 554.249040] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d4920ddd-15db-4060-9c6b-ec4bd6fea66c tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Acquiring lock "413f7fea-452b-463f-b396-cdd29e8ffa91" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 554.249040] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d4920ddd-15db-4060-9c6b-ec4bd6fea66c tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Lock "413f7fea-452b-463f-b396-cdd29e8ffa91" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 554.751240] env[63028]: DEBUG nova.compute.manager [None req-d4920ddd-15db-4060-9c6b-ec4bd6fea66c tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] [instance: 413f7fea-452b-463f-b396-cdd29e8ffa91] Starting instance... {{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 554.863180] env[63028]: DEBUG nova.compute.manager [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] [instance: 03a19e41-1146-4560-8d93-16a23aa952da] Start spawning the instance on the hypervisor. 
{{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 554.905908] env[63028]: DEBUG oslo_concurrency.lockutils [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Acquiring lock "679fca11-7390-4596-ab74-2f82a6cf8858" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 554.906537] env[63028]: DEBUG oslo_concurrency.lockutils [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Lock "679fca11-7390-4596-ab74-2f82a6cf8858" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 555.284715] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d4920ddd-15db-4060-9c6b-ec4bd6fea66c tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 555.285017] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d4920ddd-15db-4060-9c6b-ec4bd6fea66c tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 555.288597] env[63028]: INFO nova.compute.claims [None req-d4920ddd-15db-4060-9c6b-ec4bd6fea66c tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] [instance: 413f7fea-452b-463f-b396-cdd29e8ffa91] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 555.299074] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager._sync_power_states {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 555.408706] env[63028]: DEBUG nova.compute.manager [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] [instance: 679fca11-7390-4596-ab74-2f82a6cf8858] Starting instance... 
{{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 555.802934] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Getting list of instances from cluster (obj){ [ 555.802934] env[63028]: value = "domain-c8" [ 555.802934] env[63028]: _type = "ClusterComputeResource" [ 555.802934] env[63028]: } {{(pid=63028) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 555.805135] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33922f7a-1620-4deb-86f2-4d231da92f11 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.816923] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Got total of 0 instances {{(pid=63028) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 555.818968] env[63028]: WARNING nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] While synchronizing instance power states, found 1 instances in the database and 0 instances on the hypervisor. [ 555.818968] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Triggering sync for uuid 03a19e41-1146-4560-8d93-16a23aa952da {{(pid=63028) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10855}} [ 555.818968] env[63028]: DEBUG oslo_concurrency.lockutils [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Acquiring lock "03a19e41-1146-4560-8d93-16a23aa952da" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 555.818968] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 555.818968] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Getting list of instances from cluster (obj){ [ 555.818968] env[63028]: value = "domain-c8" [ 555.818968] env[63028]: _type = "ClusterComputeResource" [ 555.818968] env[63028]: } {{(pid=63028) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 555.820761] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e577ed07-eb1d-4a1c-9de0-6aebd923c56e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.830663] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Got total of 0 instances {{(pid=63028) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 555.946196] env[63028]: DEBUG oslo_concurrency.lockutils [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 556.342909] env[63028]: DEBUG nova.virt.hardware [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 
tempest-ServersAaction247Test-270802812-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 556.342909] env[63028]: DEBUG nova.virt.hardware [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 556.342909] env[63028]: DEBUG nova.virt.hardware [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 556.343358] env[63028]: DEBUG nova.virt.hardware [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 556.343525] env[63028]: DEBUG nova.virt.hardware [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 556.344231] env[63028]: DEBUG nova.virt.hardware [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 556.345077] env[63028]: DEBUG nova.virt.hardware [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 556.345226] env[63028]: DEBUG nova.virt.hardware [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 556.345954] env[63028]: DEBUG nova.virt.hardware [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 
556.345954] env[63028]: DEBUG nova.virt.hardware [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 556.346042] env[63028]: DEBUG nova.virt.hardware [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 556.346908] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f619e369-a517-4319-9032-890bcf6bfa01 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.362812] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-284faa91-b219-4ca7-8877-503da1944b78 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.389026] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8495b00-09d7-4650-b6f3-535f911c152b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.410055] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] [instance: 03a19e41-1146-4560-8d93-16a23aa952da] Instance VIF info [] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 556.421623] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Creating folder: OpenStack. Parent ref: group-v4. {{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 556.421943] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-dfbd9771-f38f-4250-95bb-dcf1b677d820 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.425093] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7481d4a-82f9-4ef8-95c3-9a4939eae41c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.443817] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a91bb7f-ffb0-4698-85cc-6113539b6256 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.448272] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Created folder: OpenStack in parent group-v4. [ 556.448466] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Creating folder: Project (7659149a9e984b4fa962068e27902f51). Parent ref: group-v550570. 
{{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 556.448706] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-458d7037-e8f9-4701-ad2b-3085ab9ed6b5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.486501] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80a7d558-51e3-4a38-ae6c-003fd8dff274 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.489472] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Created folder: Project (7659149a9e984b4fa962068e27902f51) in parent group-v550570. [ 556.489472] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Creating folder: Instances. Parent ref: group-v550571. {{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 556.489651] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bb903b68-b9c1-4962-94c1-b4da792ee70d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.497213] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19470a60-eef8-472d-bd7a-a83a9d713554 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.502828] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Created folder: Instances in parent group-v550571. [ 556.503114] env[63028]: DEBUG oslo.service.loopingcall [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 556.503670] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 03a19e41-1146-4560-8d93-16a23aa952da] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 556.503870] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8f71bf77-9849-4a66-96a1-f2b0f3a82510 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.528342] env[63028]: DEBUG nova.compute.provider_tree [None req-d4920ddd-15db-4060-9c6b-ec4bd6fea66c tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 556.535108] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 556.535108] env[63028]: value = "task-2734814" [ 556.535108] env[63028]: _type = "Task" [ 556.535108] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 556.547323] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2734814, 'name': CreateVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 557.035978] env[63028]: DEBUG nova.scheduler.client.report [None req-d4920ddd-15db-4060-9c6b-ec4bd6fea66c tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 557.051021] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2734814, 'name': CreateVM_Task, 'duration_secs': 0.341149} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 557.051021] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 03a19e41-1146-4560-8d93-16a23aa952da] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 557.051021] env[63028]: DEBUG oslo_vmware.service [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b52c202-ce5a-42ff-94a0-5535f437a5dd {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.056641] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 557.056641] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 557.057863] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 557.057863] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c6c4433f-790f-4c31-b413-75c41d9e93a3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.063958] env[63028]: DEBUG oslo_vmware.api [None req-7791def6-c18f-4fb4-817f-1f20aef66632 
tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Waiting for the task: (returnval){ [ 557.063958] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]523e61f9-befb-7b1b-bbb0-3059a633d05b" [ 557.063958] env[63028]: _type = "Task" [ 557.063958] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 557.074182] env[63028]: DEBUG oslo_vmware.api [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]523e61f9-befb-7b1b-bbb0-3059a633d05b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 557.544400] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d4920ddd-15db-4060-9c6b-ec4bd6fea66c tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.258s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 557.544400] env[63028]: DEBUG nova.compute.manager [None req-d4920ddd-15db-4060-9c6b-ec4bd6fea66c tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] [instance: 413f7fea-452b-463f-b396-cdd29e8ffa91] Start building networks asynchronously for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 557.549018] env[63028]: DEBUG oslo_concurrency.lockutils [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.601s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 557.549018] env[63028]: INFO nova.compute.claims [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] [instance: 679fca11-7390-4596-ab74-2f82a6cf8858] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 557.582845] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 557.583774] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] [instance: 03a19e41-1146-4560-8d93-16a23aa952da] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 557.583864] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Acquiring lock "[datastore1] 
devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 557.584058] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 557.584470] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 557.585214] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-699b4306-a038-461b-8cf5-c9736bbcb5d0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.605518] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 557.606479] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 557.609547] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb604a0a-0fb2-4eb7-a36b-3837fc7bb736 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.616164] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-316761f5-9d00-4fd6-a98e-52174735fd83 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.622412] env[63028]: DEBUG oslo_vmware.api [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Waiting for the task: (returnval){ [ 557.622412] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52f2cf6d-850b-5df1-cb03-06ea5c02e863" [ 557.622412] env[63028]: _type = "Task" [ 557.622412] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 557.633617] env[63028]: DEBUG oslo_vmware.api [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52f2cf6d-850b-5df1-cb03-06ea5c02e863, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 558.053338] env[63028]: DEBUG nova.compute.utils [None req-d4920ddd-15db-4060-9c6b-ec4bd6fea66c tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 558.054877] env[63028]: DEBUG nova.compute.manager [None req-d4920ddd-15db-4060-9c6b-ec4bd6fea66c tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] [instance: 413f7fea-452b-463f-b396-cdd29e8ffa91] Allocating IP information in the background. {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 558.055019] env[63028]: DEBUG nova.network.neutron [None req-d4920ddd-15db-4060-9c6b-ec4bd6fea66c tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] [instance: 413f7fea-452b-463f-b396-cdd29e8ffa91] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 558.134584] env[63028]: DEBUG nova.policy [None req-d4920ddd-15db-4060-9c6b-ec4bd6fea66c tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd706aacdd72a4762b1af0b20ff69e30e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '381de553d9da4c94b923d790c12a28a3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 558.149118] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] [instance: 03a19e41-1146-4560-8d93-16a23aa952da] Preparing fetch location {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 558.149645] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Creating directory with path [datastore1] vmware_temp/e79f48e0-38e5-4d37-a5f0-a4d79d04169a/f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 558.151044] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8e95577f-a29b-4067-8351-eee8ecd63924 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.195093] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Created directory with path [datastore1] vmware_temp/e79f48e0-38e5-4d37-a5f0-a4d79d04169a/f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 558.197263] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 
tempest-ServersAaction247Test-270802812-project-member] [instance: 03a19e41-1146-4560-8d93-16a23aa952da] Fetch image to [datastore1] vmware_temp/e79f48e0-38e5-4d37-a5f0-a4d79d04169a/f2ba2026-3f4b-431c-97c1-c4ba582a9907/tmp-sparse.vmdk {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 558.197263] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] [instance: 03a19e41-1146-4560-8d93-16a23aa952da] Downloading image file data f2ba2026-3f4b-431c-97c1-c4ba582a9907 to [datastore1] vmware_temp/e79f48e0-38e5-4d37-a5f0-a4d79d04169a/f2ba2026-3f4b-431c-97c1-c4ba582a9907/tmp-sparse.vmdk on the data store datastore1 {{(pid=63028) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 558.197776] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-791b441f-fc62-4b34-832a-f302278f6ea7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.210856] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6e595ee-30f8-4886-bb23-a08c6b2cb41f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.221387] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef51d142-c175-4f69-8570-1c889dbfffef {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.262403] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90ecc182-f121-45ee-a719-bade015ac720 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.268442] env[63028]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-212a89c6-802a-4c33-ae2d-9c436c857f12 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.291746] env[63028]: DEBUG nova.virt.vmwareapi.images [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] [instance: 03a19e41-1146-4560-8d93-16a23aa952da] Downloading image file data f2ba2026-3f4b-431c-97c1-c4ba582a9907 to the data store datastore1 {{(pid=63028) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 558.317144] env[63028]: DEBUG oslo_concurrency.lockutils [None req-147062de-91cc-4792-8607-76dedea7efea tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] Acquiring lock "a167df01-05e4-453d-8800-9c104d912474" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 558.317144] env[63028]: DEBUG oslo_concurrency.lockutils [None req-147062de-91cc-4792-8607-76dedea7efea tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] Lock "a167df01-05e4-453d-8800-9c104d912474" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63028) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 558.375578] env[63028]: DEBUG oslo_vmware.rw_handles [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e79f48e0-38e5-4d37-a5f0-a4d79d04169a/f2ba2026-3f4b-431c-97c1-c4ba582a9907/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=63028) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 558.564400] env[63028]: DEBUG nova.compute.manager [None req-d4920ddd-15db-4060-9c6b-ec4bd6fea66c tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] [instance: 413f7fea-452b-463f-b396-cdd29e8ffa91] Start building block device mappings for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 558.669216] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3876f8d6-2048-4611-8f05-1579b3a3ace6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.682373] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-456fee7a-3a97-4a12-9e61-9805ad78ee29 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.720365] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4a1415e-f6df-4437-92c8-afcf71dd4f81 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.730396] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b439703-0575-4013-baca-09c0c74c3880 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.745024] env[63028]: DEBUG nova.compute.provider_tree [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 558.820363] env[63028]: DEBUG nova.compute.manager [None req-147062de-91cc-4792-8607-76dedea7efea tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] [instance: a167df01-05e4-453d-8800-9c104d912474] Starting instance... {{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 559.068370] env[63028]: DEBUG oslo_vmware.rw_handles [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Completed reading data from the image iterator. 
{{(pid=63028) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 559.073031] env[63028]: DEBUG oslo_vmware.rw_handles [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e79f48e0-38e5-4d37-a5f0-a4d79d04169a/f2ba2026-3f4b-431c-97c1-c4ba582a9907/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=63028) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 559.079978] env[63028]: DEBUG nova.network.neutron [None req-d4920ddd-15db-4060-9c6b-ec4bd6fea66c tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] [instance: 413f7fea-452b-463f-b396-cdd29e8ffa91] Successfully created port: 892c8e3d-851e-4ad1-bbab-938e49f4cba1 {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 559.208946] env[63028]: DEBUG nova.virt.vmwareapi.images [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] [instance: 03a19e41-1146-4560-8d93-16a23aa952da] Downloaded image file data f2ba2026-3f4b-431c-97c1-c4ba582a9907 to vmware_temp/e79f48e0-38e5-4d37-a5f0-a4d79d04169a/f2ba2026-3f4b-431c-97c1-c4ba582a9907/tmp-sparse.vmdk on the data store datastore1 {{(pid=63028) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 559.210566] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] [instance: 03a19e41-1146-4560-8d93-16a23aa952da] Caching image {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 559.210835] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Copying Virtual Disk [datastore1] vmware_temp/e79f48e0-38e5-4d37-a5f0-a4d79d04169a/f2ba2026-3f4b-431c-97c1-c4ba582a9907/tmp-sparse.vmdk to [datastore1] vmware_temp/e79f48e0-38e5-4d37-a5f0-a4d79d04169a/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 559.213901] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-aa0ba80c-7d2c-4a02-b2db-6eb1bf6da0e8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.221345] env[63028]: DEBUG oslo_vmware.api [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Waiting for the task: (returnval){ [ 559.221345] env[63028]: value = "task-2734815" [ 559.221345] env[63028]: _type = "Task" [ 559.221345] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 559.230267] env[63028]: DEBUG oslo_vmware.api [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Task: {'id': task-2734815, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 559.250866] env[63028]: DEBUG nova.scheduler.client.report [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 559.350139] env[63028]: DEBUG oslo_concurrency.lockutils [None req-147062de-91cc-4792-8607-76dedea7efea tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 559.466173] env[63028]: DEBUG oslo_concurrency.lockutils [None req-51a72729-4a98-4e9c-9e35-60196cf706b3 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Acquiring lock "f311a533-5c48-410b-ba3b-58f0032c8816" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 559.466173] env[63028]: DEBUG oslo_concurrency.lockutils [None req-51a72729-4a98-4e9c-9e35-60196cf706b3 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Lock "f311a533-5c48-410b-ba3b-58f0032c8816" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 559.473785] env[63028]: DEBUG oslo_concurrency.lockutils [None req-fda4047a-9e01-42e1-8149-a49f72d41c4d tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Acquiring lock "94b1bf30-0f9b-4197-99ff-6631a13ab2d1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 559.474332] env[63028]: DEBUG oslo_concurrency.lockutils [None req-fda4047a-9e01-42e1-8149-a49f72d41c4d tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Lock "94b1bf30-0f9b-4197-99ff-6631a13ab2d1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 559.585118] env[63028]: DEBUG nova.compute.manager [None req-d4920ddd-15db-4060-9c6b-ec4bd6fea66c tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] [instance: 413f7fea-452b-463f-b396-cdd29e8ffa91] Start spawning the instance on the hypervisor. 
{{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 559.625863] env[63028]: DEBUG nova.virt.hardware [None req-d4920ddd-15db-4060-9c6b-ec4bd6fea66c tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 559.626503] env[63028]: DEBUG nova.virt.hardware [None req-d4920ddd-15db-4060-9c6b-ec4bd6fea66c tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 559.626503] env[63028]: DEBUG nova.virt.hardware [None req-d4920ddd-15db-4060-9c6b-ec4bd6fea66c tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 559.626663] env[63028]: DEBUG nova.virt.hardware [None req-d4920ddd-15db-4060-9c6b-ec4bd6fea66c tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 559.626813] env[63028]: DEBUG nova.virt.hardware [None req-d4920ddd-15db-4060-9c6b-ec4bd6fea66c tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 559.627219] env[63028]: DEBUG nova.virt.hardware [None req-d4920ddd-15db-4060-9c6b-ec4bd6fea66c tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 559.628173] env[63028]: DEBUG nova.virt.hardware [None req-d4920ddd-15db-4060-9c6b-ec4bd6fea66c tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 559.628173] env[63028]: DEBUG nova.virt.hardware [None req-d4920ddd-15db-4060-9c6b-ec4bd6fea66c tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 559.628173] env[63028]: DEBUG nova.virt.hardware [None req-d4920ddd-15db-4060-9c6b-ec4bd6fea66c tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 559.628173] env[63028]: DEBUG nova.virt.hardware [None req-d4920ddd-15db-4060-9c6b-ec4bd6fea66c tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 559.628173] env[63028]: DEBUG nova.virt.hardware [None req-d4920ddd-15db-4060-9c6b-ec4bd6fea66c tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 559.629234] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bc981c6-640b-4aab-906c-b0cffde9dbbb {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.640487] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ea4cff4-a2fb-4e97-8c93-0d80a31b58de {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.732224] env[63028]: DEBUG oslo_vmware.api [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Task: {'id': task-2734815, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 559.756163] env[63028]: DEBUG oslo_concurrency.lockutils [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.209s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 559.756361] env[63028]: DEBUG nova.compute.manager [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] [instance: 679fca11-7390-4596-ab74-2f82a6cf8858] Start building networks asynchronously for instance. 
{{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 559.759793] env[63028]: DEBUG oslo_concurrency.lockutils [None req-147062de-91cc-4792-8607-76dedea7efea tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.410s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 559.761561] env[63028]: INFO nova.compute.claims [None req-147062de-91cc-4792-8607-76dedea7efea tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] [instance: a167df01-05e4-453d-8800-9c104d912474] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 559.968320] env[63028]: DEBUG nova.compute.manager [None req-51a72729-4a98-4e9c-9e35-60196cf706b3 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: f311a533-5c48-410b-ba3b-58f0032c8816] Starting instance... {{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 559.978764] env[63028]: DEBUG nova.compute.manager [None req-fda4047a-9e01-42e1-8149-a49f72d41c4d tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] [instance: 94b1bf30-0f9b-4197-99ff-6631a13ab2d1] Starting instance... {{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 560.235740] env[63028]: DEBUG oslo_vmware.api [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Task: {'id': task-2734815, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.71487} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 560.236215] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Copied Virtual Disk [datastore1] vmware_temp/e79f48e0-38e5-4d37-a5f0-a4d79d04169a/f2ba2026-3f4b-431c-97c1-c4ba582a9907/tmp-sparse.vmdk to [datastore1] vmware_temp/e79f48e0-38e5-4d37-a5f0-a4d79d04169a/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 560.236771] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Deleting the datastore file [datastore1] vmware_temp/e79f48e0-38e5-4d37-a5f0-a4d79d04169a/f2ba2026-3f4b-431c-97c1-c4ba582a9907/tmp-sparse.vmdk {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 560.237183] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-67c3e6a4-778e-4b81-bd49-7698bfb9e696 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.244079] env[63028]: DEBUG oslo_vmware.api [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Waiting for the task: (returnval){ [ 560.244079] env[63028]: value = "task-2734816" [ 560.244079] env[63028]: _type = "Task" [ 560.244079] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 560.253213] env[63028]: DEBUG oslo_vmware.api [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Task: {'id': task-2734816, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 560.266769] env[63028]: DEBUG nova.compute.utils [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 560.270786] env[63028]: DEBUG nova.compute.manager [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] [instance: 679fca11-7390-4596-ab74-2f82a6cf8858] Allocating IP information in the background. 
{{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 560.271129] env[63028]: DEBUG nova.network.neutron [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] [instance: 679fca11-7390-4596-ab74-2f82a6cf8858] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 560.415244] env[63028]: DEBUG nova.policy [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f8490a2071a04695a6e2702430c2a91a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'dfded6a4a6994d558e1c9c823d962d8d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 560.500859] env[63028]: DEBUG oslo_concurrency.lockutils [None req-51a72729-4a98-4e9c-9e35-60196cf706b3 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 560.505848] env[63028]: DEBUG oslo_concurrency.lockutils [None req-fda4047a-9e01-42e1-8149-a49f72d41c4d tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 560.757261] env[63028]: DEBUG oslo_vmware.api [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Task: {'id': task-2734816, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.023736} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 560.757457] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 560.758446] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Moving file from [datastore1] vmware_temp/e79f48e0-38e5-4d37-a5f0-a4d79d04169a/f2ba2026-3f4b-431c-97c1-c4ba582a9907 to [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907. 
{{(pid=63028) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 560.758446] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-4de85bc7-06d4-4bcc-b470-c18abe87ec48 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.766884] env[63028]: DEBUG oslo_vmware.api [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Waiting for the task: (returnval){ [ 560.766884] env[63028]: value = "task-2734817" [ 560.766884] env[63028]: _type = "Task" [ 560.766884] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 560.771445] env[63028]: DEBUG nova.compute.manager [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] [instance: 679fca11-7390-4596-ab74-2f82a6cf8858] Start building block device mappings for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 560.779360] env[63028]: DEBUG oslo_vmware.api [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Task: {'id': task-2734817, 'name': MoveDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 560.903691] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82b90def-a486-4692-a58b-8331976a6aa3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.913997] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9736a852-46dc-460d-93f9-89935ea9fe39 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.964639] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9b91f60-1c1b-482f-8447-fb09d4c424bc {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.977683] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22982ab4-ab37-4889-be7d-2e951f33f315 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.996650] env[63028]: DEBUG nova.compute.provider_tree [None req-147062de-91cc-4792-8607-76dedea7efea tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 561.279248] env[63028]: DEBUG oslo_vmware.api [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Task: {'id': task-2734817, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.025694} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 561.279248] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] File moved {{(pid=63028) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 561.279680] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] [instance: 03a19e41-1146-4560-8d93-16a23aa952da] Cleaning up location [datastore1] vmware_temp/e79f48e0-38e5-4d37-a5f0-a4d79d04169a {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 561.279680] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Deleting the datastore file [datastore1] vmware_temp/e79f48e0-38e5-4d37-a5f0-a4d79d04169a {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 561.279680] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-23265944-6dc9-4e02-97f4-0b9298023905 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.290134] env[63028]: DEBUG oslo_vmware.api [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Waiting for the task: (returnval){ [ 561.290134] env[63028]: value = "task-2734818" [ 561.290134] env[63028]: _type = "Task" [ 561.290134] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 561.300012] env[63028]: DEBUG oslo_vmware.api [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Task: {'id': task-2734818, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 561.503072] env[63028]: DEBUG nova.scheduler.client.report [None req-147062de-91cc-4792-8607-76dedea7efea tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 561.788464] env[63028]: DEBUG nova.compute.manager [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] [instance: 679fca11-7390-4596-ab74-2f82a6cf8858] Start spawning the instance on the hypervisor. 
{{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 561.804413] env[63028]: DEBUG oslo_vmware.api [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Task: {'id': task-2734818, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.03146} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 561.804723] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 561.805753] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c740d4c3-926b-4efd-bf85-02336d6ccdaa {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.816079] env[63028]: DEBUG oslo_vmware.api [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Waiting for the task: (returnval){ [ 561.816079] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52e07d95-fd43-7836-627b-759621b3b245" [ 561.816079] env[63028]: _type = "Task" [ 561.816079] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 561.824265] env[63028]: DEBUG oslo_vmware.api [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52e07d95-fd43-7836-627b-759621b3b245, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 561.835218] env[63028]: DEBUG nova.virt.hardware [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 561.835503] env[63028]: DEBUG nova.virt.hardware [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 561.835596] env[63028]: DEBUG nova.virt.hardware [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 561.835776] env[63028]: DEBUG nova.virt.hardware [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 561.835928] env[63028]: DEBUG nova.virt.hardware [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 561.836093] env[63028]: DEBUG nova.virt.hardware [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 561.836302] env[63028]: DEBUG nova.virt.hardware [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 561.836453] env[63028]: DEBUG nova.virt.hardware [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 561.836610] env[63028]: DEBUG nova.virt.hardware [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 561.836790] env[63028]: DEBUG nova.virt.hardware [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 561.836986] env[63028]: DEBUG nova.virt.hardware [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 561.837852] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8458129-355a-48bf-af31-e3af079248e2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.845491] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4048bf73-435e-4be4-9438-51ae04bfbf2e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.906154] env[63028]: DEBUG nova.network.neutron [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] [instance: 679fca11-7390-4596-ab74-2f82a6cf8858] Successfully created port: 4d374544-7b1e-450d-a5ad-0ee73b15715d {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 562.012023] env[63028]: DEBUG oslo_concurrency.lockutils [None req-147062de-91cc-4792-8607-76dedea7efea tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.250s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 562.012023] env[63028]: DEBUG nova.compute.manager [None req-147062de-91cc-4792-8607-76dedea7efea tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] [instance: a167df01-05e4-453d-8800-9c104d912474] Start building networks asynchronously for instance. 
{{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 562.012826] env[63028]: DEBUG oslo_concurrency.lockutils [None req-51a72729-4a98-4e9c-9e35-60196cf706b3 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.512s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 562.014469] env[63028]: INFO nova.compute.claims [None req-51a72729-4a98-4e9c-9e35-60196cf706b3 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: f311a533-5c48-410b-ba3b-58f0032c8816] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 562.132821] env[63028]: DEBUG nova.network.neutron [None req-d4920ddd-15db-4060-9c6b-ec4bd6fea66c tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] [instance: 413f7fea-452b-463f-b396-cdd29e8ffa91] Successfully updated port: 892c8e3d-851e-4ad1-bbab-938e49f4cba1 {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 562.328616] env[63028]: DEBUG oslo_vmware.api [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52e07d95-fd43-7836-627b-759621b3b245, 'name': SearchDatastore_Task, 'duration_secs': 0.009867} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 562.329077] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 562.329217] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] 03a19e41-1146-4560-8d93-16a23aa952da/03a19e41-1146-4560-8d93-16a23aa952da.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 562.329453] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3524fd9e-11fd-4740-ab24-98160bc30212 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.338797] env[63028]: DEBUG oslo_vmware.api [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Waiting for the task: (returnval){ [ 562.338797] env[63028]: value = "task-2734819" [ 562.338797] env[63028]: _type = "Task" [ 562.338797] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 562.348610] env[63028]: DEBUG oslo_vmware.api [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Task: {'id': task-2734819, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 562.519088] env[63028]: DEBUG nova.compute.utils [None req-147062de-91cc-4792-8607-76dedea7efea tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 562.525287] env[63028]: DEBUG nova.compute.manager [None req-147062de-91cc-4792-8607-76dedea7efea tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] [instance: a167df01-05e4-453d-8800-9c104d912474] Allocating IP information in the background. {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 562.525287] env[63028]: DEBUG nova.network.neutron [None req-147062de-91cc-4792-8607-76dedea7efea tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] [instance: a167df01-05e4-453d-8800-9c104d912474] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 562.638284] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d4920ddd-15db-4060-9c6b-ec4bd6fea66c tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Acquiring lock "refresh_cache-413f7fea-452b-463f-b396-cdd29e8ffa91" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 562.638284] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d4920ddd-15db-4060-9c6b-ec4bd6fea66c tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Acquired lock "refresh_cache-413f7fea-452b-463f-b396-cdd29e8ffa91" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 562.638284] env[63028]: DEBUG nova.network.neutron [None req-d4920ddd-15db-4060-9c6b-ec4bd6fea66c tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] [instance: 413f7fea-452b-463f-b396-cdd29e8ffa91] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 562.672461] env[63028]: DEBUG nova.policy [None req-147062de-91cc-4792-8607-76dedea7efea tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f194edeb9f644d3e935154bf2d17bf8d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b2d6f0972d424163992e4841a5a22052', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 562.851474] env[63028]: DEBUG oslo_vmware.api [None 
req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Task: {'id': task-2734819, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.511836} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 562.851765] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] 03a19e41-1146-4560-8d93-16a23aa952da/03a19e41-1146-4560-8d93-16a23aa952da.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 562.852339] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] [instance: 03a19e41-1146-4560-8d93-16a23aa952da] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 562.852339] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a9a73f5f-96b5-403f-85ce-afd066763d47 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.858536] env[63028]: DEBUG oslo_vmware.api [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Waiting for the task: (returnval){ [ 562.858536] env[63028]: value = "task-2734820" [ 562.858536] env[63028]: _type = "Task" [ 562.858536] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 562.867959] env[63028]: DEBUG oslo_vmware.api [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Task: {'id': task-2734820, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 563.047036] env[63028]: DEBUG nova.compute.manager [None req-147062de-91cc-4792-8607-76dedea7efea tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] [instance: a167df01-05e4-453d-8800-9c104d912474] Start building block device mappings for instance. 
{{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 563.198096] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a920bcd6-5817-4f05-927e-319f9a82069b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.205970] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6526f7fc-6330-484b-86c1-53f63c1042a0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.243536] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-163426b0-9afb-4330-983b-1c582b139338 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.251164] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97c56ffb-b5d8-437e-a925-1d5de2f05d24 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.270435] env[63028]: DEBUG nova.compute.provider_tree [None req-51a72729-4a98-4e9c-9e35-60196cf706b3 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 563.293973] env[63028]: DEBUG nova.network.neutron [None req-d4920ddd-15db-4060-9c6b-ec4bd6fea66c tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] [instance: 413f7fea-452b-463f-b396-cdd29e8ffa91] Instance cache missing network info. {{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 563.380849] env[63028]: DEBUG oslo_vmware.api [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Task: {'id': task-2734820, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066584} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 563.383884] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] [instance: 03a19e41-1146-4560-8d93-16a23aa952da] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 563.386064] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad40456b-74e3-4f07-b683-31408f79b057 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.412086] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] [instance: 03a19e41-1146-4560-8d93-16a23aa952da] Reconfiguring VM instance instance-00000001 to attach disk [datastore1] 03a19e41-1146-4560-8d93-16a23aa952da/03a19e41-1146-4560-8d93-16a23aa952da.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 563.412855] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8cccc743-2fc7-4126-9ea3-fa35b3135bb4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.434410] env[63028]: DEBUG oslo_vmware.api [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Waiting for the task: (returnval){ [ 563.434410] env[63028]: value = "task-2734821" [ 563.434410] env[63028]: _type = "Task" [ 563.434410] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 563.444311] env[63028]: DEBUG oslo_vmware.api [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Task: {'id': task-2734821, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 563.705122] env[63028]: DEBUG nova.compute.manager [req-0159a93a-6acb-4f2b-952c-85c2c991c087 req-dc81ec9d-256d-408a-bfab-e75b6d000cf6 service nova] [instance: 413f7fea-452b-463f-b396-cdd29e8ffa91] Received event network-vif-plugged-892c8e3d-851e-4ad1-bbab-938e49f4cba1 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 563.705122] env[63028]: DEBUG oslo_concurrency.lockutils [req-0159a93a-6acb-4f2b-952c-85c2c991c087 req-dc81ec9d-256d-408a-bfab-e75b6d000cf6 service nova] Acquiring lock "413f7fea-452b-463f-b396-cdd29e8ffa91-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 563.705259] env[63028]: DEBUG oslo_concurrency.lockutils [req-0159a93a-6acb-4f2b-952c-85c2c991c087 req-dc81ec9d-256d-408a-bfab-e75b6d000cf6 service nova] Lock "413f7fea-452b-463f-b396-cdd29e8ffa91-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 563.705463] env[63028]: DEBUG oslo_concurrency.lockutils [req-0159a93a-6acb-4f2b-952c-85c2c991c087 req-dc81ec9d-256d-408a-bfab-e75b6d000cf6 service nova] Lock "413f7fea-452b-463f-b396-cdd29e8ffa91-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 563.705557] env[63028]: DEBUG nova.compute.manager [req-0159a93a-6acb-4f2b-952c-85c2c991c087 req-dc81ec9d-256d-408a-bfab-e75b6d000cf6 service nova] [instance: 413f7fea-452b-463f-b396-cdd29e8ffa91] No waiting events found dispatching network-vif-plugged-892c8e3d-851e-4ad1-bbab-938e49f4cba1 {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 563.705761] env[63028]: WARNING nova.compute.manager [req-0159a93a-6acb-4f2b-952c-85c2c991c087 req-dc81ec9d-256d-408a-bfab-e75b6d000cf6 service nova] [instance: 413f7fea-452b-463f-b396-cdd29e8ffa91] Received unexpected event network-vif-plugged-892c8e3d-851e-4ad1-bbab-938e49f4cba1 for instance with vm_state building and task_state spawning. [ 563.775145] env[63028]: DEBUG nova.scheduler.client.report [None req-51a72729-4a98-4e9c-9e35-60196cf706b3 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 563.944538] env[63028]: DEBUG oslo_vmware.api [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Task: {'id': task-2734821, 'name': ReconfigVM_Task, 'duration_secs': 0.284495} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 563.944840] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] [instance: 03a19e41-1146-4560-8d93-16a23aa952da] Reconfigured VM instance instance-00000001 to attach disk [datastore1] 03a19e41-1146-4560-8d93-16a23aa952da/03a19e41-1146-4560-8d93-16a23aa952da.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 563.945526] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-782d64ef-d21e-4155-9be7-2ef6ab6d5b0d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.952024] env[63028]: DEBUG oslo_vmware.api [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Waiting for the task: (returnval){ [ 563.952024] env[63028]: value = "task-2734822" [ 563.952024] env[63028]: _type = "Task" [ 563.952024] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 563.961500] env[63028]: DEBUG oslo_vmware.api [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Task: {'id': task-2734822, 'name': Rename_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 564.060548] env[63028]: DEBUG nova.compute.manager [None req-147062de-91cc-4792-8607-76dedea7efea tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] [instance: a167df01-05e4-453d-8800-9c104d912474] Start spawning the instance on the hypervisor. 
{{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 564.092482] env[63028]: DEBUG nova.virt.hardware [None req-147062de-91cc-4792-8607-76dedea7efea tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 564.092728] env[63028]: DEBUG nova.virt.hardware [None req-147062de-91cc-4792-8607-76dedea7efea tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 564.092878] env[63028]: DEBUG nova.virt.hardware [None req-147062de-91cc-4792-8607-76dedea7efea tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 564.093085] env[63028]: DEBUG nova.virt.hardware [None req-147062de-91cc-4792-8607-76dedea7efea tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 564.093229] env[63028]: DEBUG nova.virt.hardware [None req-147062de-91cc-4792-8607-76dedea7efea tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 564.093371] env[63028]: DEBUG nova.virt.hardware [None req-147062de-91cc-4792-8607-76dedea7efea tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 564.093574] env[63028]: DEBUG nova.virt.hardware [None req-147062de-91cc-4792-8607-76dedea7efea tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 564.093801] env[63028]: DEBUG nova.virt.hardware [None req-147062de-91cc-4792-8607-76dedea7efea tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 564.093979] env[63028]: DEBUG nova.virt.hardware [None req-147062de-91cc-4792-8607-76dedea7efea tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 564.094155] env[63028]: DEBUG nova.virt.hardware [None req-147062de-91cc-4792-8607-76dedea7efea tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 564.094324] env[63028]: DEBUG nova.virt.hardware [None req-147062de-91cc-4792-8607-76dedea7efea tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 564.096776] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7756e8e-d030-4c76-b360-098f210014ff {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.105206] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf46fa39-970b-4d2e-9f80-49d79dd7da31 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.157675] env[63028]: DEBUG nova.network.neutron [None req-d4920ddd-15db-4060-9c6b-ec4bd6fea66c tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] [instance: 413f7fea-452b-463f-b396-cdd29e8ffa91] Updating instance_info_cache with network_info: [{"id": "892c8e3d-851e-4ad1-bbab-938e49f4cba1", "address": "fa:16:3e:2c:e4:c1", "network": {"id": "5f8a710c-b049-40fe-8cc4-00c97e954fd9", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1979325486-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "381de553d9da4c94b923d790c12a28a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ea16b8c-ebf6-4bd4-ab67-ddb472e7b460", "external-id": "nsx-vlan-transportzone-156", "segmentation_id": 156, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap892c8e3d-85", "ovs_interfaceid": "892c8e3d-851e-4ad1-bbab-938e49f4cba1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 564.261735] env[63028]: DEBUG nova.network.neutron [None req-147062de-91cc-4792-8607-76dedea7efea tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] [instance: a167df01-05e4-453d-8800-9c104d912474] 
Successfully created port: 10ff9499-257b-4aba-99fa-2ca0aaedc466 {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 564.281728] env[63028]: DEBUG oslo_concurrency.lockutils [None req-51a72729-4a98-4e9c-9e35-60196cf706b3 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.268s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 564.282381] env[63028]: DEBUG nova.compute.manager [None req-51a72729-4a98-4e9c-9e35-60196cf706b3 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: f311a533-5c48-410b-ba3b-58f0032c8816] Start building networks asynchronously for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 564.284918] env[63028]: DEBUG oslo_concurrency.lockutils [None req-fda4047a-9e01-42e1-8149-a49f72d41c4d tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.779s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 564.287141] env[63028]: INFO nova.compute.claims [None req-fda4047a-9e01-42e1-8149-a49f72d41c4d tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] [instance: 94b1bf30-0f9b-4197-99ff-6631a13ab2d1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 564.465715] env[63028]: DEBUG oslo_vmware.api [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Task: {'id': task-2734822, 'name': Rename_Task, 'duration_secs': 0.126122} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 564.466126] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] [instance: 03a19e41-1146-4560-8d93-16a23aa952da] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 564.466297] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-94f65df8-7451-4c27-95e6-de0000d5532b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.473477] env[63028]: DEBUG oslo_vmware.api [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Waiting for the task: (returnval){ [ 564.473477] env[63028]: value = "task-2734823" [ 564.473477] env[63028]: _type = "Task" [ 564.473477] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 564.482874] env[63028]: DEBUG oslo_vmware.api [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Task: {'id': task-2734823, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 564.663160] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d4920ddd-15db-4060-9c6b-ec4bd6fea66c tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Releasing lock "refresh_cache-413f7fea-452b-463f-b396-cdd29e8ffa91" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 564.663160] env[63028]: DEBUG nova.compute.manager [None req-d4920ddd-15db-4060-9c6b-ec4bd6fea66c tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] [instance: 413f7fea-452b-463f-b396-cdd29e8ffa91] Instance network_info: |[{"id": "892c8e3d-851e-4ad1-bbab-938e49f4cba1", "address": "fa:16:3e:2c:e4:c1", "network": {"id": "5f8a710c-b049-40fe-8cc4-00c97e954fd9", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1979325486-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "381de553d9da4c94b923d790c12a28a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ea16b8c-ebf6-4bd4-ab67-ddb472e7b460", "external-id": "nsx-vlan-transportzone-156", "segmentation_id": 156, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap892c8e3d-85", "ovs_interfaceid": "892c8e3d-851e-4ad1-bbab-938e49f4cba1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 564.663661] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-d4920ddd-15db-4060-9c6b-ec4bd6fea66c tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] [instance: 413f7fea-452b-463f-b396-cdd29e8ffa91] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2c:e4:c1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8ea16b8c-ebf6-4bd4-ab67-ddb472e7b460', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '892c8e3d-851e-4ad1-bbab-938e49f4cba1', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 564.674950] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4920ddd-15db-4060-9c6b-ec4bd6fea66c tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Creating folder: Project (381de553d9da4c94b923d790c12a28a3). Parent ref: group-v550570. 
{{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 564.675254] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ebce5a42-8d9b-4e98-99a4-cca538852ca8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.687204] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-d4920ddd-15db-4060-9c6b-ec4bd6fea66c tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Created folder: Project (381de553d9da4c94b923d790c12a28a3) in parent group-v550570. [ 564.687527] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4920ddd-15db-4060-9c6b-ec4bd6fea66c tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Creating folder: Instances. Parent ref: group-v550574. {{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 564.687699] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f673ca8a-4f94-4a15-889d-9c33fb7f2c59 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.697050] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-d4920ddd-15db-4060-9c6b-ec4bd6fea66c tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Created folder: Instances in parent group-v550574. [ 564.697311] env[63028]: DEBUG oslo.service.loopingcall [None req-d4920ddd-15db-4060-9c6b-ec4bd6fea66c tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 564.697501] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 413f7fea-452b-463f-b396-cdd29e8ffa91] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 564.697762] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-808741df-2c45-4498-a404-52de29e3d60b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.721806] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 564.721806] env[63028]: value = "task-2734826" [ 564.721806] env[63028]: _type = "Task" [ 564.721806] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 564.735878] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2734826, 'name': CreateVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 564.791282] env[63028]: DEBUG nova.compute.utils [None req-51a72729-4a98-4e9c-9e35-60196cf706b3 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 564.795251] env[63028]: DEBUG nova.compute.manager [None req-51a72729-4a98-4e9c-9e35-60196cf706b3 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: f311a533-5c48-410b-ba3b-58f0032c8816] Allocating IP information in the background. 
{{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 564.795251] env[63028]: DEBUG nova.network.neutron [None req-51a72729-4a98-4e9c-9e35-60196cf706b3 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: f311a533-5c48-410b-ba3b-58f0032c8816] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 564.983892] env[63028]: DEBUG oslo_vmware.api [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Task: {'id': task-2734823, 'name': PowerOnVM_Task, 'duration_secs': 0.481139} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 564.984174] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] [instance: 03a19e41-1146-4560-8d93-16a23aa952da] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 564.984607] env[63028]: INFO nova.compute.manager [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] [instance: 03a19e41-1146-4560-8d93-16a23aa952da] Took 10.12 seconds to spawn the instance on the hypervisor. [ 564.984872] env[63028]: DEBUG nova.compute.manager [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] [instance: 03a19e41-1146-4560-8d93-16a23aa952da] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 564.985632] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bf5e1af-ff3b-4423-8e5f-20aee9470dfc {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.992963] env[63028]: DEBUG nova.policy [None req-51a72729-4a98-4e9c-9e35-60196cf706b3 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ab9cb927bc134277bb980682fef01978', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5ef9a42771824708832a74238bbe89c0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 565.229939] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2734826, 'name': CreateVM_Task} progress is 99%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 565.302263] env[63028]: DEBUG nova.compute.manager [None req-51a72729-4a98-4e9c-9e35-60196cf706b3 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: f311a533-5c48-410b-ba3b-58f0032c8816] Start building block device mappings for instance. 
{{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 565.443201] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e56da5eb-cc7a-4c3f-b40c-60a9e64049d6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.454498] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-346b378e-e2d5-43d2-aa80-ddd8c33b91e0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.491575] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-787ad61e-6c3f-486e-9d07-2a4ec70f9b8c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.499755] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4cdbfa1-9966-4897-b857-ab8b78311c16 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.519590] env[63028]: DEBUG nova.compute.provider_tree [None req-fda4047a-9e01-42e1-8149-a49f72d41c4d tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 565.521181] env[63028]: INFO nova.compute.manager [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] [instance: 03a19e41-1146-4560-8d93-16a23aa952da] Took 14.84 seconds to build instance. 
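The entries above trace the vmwareapi driver spawning instance 03a19e41-1146-4560-8d93-16a23aa952da: CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task and PowerOnVM_Task are each submitted to vCenter and then polled ("progress is N%") until they report "completed successfully". The fragment below is only a schematic of that poll-until-done loop, not oslo.vmware's actual wait_for_task implementation; poll_progress is a hypothetical callable standing in for one polling round trip to vCenter.

    import time

    def wait_for_task(poll_progress, interval=0.5, timeout=120.0):
        # poll_progress() is assumed to return (state, percent), where state is
        # one of "running", "success" or "error" -- a stand-in for reading the
        # vCenter task's state/progress the way the DEBUG entries above do.
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            state, percent = poll_progress()
            print(f"progress is {percent}%")  # mirrors the periodic _poll_task lines
            if state == "success":
                return
            if state == "error":
                raise RuntimeError("task failed")
            time.sleep(interval)
        raise TimeoutError("task did not complete in time")
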
[ 565.670325] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 565.670969] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 565.671498] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 565.671733] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 565.671999] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 565.672385] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 565.672772] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 565.673018] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63028) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}} [ 565.673219] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 565.733259] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2734826, 'name': CreateVM_Task, 'duration_secs': 0.51764} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 565.733259] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 413f7fea-452b-463f-b396-cdd29e8ffa91] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 565.754129] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d4920ddd-15db-4060-9c6b-ec4bd6fea66c tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 565.754337] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d4920ddd-15db-4060-9c6b-ec4bd6fea66c tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 565.754912] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d4920ddd-15db-4060-9c6b-ec4bd6fea66c tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 565.755200] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-75595dec-375e-4225-935f-5299bbd955e3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.760400] env[63028]: DEBUG oslo_vmware.api [None req-d4920ddd-15db-4060-9c6b-ec4bd6fea66c tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Waiting for the task: (returnval){ [ 565.760400] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52ead979-cbcd-cd92-d78b-491f999163e9" [ 565.760400] env[63028]: _type = "Task" [ 565.760400] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 565.771114] env[63028]: DEBUG oslo_vmware.api [None req-d4920ddd-15db-4060-9c6b-ec4bd6fea66c tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52ead979-cbcd-cd92-d78b-491f999163e9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 565.778808] env[63028]: DEBUG nova.network.neutron [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] [instance: 679fca11-7390-4596-ab74-2f82a6cf8858] Successfully updated port: 4d374544-7b1e-450d-a5ad-0ee73b15715d {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 565.878335] env[63028]: DEBUG nova.network.neutron [None req-51a72729-4a98-4e9c-9e35-60196cf706b3 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: f311a533-5c48-410b-ba3b-58f0032c8816] Successfully created port: c324d23d-8733-4dee-a740-12bc47cfb838 {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 566.023816] env[63028]: DEBUG nova.scheduler.client.report [None req-fda4047a-9e01-42e1-8149-a49f72d41c4d tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 566.030523] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7791def6-c18f-4fb4-817f-1f20aef66632 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Lock "03a19e41-1146-4560-8d93-16a23aa952da" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.358s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 566.030754] env[63028]: DEBUG oslo_concurrency.lockutils [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Lock "03a19e41-1146-4560-8d93-16a23aa952da" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 10.213s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 566.030882] env[63028]: INFO nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: 03a19e41-1146-4560-8d93-16a23aa952da] During sync_power_state the instance has a pending task (spawning). Skip. 
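The "Acquiring lock", "acquired ... waited Ns" and "released ... held Ns" DEBUG lines in this stretch come from oslo.concurrency's lockutils, which Nova uses to serialise work such as the resource tracker's compute_resources bookkeeping and the per-image datastore cache locks. Below is a minimal sketch of the two usual usage forms, assuming only that oslo.concurrency is installed; the lock name and the body of the protected block are placeholders for illustration, not Nova code.

    from oslo_concurrency import lockutils

    # Context-manager form: the named lock is taken on entry and dropped on
    # exit, and lockutils logs its acquire/release DEBUG messages around it.
    with lockutils.lock("compute_resources"):
        pass  # resource-tracker style bookkeeping would go here

    # Decorator form: every call to the wrapped function is serialised on the
    # same named lock.
    @lockutils.synchronized("compute_resources")
    def update_usage():
        pass
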
[ 566.031020] env[63028]: DEBUG oslo_concurrency.lockutils [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Lock "03a19e41-1146-4560-8d93-16a23aa952da" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 566.132329] env[63028]: DEBUG oslo_concurrency.lockutils [None req-74efb235-9e92-4f7d-b153-82f0da18a05e tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] Acquiring lock "f80df630-327b-4923-a785-5d2e48fe1f19" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 566.132871] env[63028]: DEBUG oslo_concurrency.lockutils [None req-74efb235-9e92-4f7d-b153-82f0da18a05e tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] Lock "f80df630-327b-4923-a785-5d2e48fe1f19" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 566.176214] env[63028]: DEBUG oslo_concurrency.lockutils [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 566.272887] env[63028]: DEBUG oslo_vmware.api [None req-d4920ddd-15db-4060-9c6b-ec4bd6fea66c tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52ead979-cbcd-cd92-d78b-491f999163e9, 'name': SearchDatastore_Task, 'duration_secs': 0.017141} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 566.272887] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d4920ddd-15db-4060-9c6b-ec4bd6fea66c tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 566.273105] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-d4920ddd-15db-4060-9c6b-ec4bd6fea66c tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] [instance: 413f7fea-452b-463f-b396-cdd29e8ffa91] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 566.273326] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d4920ddd-15db-4060-9c6b-ec4bd6fea66c tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 566.273479] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d4920ddd-15db-4060-9c6b-ec4bd6fea66c tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 566.273629] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-d4920ddd-15db-4060-9c6b-ec4bd6fea66c tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 566.273882] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3d8dc21a-43e9-46c0-9782-f97eadbdb8c0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.282082] env[63028]: DEBUG oslo_concurrency.lockutils [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Acquiring lock "refresh_cache-679fca11-7390-4596-ab74-2f82a6cf8858" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 566.282247] env[63028]: DEBUG oslo_concurrency.lockutils [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Acquired lock "refresh_cache-679fca11-7390-4596-ab74-2f82a6cf8858" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 566.282398] env[63028]: DEBUG nova.network.neutron [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] [instance: 679fca11-7390-4596-ab74-2f82a6cf8858] Building network info cache for instance 
{{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 566.284481] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-d4920ddd-15db-4060-9c6b-ec4bd6fea66c tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 566.284662] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-d4920ddd-15db-4060-9c6b-ec4bd6fea66c tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 566.285621] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-01b7282e-e3c2-4544-9e02-3150228507a3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.295786] env[63028]: DEBUG oslo_vmware.api [None req-d4920ddd-15db-4060-9c6b-ec4bd6fea66c tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Waiting for the task: (returnval){ [ 566.295786] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52911ac3-1cd0-3673-b1a7-64ae1754965a" [ 566.295786] env[63028]: _type = "Task" [ 566.295786] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 566.304148] env[63028]: DEBUG oslo_vmware.api [None req-d4920ddd-15db-4060-9c6b-ec4bd6fea66c tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52911ac3-1cd0-3673-b1a7-64ae1754965a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 566.315978] env[63028]: DEBUG nova.compute.manager [None req-51a72729-4a98-4e9c-9e35-60196cf706b3 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: f311a533-5c48-410b-ba3b-58f0032c8816] Start spawning the instance on the hypervisor. 
{{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 566.350403] env[63028]: DEBUG nova.virt.hardware [None req-51a72729-4a98-4e9c-9e35-60196cf706b3 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 566.350928] env[63028]: DEBUG nova.virt.hardware [None req-51a72729-4a98-4e9c-9e35-60196cf706b3 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 566.350928] env[63028]: DEBUG nova.virt.hardware [None req-51a72729-4a98-4e9c-9e35-60196cf706b3 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 566.350999] env[63028]: DEBUG nova.virt.hardware [None req-51a72729-4a98-4e9c-9e35-60196cf706b3 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 566.351660] env[63028]: DEBUG nova.virt.hardware [None req-51a72729-4a98-4e9c-9e35-60196cf706b3 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 566.351660] env[63028]: DEBUG nova.virt.hardware [None req-51a72729-4a98-4e9c-9e35-60196cf706b3 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 566.351660] env[63028]: DEBUG nova.virt.hardware [None req-51a72729-4a98-4e9c-9e35-60196cf706b3 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 566.351660] env[63028]: DEBUG nova.virt.hardware [None req-51a72729-4a98-4e9c-9e35-60196cf706b3 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 566.351804] env[63028]: DEBUG nova.virt.hardware [None req-51a72729-4a98-4e9c-9e35-60196cf706b3 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Got 1 possible 
topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 566.351902] env[63028]: DEBUG nova.virt.hardware [None req-51a72729-4a98-4e9c-9e35-60196cf706b3 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 566.352226] env[63028]: DEBUG nova.virt.hardware [None req-51a72729-4a98-4e9c-9e35-60196cf706b3 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 566.352903] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65d104ae-5220-491d-ac6b-5a824c74e329 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.360935] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f70de0e-3b8f-4740-a686-14002b8794e4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.531966] env[63028]: DEBUG oslo_concurrency.lockutils [None req-fda4047a-9e01-42e1-8149-a49f72d41c4d tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.247s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 566.532547] env[63028]: DEBUG nova.compute.manager [None req-fda4047a-9e01-42e1-8149-a49f72d41c4d tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] [instance: 94b1bf30-0f9b-4197-99ff-6631a13ab2d1] Start building networks asynchronously for instance. 
{{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 566.535209] env[63028]: DEBUG oslo_concurrency.lockutils [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.359s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 566.535447] env[63028]: DEBUG oslo_concurrency.lockutils [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 566.535648] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63028) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 566.536826] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e541af90-a15c-4c8d-b513-a72477360905 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.549916] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-726a7ade-01ad-4d9d-9e6d-fb47948ab207 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.573215] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b913c44-ecdf-415f-ae96-cec934f2010c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.581485] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47306733-31e5-4f9f-bb54-256d55a8b771 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.622233] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181115MB free_disk=111GB free_vcpus=48 pci_devices=None {{(pid=63028) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 566.622233] env[63028]: DEBUG oslo_concurrency.lockutils [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 566.622233] env[63028]: DEBUG oslo_concurrency.lockutils [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 566.637113] env[63028]: DEBUG nova.compute.manager [None req-74efb235-9e92-4f7d-b153-82f0da18a05e tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] [instance: f80df630-327b-4923-a785-5d2e48fe1f19] Starting instance... 
{{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 566.811344] env[63028]: DEBUG oslo_vmware.api [None req-d4920ddd-15db-4060-9c6b-ec4bd6fea66c tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52911ac3-1cd0-3673-b1a7-64ae1754965a, 'name': SearchDatastore_Task, 'duration_secs': 0.008619} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 566.812520] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c7361482-c14a-4c71-a138-0c42352e1bcc {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.819288] env[63028]: DEBUG oslo_vmware.api [None req-d4920ddd-15db-4060-9c6b-ec4bd6fea66c tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Waiting for the task: (returnval){ [ 566.819288] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]523cb878-3946-90a8-378e-d449b85277ce" [ 566.819288] env[63028]: _type = "Task" [ 566.819288] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 566.828777] env[63028]: DEBUG oslo_vmware.api [None req-d4920ddd-15db-4060-9c6b-ec4bd6fea66c tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]523cb878-3946-90a8-378e-d449b85277ce, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 566.886280] env[63028]: DEBUG nova.network.neutron [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] [instance: 679fca11-7390-4596-ab74-2f82a6cf8858] Instance cache missing network info. {{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 567.038881] env[63028]: DEBUG nova.compute.utils [None req-fda4047a-9e01-42e1-8149-a49f72d41c4d tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 567.041303] env[63028]: DEBUG nova.compute.manager [None req-fda4047a-9e01-42e1-8149-a49f72d41c4d tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] [instance: 94b1bf30-0f9b-4197-99ff-6631a13ab2d1] Not allocating networking since 'none' was specified. 
{{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 567.104059] env[63028]: DEBUG nova.compute.manager [req-13e63a72-72fc-4ebc-abfc-843449f1c68d req-235a0271-a16a-4afe-9241-dfde93487b0a service nova] [instance: 679fca11-7390-4596-ab74-2f82a6cf8858] Received event network-vif-plugged-4d374544-7b1e-450d-a5ad-0ee73b15715d {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 567.104407] env[63028]: DEBUG oslo_concurrency.lockutils [req-13e63a72-72fc-4ebc-abfc-843449f1c68d req-235a0271-a16a-4afe-9241-dfde93487b0a service nova] Acquiring lock "679fca11-7390-4596-ab74-2f82a6cf8858-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 567.104574] env[63028]: DEBUG oslo_concurrency.lockutils [req-13e63a72-72fc-4ebc-abfc-843449f1c68d req-235a0271-a16a-4afe-9241-dfde93487b0a service nova] Lock "679fca11-7390-4596-ab74-2f82a6cf8858-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 567.104938] env[63028]: DEBUG oslo_concurrency.lockutils [req-13e63a72-72fc-4ebc-abfc-843449f1c68d req-235a0271-a16a-4afe-9241-dfde93487b0a service nova] Lock "679fca11-7390-4596-ab74-2f82a6cf8858-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 567.105160] env[63028]: DEBUG nova.compute.manager [req-13e63a72-72fc-4ebc-abfc-843449f1c68d req-235a0271-a16a-4afe-9241-dfde93487b0a service nova] [instance: 679fca11-7390-4596-ab74-2f82a6cf8858] No waiting events found dispatching network-vif-plugged-4d374544-7b1e-450d-a5ad-0ee73b15715d {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 567.105361] env[63028]: WARNING nova.compute.manager [req-13e63a72-72fc-4ebc-abfc-843449f1c68d req-235a0271-a16a-4afe-9241-dfde93487b0a service nova] [instance: 679fca11-7390-4596-ab74-2f82a6cf8858] Received unexpected event network-vif-plugged-4d374544-7b1e-450d-a5ad-0ee73b15715d for instance with vm_state building and task_state spawning. [ 567.166355] env[63028]: DEBUG oslo_concurrency.lockutils [None req-74efb235-9e92-4f7d-b153-82f0da18a05e tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 567.336552] env[63028]: DEBUG oslo_vmware.api [None req-d4920ddd-15db-4060-9c6b-ec4bd6fea66c tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]523cb878-3946-90a8-378e-d449b85277ce, 'name': SearchDatastore_Task, 'duration_secs': 0.023679} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 567.337108] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d4920ddd-15db-4060-9c6b-ec4bd6fea66c tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 567.337535] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4920ddd-15db-4060-9c6b-ec4bd6fea66c tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] 413f7fea-452b-463f-b396-cdd29e8ffa91/413f7fea-452b-463f-b396-cdd29e8ffa91.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 567.337966] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e063ff8c-f6b6-4ce6-b01f-564486f4e113 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.348889] env[63028]: DEBUG oslo_vmware.api [None req-d4920ddd-15db-4060-9c6b-ec4bd6fea66c tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Waiting for the task: (returnval){ [ 567.348889] env[63028]: value = "task-2734827" [ 567.348889] env[63028]: _type = "Task" [ 567.348889] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 567.358793] env[63028]: DEBUG oslo_vmware.api [None req-d4920ddd-15db-4060-9c6b-ec4bd6fea66c tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Task: {'id': task-2734827, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 567.498887] env[63028]: DEBUG nova.network.neutron [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] [instance: 679fca11-7390-4596-ab74-2f82a6cf8858] Updating instance_info_cache with network_info: [{"id": "4d374544-7b1e-450d-a5ad-0ee73b15715d", "address": "fa:16:3e:03:f5:21", "network": {"id": "47c482bc-2ff1-431d-8910-0bf36def79a2", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.48", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "96661ac8d4f04d6e97eea4809b444133", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56136ef6-99d7-4562-9a9f-d66fec951c5c", "external-id": "nsx-vlan-transportzone-32", "segmentation_id": 32, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4d374544-7b", "ovs_interfaceid": "4d374544-7b1e-450d-a5ad-0ee73b15715d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 567.543515] env[63028]: DEBUG nova.compute.manager [None req-fda4047a-9e01-42e1-8149-a49f72d41c4d tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] [instance: 94b1bf30-0f9b-4197-99ff-6631a13ab2d1] Start building block device mappings for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 567.667855] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance 03a19e41-1146-4560-8d93-16a23aa952da actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 567.667855] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance 413f7fea-452b-463f-b396-cdd29e8ffa91 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 567.667855] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance 679fca11-7390-4596-ab74-2f82a6cf8858 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 567.667855] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance a167df01-05e4-453d-8800-9c104d912474 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 567.668034] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance f311a533-5c48-410b-ba3b-58f0032c8816 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 567.668034] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance 94b1bf30-0f9b-4197-99ff-6631a13ab2d1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 567.857408] env[63028]: DEBUG oslo_vmware.api [None req-d4920ddd-15db-4060-9c6b-ec4bd6fea66c tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Task: {'id': task-2734827, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.502687} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 567.857670] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4920ddd-15db-4060-9c6b-ec4bd6fea66c tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] 413f7fea-452b-463f-b396-cdd29e8ffa91/413f7fea-452b-463f-b396-cdd29e8ffa91.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 567.857838] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-d4920ddd-15db-4060-9c6b-ec4bd6fea66c tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] [instance: 413f7fea-452b-463f-b396-cdd29e8ffa91] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 567.858112] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7839821d-27c1-4979-a3c8-74fbae1c17f3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.867031] env[63028]: DEBUG oslo_vmware.api [None req-d4920ddd-15db-4060-9c6b-ec4bd6fea66c tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Waiting for the task: (returnval){ [ 567.867031] env[63028]: value = "task-2734828" [ 567.867031] env[63028]: _type = "Task" [ 567.867031] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 567.876394] env[63028]: DEBUG oslo_vmware.api [None req-d4920ddd-15db-4060-9c6b-ec4bd6fea66c tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Task: {'id': task-2734828, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 568.004951] env[63028]: DEBUG oslo_concurrency.lockutils [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Releasing lock "refresh_cache-679fca11-7390-4596-ab74-2f82a6cf8858" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 568.005282] env[63028]: DEBUG nova.compute.manager [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] [instance: 679fca11-7390-4596-ab74-2f82a6cf8858] Instance network_info: |[{"id": "4d374544-7b1e-450d-a5ad-0ee73b15715d", "address": "fa:16:3e:03:f5:21", "network": {"id": "47c482bc-2ff1-431d-8910-0bf36def79a2", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.48", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "96661ac8d4f04d6e97eea4809b444133", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56136ef6-99d7-4562-9a9f-d66fec951c5c", "external-id": "nsx-vlan-transportzone-32", "segmentation_id": 32, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4d374544-7b", "ovs_interfaceid": "4d374544-7b1e-450d-a5ad-0ee73b15715d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 568.005703] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] [instance: 679fca11-7390-4596-ab74-2f82a6cf8858] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:03:f5:21', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '56136ef6-99d7-4562-9a9f-d66fec951c5c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4d374544-7b1e-450d-a5ad-0ee73b15715d', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 568.015892] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Creating folder: Project (dfded6a4a6994d558e1c9c823d962d8d). Parent ref: group-v550570. 
{{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 568.016246] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b1b918a9-7fd1-477b-bc54-6adc833d88ac {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.027875] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Created folder: Project (dfded6a4a6994d558e1c9c823d962d8d) in parent group-v550570. [ 568.028109] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Creating folder: Instances. Parent ref: group-v550577. {{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 568.028477] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-800b7c2f-88a8-4338-9f94-7ec9f4063df4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.037235] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Created folder: Instances in parent group-v550577. [ 568.037562] env[63028]: DEBUG oslo.service.loopingcall [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 568.038466] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 679fca11-7390-4596-ab74-2f82a6cf8858] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 568.039197] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1a43af6d-c0d7-4e73-a5a3-99b7607a6392 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.069378] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 568.069378] env[63028]: value = "task-2734831" [ 568.069378] env[63028]: _type = "Task" [ 568.069378] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 568.079152] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2734831, 'name': CreateVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 568.178026] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance f80df630-327b-4923-a785-5d2e48fe1f19 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 568.178026] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Total usable vcpus: 48, total allocated vcpus: 6 {{(pid=63028) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 568.178026] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1664MB phys_disk=200GB used_disk=6GB total_vcpus=48 used_vcpus=6 pci_stats=[] {{(pid=63028) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 568.237918] env[63028]: DEBUG nova.network.neutron [None req-147062de-91cc-4792-8607-76dedea7efea tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] [instance: a167df01-05e4-453d-8800-9c104d912474] Successfully updated port: 10ff9499-257b-4aba-99fa-2ca0aaedc466 {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 568.336140] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e714fe1-59e3-468b-9629-f094e88171b1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.344983] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a3bfb04-a7f5-475b-b81f-de6e9009bf2e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.386442] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-009534a8-45ea-40d3-ba43-2f9d6ce6dc32 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.394195] env[63028]: DEBUG oslo_vmware.api [None req-d4920ddd-15db-4060-9c6b-ec4bd6fea66c tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Task: {'id': task-2734828, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.234393} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 568.397896] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-d4920ddd-15db-4060-9c6b-ec4bd6fea66c tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] [instance: 413f7fea-452b-463f-b396-cdd29e8ffa91] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 568.397896] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47d283a6-32d6-4109-81a1-df356271b334 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.400640] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6461fe0b-7d45-4151-a237-206a10b4e329 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.425164] env[63028]: DEBUG nova.compute.provider_tree [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 568.436162] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-d4920ddd-15db-4060-9c6b-ec4bd6fea66c tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] [instance: 413f7fea-452b-463f-b396-cdd29e8ffa91] Reconfiguring VM instance instance-00000002 to attach disk [datastore1] 413f7fea-452b-463f-b396-cdd29e8ffa91/413f7fea-452b-463f-b396-cdd29e8ffa91.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 568.437047] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3a9bf0e0-3f68-4ef2-8414-be7054e004df {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.455467] env[63028]: DEBUG oslo_vmware.api [None req-d4920ddd-15db-4060-9c6b-ec4bd6fea66c tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Waiting for the task: (returnval){ [ 568.455467] env[63028]: value = "task-2734832" [ 568.455467] env[63028]: _type = "Task" [ 568.455467] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 568.464048] env[63028]: DEBUG oslo_vmware.api [None req-d4920ddd-15db-4060-9c6b-ec4bd6fea66c tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Task: {'id': task-2734832, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 568.565683] env[63028]: DEBUG nova.compute.manager [None req-fda4047a-9e01-42e1-8149-a49f72d41c4d tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] [instance: 94b1bf30-0f9b-4197-99ff-6631a13ab2d1] Start spawning the instance on the hypervisor. {{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 568.579472] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2734831, 'name': CreateVM_Task, 'duration_secs': 0.348288} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 568.579646] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 679fca11-7390-4596-ab74-2f82a6cf8858] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 568.580896] env[63028]: DEBUG oslo_vmware.service [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-893c837a-d203-4a5e-a754-e22752759db2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.588696] env[63028]: DEBUG oslo_concurrency.lockutils [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 568.588872] env[63028]: DEBUG oslo_concurrency.lockutils [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 568.589309] env[63028]: DEBUG oslo_concurrency.lockutils [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 568.591326] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7b3ff5be-d587-4fda-a206-e0ca82c6a82b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.596653] env[63028]: DEBUG nova.virt.hardware [None req-fda4047a-9e01-42e1-8149-a49f72d41c4d tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 568.596900] env[63028]: DEBUG nova.virt.hardware [None req-fda4047a-9e01-42e1-8149-a49f72d41c4d tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Flavor limits 0:0:0 
{{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 568.597076] env[63028]: DEBUG nova.virt.hardware [None req-fda4047a-9e01-42e1-8149-a49f72d41c4d tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 568.597256] env[63028]: DEBUG nova.virt.hardware [None req-fda4047a-9e01-42e1-8149-a49f72d41c4d tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 568.597396] env[63028]: DEBUG nova.virt.hardware [None req-fda4047a-9e01-42e1-8149-a49f72d41c4d tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 568.597543] env[63028]: DEBUG nova.virt.hardware [None req-fda4047a-9e01-42e1-8149-a49f72d41c4d tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 568.597736] env[63028]: DEBUG nova.virt.hardware [None req-fda4047a-9e01-42e1-8149-a49f72d41c4d tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 568.597912] env[63028]: DEBUG nova.virt.hardware [None req-fda4047a-9e01-42e1-8149-a49f72d41c4d tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 568.598133] env[63028]: DEBUG nova.virt.hardware [None req-fda4047a-9e01-42e1-8149-a49f72d41c4d tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 568.598299] env[63028]: DEBUG nova.virt.hardware [None req-fda4047a-9e01-42e1-8149-a49f72d41c4d tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 568.598466] env[63028]: DEBUG nova.virt.hardware [None req-fda4047a-9e01-42e1-8149-a49f72d41c4d tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 568.599757] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09695484-c4bd-40bd-ab64-e6a91d0def90 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.603513] env[63028]: DEBUG oslo_vmware.api [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] 
Waiting for the task: (returnval){ [ 568.603513] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]522a2760-12f2-cffb-f98c-c8754b1f15e3" [ 568.603513] env[63028]: _type = "Task" [ 568.603513] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 568.610569] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8215b7be-1615-426d-b880-712532d067ea {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.618060] env[63028]: DEBUG oslo_concurrency.lockutils [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 568.618307] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] [instance: 679fca11-7390-4596-ab74-2f82a6cf8858] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 568.618532] env[63028]: DEBUG oslo_concurrency.lockutils [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 568.618670] env[63028]: DEBUG oslo_concurrency.lockutils [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 568.618839] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 568.619096] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f7d1c46a-f431-4844-811b-bb685cd0ff39 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.628775] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-fda4047a-9e01-42e1-8149-a49f72d41c4d tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] [instance: 94b1bf30-0f9b-4197-99ff-6631a13ab2d1] Instance VIF info [] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 568.634427] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-fda4047a-9e01-42e1-8149-a49f72d41c4d tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Creating folder: Project 
(76f830444f314d4588ca8c8ab03668bf). Parent ref: group-v550570. {{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 568.636184] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6fd029a9-c3c1-41b0-9b3c-a03e5e5502d8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.638691] env[63028]: DEBUG nova.compute.manager [req-c9ccfdc8-d49c-474b-8d87-84ac05500fb6 req-358a886d-9582-4d42-8343-9ede8523cba7 service nova] [instance: 413f7fea-452b-463f-b396-cdd29e8ffa91] Received event network-changed-892c8e3d-851e-4ad1-bbab-938e49f4cba1 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 568.638953] env[63028]: DEBUG nova.compute.manager [req-c9ccfdc8-d49c-474b-8d87-84ac05500fb6 req-358a886d-9582-4d42-8343-9ede8523cba7 service nova] [instance: 413f7fea-452b-463f-b396-cdd29e8ffa91] Refreshing instance network info cache due to event network-changed-892c8e3d-851e-4ad1-bbab-938e49f4cba1. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 568.639977] env[63028]: DEBUG oslo_concurrency.lockutils [req-c9ccfdc8-d49c-474b-8d87-84ac05500fb6 req-358a886d-9582-4d42-8343-9ede8523cba7 service nova] Acquiring lock "refresh_cache-413f7fea-452b-463f-b396-cdd29e8ffa91" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 568.639977] env[63028]: DEBUG oslo_concurrency.lockutils [req-c9ccfdc8-d49c-474b-8d87-84ac05500fb6 req-358a886d-9582-4d42-8343-9ede8523cba7 service nova] Acquired lock "refresh_cache-413f7fea-452b-463f-b396-cdd29e8ffa91" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 568.639977] env[63028]: DEBUG nova.network.neutron [req-c9ccfdc8-d49c-474b-8d87-84ac05500fb6 req-358a886d-9582-4d42-8343-9ede8523cba7 service nova] [instance: 413f7fea-452b-463f-b396-cdd29e8ffa91] Refreshing network info cache for port 892c8e3d-851e-4ad1-bbab-938e49f4cba1 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 568.641882] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 568.642067] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 568.644044] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d54798c-5717-46da-962b-9639b69f6074 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.650851] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-96fa4619-1792-4bbf-bd14-53d644023fdb {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.654694] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-fda4047a-9e01-42e1-8149-a49f72d41c4d tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Created folder: Project (76f830444f314d4588ca8c8ab03668bf) in parent group-v550570. [ 568.654792] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-fda4047a-9e01-42e1-8149-a49f72d41c4d tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Creating folder: Instances. Parent ref: group-v550580. {{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 568.655626] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a8963da3-a54d-4e13-aa67-74b5b1266b85 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.660173] env[63028]: DEBUG oslo_vmware.api [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Waiting for the task: (returnval){ [ 568.660173] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5211cb22-6d33-72c0-8d0b-81cc267a1cb1" [ 568.660173] env[63028]: _type = "Task" [ 568.660173] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 568.668721] env[63028]: DEBUG oslo_vmware.api [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5211cb22-6d33-72c0-8d0b-81cc267a1cb1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 568.669919] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-fda4047a-9e01-42e1-8149-a49f72d41c4d tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Created folder: Instances in parent group-v550580. [ 568.670162] env[63028]: DEBUG oslo.service.loopingcall [None req-fda4047a-9e01-42e1-8149-a49f72d41c4d tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 568.670299] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 94b1bf30-0f9b-4197-99ff-6631a13ab2d1] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 568.670555] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-acf69baa-66d9-4bd0-8ecb-85fa5b001768 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.687486] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 568.687486] env[63028]: value = "task-2734835" [ 568.687486] env[63028]: _type = "Task" [ 568.687486] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 568.695627] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2734835, 'name': CreateVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 568.743393] env[63028]: DEBUG oslo_concurrency.lockutils [None req-147062de-91cc-4792-8607-76dedea7efea tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] Acquiring lock "refresh_cache-a167df01-05e4-453d-8800-9c104d912474" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 568.743393] env[63028]: DEBUG oslo_concurrency.lockutils [None req-147062de-91cc-4792-8607-76dedea7efea tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] Acquired lock "refresh_cache-a167df01-05e4-453d-8800-9c104d912474" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 568.743393] env[63028]: DEBUG nova.network.neutron [None req-147062de-91cc-4792-8607-76dedea7efea tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] [instance: a167df01-05e4-453d-8800-9c104d912474] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 568.941529] env[63028]: DEBUG nova.scheduler.client.report [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 568.973353] env[63028]: DEBUG oslo_vmware.api [None req-d4920ddd-15db-4060-9c6b-ec4bd6fea66c tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Task: {'id': task-2734832, 'name': ReconfigVM_Task, 'duration_secs': 0.502151} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 568.974631] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-d4920ddd-15db-4060-9c6b-ec4bd6fea66c tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] [instance: 413f7fea-452b-463f-b396-cdd29e8ffa91] Reconfigured VM instance instance-00000002 to attach disk [datastore1] 413f7fea-452b-463f-b396-cdd29e8ffa91/413f7fea-452b-463f-b396-cdd29e8ffa91.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 568.976669] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d1f6d31e-c6be-43d2-b707-2ca4f34ef4fc {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.985300] env[63028]: DEBUG oslo_vmware.api [None req-d4920ddd-15db-4060-9c6b-ec4bd6fea66c tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Waiting for the task: (returnval){ [ 568.985300] env[63028]: value = "task-2734836" [ 568.985300] env[63028]: _type = "Task" [ 568.985300] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 568.995455] env[63028]: DEBUG oslo_vmware.api [None req-d4920ddd-15db-4060-9c6b-ec4bd6fea66c tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Task: {'id': task-2734836, 'name': Rename_Task} progress is 6%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 569.175465] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] [instance: 679fca11-7390-4596-ab74-2f82a6cf8858] Preparing fetch location {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 569.176094] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Creating directory with path [datastore2] vmware_temp/e6315db8-d49c-48bf-b10c-b977947d5bdf/f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 569.176094] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fbe8b380-d215-4e31-991d-19ec590823e1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.199718] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2734835, 'name': CreateVM_Task, 'duration_secs': 0.420516} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 569.201192] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 94b1bf30-0f9b-4197-99ff-6631a13ab2d1] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 569.201192] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Created directory with path [datastore2] vmware_temp/e6315db8-d49c-48bf-b10c-b977947d5bdf/f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 569.201355] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] [instance: 679fca11-7390-4596-ab74-2f82a6cf8858] Fetch image to [datastore2] vmware_temp/e6315db8-d49c-48bf-b10c-b977947d5bdf/f2ba2026-3f4b-431c-97c1-c4ba582a9907/tmp-sparse.vmdk {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 569.201536] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] [instance: 679fca11-7390-4596-ab74-2f82a6cf8858] Downloading image file data f2ba2026-3f4b-431c-97c1-c4ba582a9907 to [datastore2] vmware_temp/e6315db8-d49c-48bf-b10c-b977947d5bdf/f2ba2026-3f4b-431c-97c1-c4ba582a9907/tmp-sparse.vmdk on the data store datastore2 {{(pid=63028) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 569.201959] env[63028]: DEBUG oslo_concurrency.lockutils [None req-fda4047a-9e01-42e1-8149-a49f72d41c4d tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 569.202355] env[63028]: DEBUG oslo_concurrency.lockutils [None req-fda4047a-9e01-42e1-8149-a49f72d41c4d tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 569.202434] env[63028]: DEBUG oslo_concurrency.lockutils [None req-fda4047a-9e01-42e1-8149-a49f72d41c4d tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 569.203210] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f510bddc-218e-45b6-9dbd-4d546c3122df {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.207246] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6e2b6ccd-f3b4-4210-b086-7448d55272b1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.211579] env[63028]: DEBUG 
oslo_vmware.api [None req-fda4047a-9e01-42e1-8149-a49f72d41c4d tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Waiting for the task: (returnval){ [ 569.211579] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5241e431-43f4-8409-7cd2-39060cba5d98" [ 569.211579] env[63028]: _type = "Task" [ 569.211579] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 569.218689] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8a7d5e9-69f1-4cc7-88ff-492dfa2d54d8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.234898] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00b1f110-3b04-41db-8621-4309f2538a6d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.245033] env[63028]: DEBUG oslo_concurrency.lockutils [None req-fda4047a-9e01-42e1-8149-a49f72d41c4d tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 569.245033] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-fda4047a-9e01-42e1-8149-a49f72d41c4d tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] [instance: 94b1bf30-0f9b-4197-99ff-6631a13ab2d1] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 569.245033] env[63028]: DEBUG oslo_concurrency.lockutils [None req-fda4047a-9e01-42e1-8149-a49f72d41c4d tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 569.284873] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32219987-7aa4-44d9-8d2b-67c9c7495965 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.291293] env[63028]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-7492e676-b657-4b15-9c1e-a7d6cd3e2881 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.315462] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7796d56a-98c6-481b-9d55-c9c5a993a07a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Acquiring lock "e20ed04f-205b-4aa9-b8b6-e352cd237412" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 569.315844] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7796d56a-98c6-481b-9d55-c9c5a993a07a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Lock 
"e20ed04f-205b-4aa9-b8b6-e352cd237412" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 569.320049] env[63028]: DEBUG nova.virt.vmwareapi.images [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] [instance: 679fca11-7390-4596-ab74-2f82a6cf8858] Downloading image file data f2ba2026-3f4b-431c-97c1-c4ba582a9907 to the data store datastore2 {{(pid=63028) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 569.399182] env[63028]: DEBUG nova.network.neutron [None req-147062de-91cc-4792-8607-76dedea7efea tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] [instance: a167df01-05e4-453d-8800-9c104d912474] Instance cache missing network info. {{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 569.401657] env[63028]: DEBUG oslo_vmware.rw_handles [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e6315db8-d49c-48bf-b10c-b977947d5bdf/f2ba2026-3f4b-431c-97c1-c4ba582a9907/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=63028) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 569.466989] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63028) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 569.467925] env[63028]: DEBUG oslo_concurrency.lockutils [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.845s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 569.470617] env[63028]: DEBUG oslo_concurrency.lockutils [None req-74efb235-9e92-4f7d-b153-82f0da18a05e tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.304s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 569.472123] env[63028]: INFO nova.compute.claims [None req-74efb235-9e92-4f7d-b153-82f0da18a05e tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] [instance: f80df630-327b-4923-a785-5d2e48fe1f19] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 569.498535] env[63028]: DEBUG oslo_vmware.api [None req-d4920ddd-15db-4060-9c6b-ec4bd6fea66c tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Task: {'id': task-2734836, 'name': Rename_Task, 'duration_secs': 0.300382} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 569.498873] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4920ddd-15db-4060-9c6b-ec4bd6fea66c tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] [instance: 413f7fea-452b-463f-b396-cdd29e8ffa91] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 569.499134] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6bc351f3-0073-4dfd-bfe5-e0cb78fcf1cd {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.507358] env[63028]: DEBUG oslo_vmware.api [None req-d4920ddd-15db-4060-9c6b-ec4bd6fea66c tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Waiting for the task: (returnval){ [ 569.507358] env[63028]: value = "task-2734837" [ 569.507358] env[63028]: _type = "Task" [ 569.507358] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 569.520024] env[63028]: DEBUG oslo_vmware.api [None req-d4920ddd-15db-4060-9c6b-ec4bd6fea66c tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Task: {'id': task-2734837, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 569.623013] env[63028]: DEBUG oslo_concurrency.lockutils [None req-708d0fa9-8389-4fb9-b91d-cffdbfa9c678 tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] Acquiring lock "1eeb96d1-6e03-4192-a9db-955444519fd7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 569.623691] env[63028]: DEBUG oslo_concurrency.lockutils [None req-708d0fa9-8389-4fb9-b91d-cffdbfa9c678 tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] Lock "1eeb96d1-6e03-4192-a9db-955444519fd7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 569.820381] env[63028]: DEBUG nova.compute.manager [None req-7796d56a-98c6-481b-9d55-c9c5a993a07a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: e20ed04f-205b-4aa9-b8b6-e352cd237412] Starting instance... 
{{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 570.018994] env[63028]: DEBUG nova.network.neutron [None req-147062de-91cc-4792-8607-76dedea7efea tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] [instance: a167df01-05e4-453d-8800-9c104d912474] Updating instance_info_cache with network_info: [{"id": "10ff9499-257b-4aba-99fa-2ca0aaedc466", "address": "fa:16:3e:65:aa:a7", "network": {"id": "47c482bc-2ff1-431d-8910-0bf36def79a2", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.179", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "96661ac8d4f04d6e97eea4809b444133", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56136ef6-99d7-4562-9a9f-d66fec951c5c", "external-id": "nsx-vlan-transportzone-32", "segmentation_id": 32, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap10ff9499-25", "ovs_interfaceid": "10ff9499-257b-4aba-99fa-2ca0aaedc466", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 570.022329] env[63028]: DEBUG nova.network.neutron [None req-51a72729-4a98-4e9c-9e35-60196cf706b3 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: f311a533-5c48-410b-ba3b-58f0032c8816] Successfully updated port: c324d23d-8733-4dee-a740-12bc47cfb838 {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 570.041510] env[63028]: DEBUG oslo_vmware.api [None req-d4920ddd-15db-4060-9c6b-ec4bd6fea66c tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Task: {'id': task-2734837, 'name': PowerOnVM_Task} progress is 64%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 570.105051] env[63028]: DEBUG oslo_vmware.rw_handles [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Completed reading data from the image iterator. {{(pid=63028) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 570.105375] env[63028]: DEBUG oslo_vmware.rw_handles [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e6315db8-d49c-48bf-b10c-b977947d5bdf/f2ba2026-3f4b-431c-97c1-c4ba582a9907/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=63028) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 570.129628] env[63028]: DEBUG nova.compute.manager [None req-708d0fa9-8389-4fb9-b91d-cffdbfa9c678 tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] [instance: 1eeb96d1-6e03-4192-a9db-955444519fd7] Starting instance... 
{{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 570.260203] env[63028]: DEBUG nova.virt.vmwareapi.images [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] [instance: 679fca11-7390-4596-ab74-2f82a6cf8858] Downloaded image file data f2ba2026-3f4b-431c-97c1-c4ba582a9907 to vmware_temp/e6315db8-d49c-48bf-b10c-b977947d5bdf/f2ba2026-3f4b-431c-97c1-c4ba582a9907/tmp-sparse.vmdk on the data store datastore2 {{(pid=63028) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 570.263035] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] [instance: 679fca11-7390-4596-ab74-2f82a6cf8858] Caching image {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 570.263035] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Copying Virtual Disk [datastore2] vmware_temp/e6315db8-d49c-48bf-b10c-b977947d5bdf/f2ba2026-3f4b-431c-97c1-c4ba582a9907/tmp-sparse.vmdk to [datastore2] vmware_temp/e6315db8-d49c-48bf-b10c-b977947d5bdf/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 570.263445] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d3251de9-e848-4f20-b276-eea64bc90559 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.271374] env[63028]: DEBUG nova.network.neutron [req-c9ccfdc8-d49c-474b-8d87-84ac05500fb6 req-358a886d-9582-4d42-8343-9ede8523cba7 service nova] [instance: 413f7fea-452b-463f-b396-cdd29e8ffa91] Updated VIF entry in instance network info cache for port 892c8e3d-851e-4ad1-bbab-938e49f4cba1. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 570.271374] env[63028]: DEBUG nova.network.neutron [req-c9ccfdc8-d49c-474b-8d87-84ac05500fb6 req-358a886d-9582-4d42-8343-9ede8523cba7 service nova] [instance: 413f7fea-452b-463f-b396-cdd29e8ffa91] Updating instance_info_cache with network_info: [{"id": "892c8e3d-851e-4ad1-bbab-938e49f4cba1", "address": "fa:16:3e:2c:e4:c1", "network": {"id": "5f8a710c-b049-40fe-8cc4-00c97e954fd9", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1979325486-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "381de553d9da4c94b923d790c12a28a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ea16b8c-ebf6-4bd4-ab67-ddb472e7b460", "external-id": "nsx-vlan-transportzone-156", "segmentation_id": 156, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap892c8e3d-85", "ovs_interfaceid": "892c8e3d-851e-4ad1-bbab-938e49f4cba1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 570.280404] env[63028]: DEBUG oslo_vmware.api [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Waiting for the task: (returnval){ [ 570.280404] env[63028]: value = "task-2734838" [ 570.280404] env[63028]: _type = "Task" [ 570.280404] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 570.292684] env[63028]: DEBUG oslo_vmware.api [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Task: {'id': task-2734838, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 570.364632] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7796d56a-98c6-481b-9d55-c9c5a993a07a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 570.532238] env[63028]: DEBUG oslo_concurrency.lockutils [None req-51a72729-4a98-4e9c-9e35-60196cf706b3 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Acquiring lock "refresh_cache-f311a533-5c48-410b-ba3b-58f0032c8816" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 570.533883] env[63028]: DEBUG oslo_concurrency.lockutils [None req-51a72729-4a98-4e9c-9e35-60196cf706b3 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Acquired lock "refresh_cache-f311a533-5c48-410b-ba3b-58f0032c8816" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 570.533883] env[63028]: DEBUG nova.network.neutron [None req-51a72729-4a98-4e9c-9e35-60196cf706b3 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: f311a533-5c48-410b-ba3b-58f0032c8816] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 570.534866] env[63028]: DEBUG oslo_concurrency.lockutils [None req-147062de-91cc-4792-8607-76dedea7efea tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] Releasing lock "refresh_cache-a167df01-05e4-453d-8800-9c104d912474" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 570.540314] env[63028]: DEBUG nova.compute.manager [None req-147062de-91cc-4792-8607-76dedea7efea tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] [instance: a167df01-05e4-453d-8800-9c104d912474] Instance network_info: |[{"id": "10ff9499-257b-4aba-99fa-2ca0aaedc466", "address": "fa:16:3e:65:aa:a7", "network": {"id": "47c482bc-2ff1-431d-8910-0bf36def79a2", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.179", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "96661ac8d4f04d6e97eea4809b444133", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56136ef6-99d7-4562-9a9f-d66fec951c5c", "external-id": "nsx-vlan-transportzone-32", "segmentation_id": 32, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap10ff9499-25", "ovs_interfaceid": "10ff9499-257b-4aba-99fa-2ca0aaedc466", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 570.540435] env[63028]: DEBUG 
nova.virt.vmwareapi.vmops [None req-147062de-91cc-4792-8607-76dedea7efea tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] [instance: a167df01-05e4-453d-8800-9c104d912474] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:65:aa:a7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '56136ef6-99d7-4562-9a9f-d66fec951c5c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '10ff9499-257b-4aba-99fa-2ca0aaedc466', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 570.546747] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-147062de-91cc-4792-8607-76dedea7efea tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] Creating folder: Project (b2d6f0972d424163992e4841a5a22052). Parent ref: group-v550570. {{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 570.551515] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-28a5c752-5bff-4ffa-bca1-0c748c7a880a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.553675] env[63028]: DEBUG oslo_vmware.api [None req-d4920ddd-15db-4060-9c6b-ec4bd6fea66c tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Task: {'id': task-2734837, 'name': PowerOnVM_Task, 'duration_secs': 1.020435} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 570.557437] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4920ddd-15db-4060-9c6b-ec4bd6fea66c tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] [instance: 413f7fea-452b-463f-b396-cdd29e8ffa91] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 570.557672] env[63028]: INFO nova.compute.manager [None req-d4920ddd-15db-4060-9c6b-ec4bd6fea66c tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] [instance: 413f7fea-452b-463f-b396-cdd29e8ffa91] Took 10.97 seconds to spawn the instance on the hypervisor. [ 570.557866] env[63028]: DEBUG nova.compute.manager [None req-d4920ddd-15db-4060-9c6b-ec4bd6fea66c tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] [instance: 413f7fea-452b-463f-b396-cdd29e8ffa91] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 570.559331] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3a158fe-11ba-40e3-a38f-15567876cffc {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.570621] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-147062de-91cc-4792-8607-76dedea7efea tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] Created folder: Project (b2d6f0972d424163992e4841a5a22052) in parent group-v550570. 
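[annotation] The repeated 'Waiting for the task: (returnval){ value = "task-..." } to complete' blocks and the '_poll_task ... progress is N%' entries in this trace are oslo.vmware's task-polling loop around long-running vCenter operations such as PowerOnVM_Task, CopyVirtualDisk_Task and CreateVM_Task. Below is a minimal sketch of that call-and-poll pattern, not Nova's own code: the vCenter host, credentials and the 'vm-123' managed-object reference are placeholder assumptions, and Nova's driver wraps the same session with its own helper methods.

    # Sketch only, not Nova's code: the call-and-poll pattern behind the
    # "Invoking VirtualMachine.PowerOnVM_Task ..." and
    # "Task: {'id': task-..., 'name': PowerOnVM_Task} progress is N%" entries.
    # Host, credentials and the 'vm-123' moref below are placeholder assumptions.
    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    session = vmware_api.VMwareAPISession(
        'vc.example.test',                 # placeholder vCenter host
        'administrator@vsphere.local',     # placeholder user
        'secret',                          # placeholder password
        api_retry_count=10,
        task_poll_interval=0.5)            # cadence of the "progress is N%" polls

    # Build a managed-object reference for a VM (the value is a placeholder).
    vm_ref = vim_util.get_moref('vm-123', 'VirtualMachine')

    # Issue the SOAP call; vCenter returns a Task reference immediately.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

    # Block until the task reaches 'success' (or raise on error); each poll is
    # what the trace records via wait_for_task/_poll_task.
    task_info = session.wait_for_task(task)
    print(task_info.state)

The task_poll_interval passed to the session is what sets the spacing of the 'progress is N%' lines seen above.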
[ 570.570824] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-147062de-91cc-4792-8607-76dedea7efea tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] Creating folder: Instances. Parent ref: group-v550583. {{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 570.571440] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c336eb88-bbf7-4603-9f70-e09ab2d6aebf {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.583854] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-147062de-91cc-4792-8607-76dedea7efea tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] Created folder: Instances in parent group-v550583. [ 570.583854] env[63028]: DEBUG oslo.service.loopingcall [None req-147062de-91cc-4792-8607-76dedea7efea tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 570.583854] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a167df01-05e4-453d-8800-9c104d912474] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 570.583854] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-118bfd5d-1919-4523-be06-a21ed6ade856 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.616473] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 570.616473] env[63028]: value = "task-2734841" [ 570.616473] env[63028]: _type = "Task" [ 570.616473] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 570.623635] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2734841, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 570.661329] env[63028]: DEBUG oslo_concurrency.lockutils [None req-708d0fa9-8389-4fb9-b91d-cffdbfa9c678 tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 570.711696] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4605a798-b3bd-429d-b549-3e915bfcdb3a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.723831] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc21ea44-2d04-41fa-bb78-5844dbe4aa8f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.764338] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cae9f30d-ea04-48f0-8a04-6fa2b8dac7e9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.775895] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b146c6e6-cee8-4b79-8a2e-3541bdcf78ce {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.782673] env[63028]: DEBUG oslo_concurrency.lockutils [req-c9ccfdc8-d49c-474b-8d87-84ac05500fb6 req-358a886d-9582-4d42-8343-9ede8523cba7 service nova] Releasing lock "refresh_cache-413f7fea-452b-463f-b396-cdd29e8ffa91" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 570.802883] env[63028]: DEBUG nova.compute.provider_tree [None req-74efb235-9e92-4f7d-b153-82f0da18a05e tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 570.808155] env[63028]: DEBUG oslo_vmware.api [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Task: {'id': task-2734838, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 570.835818] env[63028]: DEBUG nova.compute.manager [None req-984f2fe9-26e9-4bb1-8957-bd8258e39ddc tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] [instance: 03a19e41-1146-4560-8d93-16a23aa952da] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 570.836810] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9392e40c-a0e4-402e-99d5-0afc1dc68832 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.086204] env[63028]: INFO nova.compute.manager [None req-d4920ddd-15db-4060-9c6b-ec4bd6fea66c tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] [instance: 413f7fea-452b-463f-b396-cdd29e8ffa91] Took 15.83 seconds to build instance. [ 571.125888] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2734841, 'name': CreateVM_Task, 'duration_secs': 0.38942} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 571.126127] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a167df01-05e4-453d-8800-9c104d912474] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 571.127298] env[63028]: DEBUG oslo_concurrency.lockutils [None req-147062de-91cc-4792-8607-76dedea7efea tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 571.127427] env[63028]: DEBUG oslo_concurrency.lockutils [None req-147062de-91cc-4792-8607-76dedea7efea tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 571.127737] env[63028]: DEBUG oslo_concurrency.lockutils [None req-147062de-91cc-4792-8607-76dedea7efea tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 571.128048] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7656e444-7532-4117-803b-d8fa68f65b73 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.133973] env[63028]: DEBUG oslo_vmware.api [None req-147062de-91cc-4792-8607-76dedea7efea tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] Waiting for the task: (returnval){ [ 571.133973] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]520a14a0-66c8-c948-f23e-cbe9d8c5b7a1" [ 571.133973] env[63028]: _type = "Task" [ 571.133973] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 571.144357] env[63028]: DEBUG oslo_vmware.api [None req-147062de-91cc-4792-8607-76dedea7efea tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]520a14a0-66c8-c948-f23e-cbe9d8c5b7a1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 571.145213] env[63028]: DEBUG nova.network.neutron [None req-51a72729-4a98-4e9c-9e35-60196cf706b3 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: f311a533-5c48-410b-ba3b-58f0032c8816] Instance cache missing network info. {{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 571.297029] env[63028]: DEBUG oslo_vmware.api [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Task: {'id': task-2734838, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.896456} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 571.297308] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Copied Virtual Disk [datastore2] vmware_temp/e6315db8-d49c-48bf-b10c-b977947d5bdf/f2ba2026-3f4b-431c-97c1-c4ba582a9907/tmp-sparse.vmdk to [datastore2] vmware_temp/e6315db8-d49c-48bf-b10c-b977947d5bdf/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 571.297495] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Deleting the datastore file [datastore2] vmware_temp/e6315db8-d49c-48bf-b10c-b977947d5bdf/f2ba2026-3f4b-431c-97c1-c4ba582a9907/tmp-sparse.vmdk {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 571.297755] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9bfb89d9-e974-4a7e-b194-3419fdbe5e16 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.308411] env[63028]: DEBUG oslo_vmware.api [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Waiting for the task: (returnval){ [ 571.308411] env[63028]: value = "task-2734842" [ 571.308411] env[63028]: _type = "Task" [ 571.308411] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 571.309395] env[63028]: DEBUG nova.scheduler.client.report [None req-74efb235-9e92-4f7d-b153-82f0da18a05e tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 571.328618] env[63028]: DEBUG oslo_vmware.api [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Task: {'id': task-2734842, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 571.354074] env[63028]: INFO nova.compute.manager [None req-984f2fe9-26e9-4bb1-8957-bd8258e39ddc tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] [instance: 03a19e41-1146-4560-8d93-16a23aa952da] instance snapshotting [ 571.354074] env[63028]: DEBUG nova.objects.instance [None req-984f2fe9-26e9-4bb1-8957-bd8258e39ddc tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Lazy-loading 'flavor' on Instance uuid 03a19e41-1146-4560-8d93-16a23aa952da {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 571.374159] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f2243ffd-3e93-4ac3-8cfa-6c07411b0613 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Acquiring lock "2dcdb36f-6c2d-4f70-8aed-27f6511ef3e8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 571.374570] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f2243ffd-3e93-4ac3-8cfa-6c07411b0613 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Lock "2dcdb36f-6c2d-4f70-8aed-27f6511ef3e8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 571.590781] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d4920ddd-15db-4060-9c6b-ec4bd6fea66c tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Lock "413f7fea-452b-463f-b396-cdd29e8ffa91" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.342s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 571.648641] env[63028]: DEBUG oslo_concurrency.lockutils [None req-147062de-91cc-4792-8607-76dedea7efea tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] Releasing lock "[datastore2] 
devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 571.649023] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-147062de-91cc-4792-8607-76dedea7efea tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] [instance: a167df01-05e4-453d-8800-9c104d912474] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 571.649199] env[63028]: DEBUG oslo_concurrency.lockutils [None req-147062de-91cc-4792-8607-76dedea7efea tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 571.730824] env[63028]: DEBUG nova.network.neutron [None req-51a72729-4a98-4e9c-9e35-60196cf706b3 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: f311a533-5c48-410b-ba3b-58f0032c8816] Updating instance_info_cache with network_info: [{"id": "c324d23d-8733-4dee-a740-12bc47cfb838", "address": "fa:16:3e:11:c2:3b", "network": {"id": "35eace8d-0404-444c-a1ec-e13fd5687644", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1634949692-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ef9a42771824708832a74238bbe89c0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c330dbdb-ad20-4e7e-8a12-66e4a914a84a", "external-id": "nsx-vlan-transportzone-181", "segmentation_id": 181, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc324d23d-87", "ovs_interfaceid": "c324d23d-8733-4dee-a740-12bc47cfb838", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 571.824343] env[63028]: DEBUG oslo_concurrency.lockutils [None req-74efb235-9e92-4f7d-b153-82f0da18a05e tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.354s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 571.827684] env[63028]: DEBUG nova.compute.manager [None req-74efb235-9e92-4f7d-b153-82f0da18a05e tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] [instance: f80df630-327b-4923-a785-5d2e48fe1f19] Start building networks asynchronously for instance. 
{{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 571.827684] env[63028]: DEBUG oslo_vmware.api [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Task: {'id': task-2734842, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.065549} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 571.827684] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7796d56a-98c6-481b-9d55-c9c5a993a07a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.463s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 571.829608] env[63028]: INFO nova.compute.claims [None req-7796d56a-98c6-481b-9d55-c9c5a993a07a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: e20ed04f-205b-4aa9-b8b6-e352cd237412] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 571.832685] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 571.832798] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Moving file from [datastore2] vmware_temp/e6315db8-d49c-48bf-b10c-b977947d5bdf/f2ba2026-3f4b-431c-97c1-c4ba582a9907 to [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907. {{(pid=63028) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 571.833267] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-453a9e19-b71b-4e4c-be9b-53940125dd3d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.841357] env[63028]: DEBUG oslo_vmware.api [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Waiting for the task: (returnval){ [ 571.841357] env[63028]: value = "task-2734843" [ 571.841357] env[63028]: _type = "Task" [ 571.841357] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 571.854047] env[63028]: DEBUG nova.compute.manager [req-72812952-dcd3-4f91-8026-1103b067a7a4 req-84d76dfc-0513-4bcb-a424-35d905223b1f service nova] [instance: 679fca11-7390-4596-ab74-2f82a6cf8858] Received event network-changed-4d374544-7b1e-450d-a5ad-0ee73b15715d {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 571.854378] env[63028]: DEBUG nova.compute.manager [req-72812952-dcd3-4f91-8026-1103b067a7a4 req-84d76dfc-0513-4bcb-a424-35d905223b1f service nova] [instance: 679fca11-7390-4596-ab74-2f82a6cf8858] Refreshing instance network info cache due to event network-changed-4d374544-7b1e-450d-a5ad-0ee73b15715d. 
{{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 571.854618] env[63028]: DEBUG oslo_concurrency.lockutils [req-72812952-dcd3-4f91-8026-1103b067a7a4 req-84d76dfc-0513-4bcb-a424-35d905223b1f service nova] Acquiring lock "refresh_cache-679fca11-7390-4596-ab74-2f82a6cf8858" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 571.854766] env[63028]: DEBUG oslo_concurrency.lockutils [req-72812952-dcd3-4f91-8026-1103b067a7a4 req-84d76dfc-0513-4bcb-a424-35d905223b1f service nova] Acquired lock "refresh_cache-679fca11-7390-4596-ab74-2f82a6cf8858" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 571.854961] env[63028]: DEBUG nova.network.neutron [req-72812952-dcd3-4f91-8026-1103b067a7a4 req-84d76dfc-0513-4bcb-a424-35d905223b1f service nova] [instance: 679fca11-7390-4596-ab74-2f82a6cf8858] Refreshing network info cache for port 4d374544-7b1e-450d-a5ad-0ee73b15715d {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 571.863619] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f35823c-056e-4cde-9885-4bffa57daf2c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.866346] env[63028]: DEBUG oslo_vmware.api [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Task: {'id': task-2734843, 'name': MoveDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 571.882882] env[63028]: DEBUG nova.compute.manager [None req-f2243ffd-3e93-4ac3-8cfa-6c07411b0613 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] [instance: 2dcdb36f-6c2d-4f70-8aed-27f6511ef3e8] Starting instance... 
{{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 571.887457] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6db5c135-98ff-4786-848e-97a6e6b03a47 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.235235] env[63028]: DEBUG oslo_concurrency.lockutils [None req-51a72729-4a98-4e9c-9e35-60196cf706b3 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Releasing lock "refresh_cache-f311a533-5c48-410b-ba3b-58f0032c8816" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 572.236637] env[63028]: DEBUG nova.compute.manager [None req-51a72729-4a98-4e9c-9e35-60196cf706b3 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: f311a533-5c48-410b-ba3b-58f0032c8816] Instance network_info: |[{"id": "c324d23d-8733-4dee-a740-12bc47cfb838", "address": "fa:16:3e:11:c2:3b", "network": {"id": "35eace8d-0404-444c-a1ec-e13fd5687644", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1634949692-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ef9a42771824708832a74238bbe89c0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c330dbdb-ad20-4e7e-8a12-66e4a914a84a", "external-id": "nsx-vlan-transportzone-181", "segmentation_id": 181, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc324d23d-87", "ovs_interfaceid": "c324d23d-8733-4dee-a740-12bc47cfb838", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 572.237628] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-51a72729-4a98-4e9c-9e35-60196cf706b3 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: f311a533-5c48-410b-ba3b-58f0032c8816] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:11:c2:3b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c330dbdb-ad20-4e7e-8a12-66e4a914a84a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c324d23d-8733-4dee-a740-12bc47cfb838', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 572.251806] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-51a72729-4a98-4e9c-9e35-60196cf706b3 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Creating folder: Project (5ef9a42771824708832a74238bbe89c0). Parent ref: group-v550570. 
{{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 572.255246] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fb5e5dee-baca-4632-b2b1-3f1e67784fad {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.268626] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-51a72729-4a98-4e9c-9e35-60196cf706b3 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Created folder: Project (5ef9a42771824708832a74238bbe89c0) in parent group-v550570. [ 572.268626] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-51a72729-4a98-4e9c-9e35-60196cf706b3 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Creating folder: Instances. Parent ref: group-v550586. {{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 572.268626] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6836a3ed-7f9d-45e1-86dc-f024b5fd3cba {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.281608] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-51a72729-4a98-4e9c-9e35-60196cf706b3 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Created folder: Instances in parent group-v550586. [ 572.281608] env[63028]: DEBUG oslo.service.loopingcall [None req-51a72729-4a98-4e9c-9e35-60196cf706b3 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 572.281608] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f311a533-5c48-410b-ba3b-58f0032c8816] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 572.281608] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8480b114-7124-4641-9e44-21ed9405818e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.303935] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 572.303935] env[63028]: value = "task-2734846" [ 572.303935] env[63028]: _type = "Task" [ 572.303935] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 572.315254] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2734846, 'name': CreateVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 572.338340] env[63028]: DEBUG nova.compute.utils [None req-74efb235-9e92-4f7d-b153-82f0da18a05e tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 572.339835] env[63028]: DEBUG nova.compute.manager [None req-74efb235-9e92-4f7d-b153-82f0da18a05e tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] [instance: f80df630-327b-4923-a785-5d2e48fe1f19] Allocating IP information in the background. 
{{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 572.339995] env[63028]: DEBUG nova.network.neutron [None req-74efb235-9e92-4f7d-b153-82f0da18a05e tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] [instance: f80df630-327b-4923-a785-5d2e48fe1f19] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 572.357577] env[63028]: DEBUG oslo_vmware.api [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Task: {'id': task-2734843, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.035503} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 572.358146] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] File moved {{(pid=63028) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 572.358361] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] [instance: 679fca11-7390-4596-ab74-2f82a6cf8858] Cleaning up location [datastore2] vmware_temp/e6315db8-d49c-48bf-b10c-b977947d5bdf {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 572.358530] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Deleting the datastore file [datastore2] vmware_temp/e6315db8-d49c-48bf-b10c-b977947d5bdf {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 572.358797] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-febbf3f6-13d2-4d7b-a5b0-d27f70304553 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.370492] env[63028]: DEBUG oslo_vmware.api [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Waiting for the task: (returnval){ [ 572.370492] env[63028]: value = "task-2734847" [ 572.370492] env[63028]: _type = "Task" [ 572.370492] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 572.389723] env[63028]: DEBUG oslo_vmware.api [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Task: {'id': task-2734847, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 572.401801] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-984f2fe9-26e9-4bb1-8957-bd8258e39ddc tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] [instance: 03a19e41-1146-4560-8d93-16a23aa952da] Creating Snapshot of the VM instance {{(pid=63028) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 572.401801] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-0d4d16f9-f214-412e-900e-fdd586434701 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.411776] env[63028]: DEBUG oslo_vmware.api [None req-984f2fe9-26e9-4bb1-8957-bd8258e39ddc tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Waiting for the task: (returnval){ [ 572.411776] env[63028]: value = "task-2734848" [ 572.411776] env[63028]: _type = "Task" [ 572.411776] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 572.423694] env[63028]: DEBUG oslo_vmware.api [None req-984f2fe9-26e9-4bb1-8957-bd8258e39ddc tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Task: {'id': task-2734848, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 572.425260] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f2243ffd-3e93-4ac3-8cfa-6c07411b0613 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 572.467243] env[63028]: DEBUG nova.policy [None req-74efb235-9e92-4f7d-b153-82f0da18a05e tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7f3ff447a5474f9686d9be9008be1901', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bb987fc85bab4863a97bfabd29b5b141', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 572.634653] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5557dd6d-8871-4b04-902a-5fb89e3ecdba tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Acquiring lock "67440140-a619-41f2-98fe-eff23e8ad8a5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 572.634816] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5557dd6d-8871-4b04-902a-5fb89e3ecdba tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Lock "67440140-a619-41f2-98fe-eff23e8ad8a5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s 
{{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 572.716168] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a4a45f73-e5da-48b3-b2dc-442bd505e719 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Acquiring lock "03a19e41-1146-4560-8d93-16a23aa952da" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 572.716565] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a4a45f73-e5da-48b3-b2dc-442bd505e719 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Lock "03a19e41-1146-4560-8d93-16a23aa952da" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 572.716887] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a4a45f73-e5da-48b3-b2dc-442bd505e719 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Acquiring lock "03a19e41-1146-4560-8d93-16a23aa952da-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 572.717212] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a4a45f73-e5da-48b3-b2dc-442bd505e719 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Lock "03a19e41-1146-4560-8d93-16a23aa952da-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 572.717512] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a4a45f73-e5da-48b3-b2dc-442bd505e719 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Lock "03a19e41-1146-4560-8d93-16a23aa952da-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 572.721106] env[63028]: INFO nova.compute.manager [None req-a4a45f73-e5da-48b3-b2dc-442bd505e719 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] [instance: 03a19e41-1146-4560-8d93-16a23aa952da] Terminating instance [ 572.816324] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2734846, 'name': CreateVM_Task, 'duration_secs': 0.378988} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 572.816555] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f311a533-5c48-410b-ba3b-58f0032c8816] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 572.817278] env[63028]: DEBUG oslo_concurrency.lockutils [None req-51a72729-4a98-4e9c-9e35-60196cf706b3 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 572.817481] env[63028]: DEBUG oslo_concurrency.lockutils [None req-51a72729-4a98-4e9c-9e35-60196cf706b3 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 572.817792] env[63028]: DEBUG oslo_concurrency.lockutils [None req-51a72729-4a98-4e9c-9e35-60196cf706b3 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 572.818265] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-82838e22-855f-45e9-bd16-19d42f5e4ef4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.825141] env[63028]: DEBUG oslo_vmware.api [None req-51a72729-4a98-4e9c-9e35-60196cf706b3 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Waiting for the task: (returnval){ [ 572.825141] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5243f197-74ea-ca06-1154-5fc08bb1294d" [ 572.825141] env[63028]: _type = "Task" [ 572.825141] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 572.833507] env[63028]: DEBUG oslo_vmware.api [None req-51a72729-4a98-4e9c-9e35-60196cf706b3 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5243f197-74ea-ca06-1154-5fc08bb1294d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 572.846400] env[63028]: DEBUG nova.compute.manager [None req-74efb235-9e92-4f7d-b153-82f0da18a05e tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] [instance: f80df630-327b-4923-a785-5d2e48fe1f19] Start building block device mappings for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 572.887408] env[63028]: DEBUG oslo_vmware.api [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Task: {'id': task-2734847, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.041242} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 572.887658] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 572.888506] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eb47b39d-9988-464d-adee-ad2c75bf5012 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.895251] env[63028]: DEBUG oslo_vmware.api [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Waiting for the task: (returnval){ [ 572.895251] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52fd6c0d-6a8f-47f8-d511-dbedfbd9f0c1" [ 572.895251] env[63028]: _type = "Task" [ 572.895251] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 572.904761] env[63028]: DEBUG oslo_vmware.api [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52fd6c0d-6a8f-47f8-d511-dbedfbd9f0c1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 572.926038] env[63028]: DEBUG oslo_vmware.api [None req-984f2fe9-26e9-4bb1-8957-bd8258e39ddc tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Task: {'id': task-2734848, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 573.078498] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8493d8c7-4399-4949-83fa-60e8aa514148 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.088149] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b882742-ac75-4ecb-b9f7-0894d9df44f2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.124943] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b42e9768-819e-4142-9a6d-167047ce637c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.132643] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8e4e965-ca3d-4e90-9fde-71dc98462ca4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.151035] env[63028]: DEBUG nova.compute.manager [None req-5557dd6d-8871-4b04-902a-5fb89e3ecdba tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] [instance: 67440140-a619-41f2-98fe-eff23e8ad8a5] Starting instance... 
{{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 573.151035] env[63028]: DEBUG nova.compute.provider_tree [None req-7796d56a-98c6-481b-9d55-c9c5a993a07a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 573.232709] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a4a45f73-e5da-48b3-b2dc-442bd505e719 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Acquiring lock "refresh_cache-03a19e41-1146-4560-8d93-16a23aa952da" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 573.232897] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a4a45f73-e5da-48b3-b2dc-442bd505e719 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Acquired lock "refresh_cache-03a19e41-1146-4560-8d93-16a23aa952da" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 573.233095] env[63028]: DEBUG nova.network.neutron [None req-a4a45f73-e5da-48b3-b2dc-442bd505e719 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] [instance: 03a19e41-1146-4560-8d93-16a23aa952da] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 573.334910] env[63028]: DEBUG oslo_vmware.api [None req-51a72729-4a98-4e9c-9e35-60196cf706b3 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5243f197-74ea-ca06-1154-5fc08bb1294d, 'name': SearchDatastore_Task, 'duration_secs': 0.012818} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 573.335297] env[63028]: DEBUG oslo_concurrency.lockutils [None req-51a72729-4a98-4e9c-9e35-60196cf706b3 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 573.335579] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-51a72729-4a98-4e9c-9e35-60196cf706b3 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: f311a533-5c48-410b-ba3b-58f0032c8816] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 573.335835] env[63028]: DEBUG oslo_concurrency.lockutils [None req-51a72729-4a98-4e9c-9e35-60196cf706b3 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 573.408258] env[63028]: DEBUG oslo_vmware.api [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52fd6c0d-6a8f-47f8-d511-dbedfbd9f0c1, 'name': SearchDatastore_Task, 'duration_secs': 0.011803} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 573.408540] env[63028]: DEBUG oslo_concurrency.lockutils [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 573.408798] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] 679fca11-7390-4596-ab74-2f82a6cf8858/679fca11-7390-4596-ab74-2f82a6cf8858.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 573.409084] env[63028]: DEBUG oslo_concurrency.lockutils [None req-fda4047a-9e01-42e1-8149-a49f72d41c4d tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 573.409273] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-fda4047a-9e01-42e1-8149-a49f72d41c4d tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 573.409486] 
env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0b9e0c05-39e2-4760-afd1-6fb86f17b658 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.411464] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6165af58-03b8-40aa-8b3b-e19766c44c4f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.421905] env[63028]: DEBUG oslo_vmware.api [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Waiting for the task: (returnval){ [ 573.421905] env[63028]: value = "task-2734849" [ 573.421905] env[63028]: _type = "Task" [ 573.421905] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 573.422953] env[63028]: DEBUG nova.network.neutron [req-72812952-dcd3-4f91-8026-1103b067a7a4 req-84d76dfc-0513-4bcb-a424-35d905223b1f service nova] [instance: 679fca11-7390-4596-ab74-2f82a6cf8858] Updated VIF entry in instance network info cache for port 4d374544-7b1e-450d-a5ad-0ee73b15715d. {{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 573.423496] env[63028]: DEBUG nova.network.neutron [req-72812952-dcd3-4f91-8026-1103b067a7a4 req-84d76dfc-0513-4bcb-a424-35d905223b1f service nova] [instance: 679fca11-7390-4596-ab74-2f82a6cf8858] Updating instance_info_cache with network_info: [{"id": "4d374544-7b1e-450d-a5ad-0ee73b15715d", "address": "fa:16:3e:03:f5:21", "network": {"id": "47c482bc-2ff1-431d-8910-0bf36def79a2", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.48", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "96661ac8d4f04d6e97eea4809b444133", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56136ef6-99d7-4562-9a9f-d66fec951c5c", "external-id": "nsx-vlan-transportzone-32", "segmentation_id": 32, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4d374544-7b", "ovs_interfaceid": "4d374544-7b1e-450d-a5ad-0ee73b15715d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 573.432782] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-fda4047a-9e01-42e1-8149-a49f72d41c4d tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 573.433478] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-fda4047a-9e01-42e1-8149-a49f72d41c4d tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 573.435464] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5047b8d5-8aee-4468-9b8d-2a10c5e3c53f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.445217] env[63028]: DEBUG oslo_vmware.api [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Task: {'id': task-2734849, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 573.446424] env[63028]: DEBUG oslo_vmware.api [None req-984f2fe9-26e9-4bb1-8957-bd8258e39ddc tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Task: {'id': task-2734848, 'name': CreateSnapshot_Task, 'duration_secs': 0.838541} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 573.447105] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-984f2fe9-26e9-4bb1-8957-bd8258e39ddc tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] [instance: 03a19e41-1146-4560-8d93-16a23aa952da] Created Snapshot of the VM instance {{(pid=63028) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 573.451746] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a375b9a0-ea5b-4083-a7ff-f1399d7602d4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.453170] env[63028]: DEBUG oslo_vmware.api [None req-fda4047a-9e01-42e1-8149-a49f72d41c4d tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Waiting for the task: (returnval){ [ 573.453170] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52140d9b-4e97-8e18-082c-515e34b42cbf" [ 573.453170] env[63028]: _type = "Task" [ 573.453170] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 573.470443] env[63028]: DEBUG oslo_vmware.api [None req-fda4047a-9e01-42e1-8149-a49f72d41c4d tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52140d9b-4e97-8e18-082c-515e34b42cbf, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 573.571516] env[63028]: DEBUG nova.compute.manager [req-49c7bdd3-70d0-471e-9610-46d5f73148ff req-efe62cdc-b715-49aa-8f51-7989fec61188 service nova] [instance: a167df01-05e4-453d-8800-9c104d912474] Received event network-vif-plugged-10ff9499-257b-4aba-99fa-2ca0aaedc466 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 573.571516] env[63028]: DEBUG oslo_concurrency.lockutils [req-49c7bdd3-70d0-471e-9610-46d5f73148ff req-efe62cdc-b715-49aa-8f51-7989fec61188 service nova] Acquiring lock "a167df01-05e4-453d-8800-9c104d912474-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 573.572216] env[63028]: DEBUG oslo_concurrency.lockutils [req-49c7bdd3-70d0-471e-9610-46d5f73148ff req-efe62cdc-b715-49aa-8f51-7989fec61188 service nova] Lock "a167df01-05e4-453d-8800-9c104d912474-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 573.573349] env[63028]: DEBUG oslo_concurrency.lockutils [req-49c7bdd3-70d0-471e-9610-46d5f73148ff req-efe62cdc-b715-49aa-8f51-7989fec61188 service nova] Lock "a167df01-05e4-453d-8800-9c104d912474-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 573.574827] env[63028]: DEBUG nova.compute.manager [req-49c7bdd3-70d0-471e-9610-46d5f73148ff req-efe62cdc-b715-49aa-8f51-7989fec61188 service nova] [instance: a167df01-05e4-453d-8800-9c104d912474] No waiting events found dispatching network-vif-plugged-10ff9499-257b-4aba-99fa-2ca0aaedc466 {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 573.575108] env[63028]: WARNING nova.compute.manager [req-49c7bdd3-70d0-471e-9610-46d5f73148ff req-efe62cdc-b715-49aa-8f51-7989fec61188 service nova] [instance: a167df01-05e4-453d-8800-9c104d912474] Received unexpected event network-vif-plugged-10ff9499-257b-4aba-99fa-2ca0aaedc466 for instance with vm_state building and task_state spawning. [ 573.575525] env[63028]: DEBUG nova.compute.manager [req-49c7bdd3-70d0-471e-9610-46d5f73148ff req-efe62cdc-b715-49aa-8f51-7989fec61188 service nova] [instance: a167df01-05e4-453d-8800-9c104d912474] Received event network-changed-10ff9499-257b-4aba-99fa-2ca0aaedc466 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 573.576039] env[63028]: DEBUG nova.compute.manager [req-49c7bdd3-70d0-471e-9610-46d5f73148ff req-efe62cdc-b715-49aa-8f51-7989fec61188 service nova] [instance: a167df01-05e4-453d-8800-9c104d912474] Refreshing instance network info cache due to event network-changed-10ff9499-257b-4aba-99fa-2ca0aaedc466. 
{{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 573.576251] env[63028]: DEBUG oslo_concurrency.lockutils [req-49c7bdd3-70d0-471e-9610-46d5f73148ff req-efe62cdc-b715-49aa-8f51-7989fec61188 service nova] Acquiring lock "refresh_cache-a167df01-05e4-453d-8800-9c104d912474" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 573.576581] env[63028]: DEBUG oslo_concurrency.lockutils [req-49c7bdd3-70d0-471e-9610-46d5f73148ff req-efe62cdc-b715-49aa-8f51-7989fec61188 service nova] Acquired lock "refresh_cache-a167df01-05e4-453d-8800-9c104d912474" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 573.577886] env[63028]: DEBUG nova.network.neutron [req-49c7bdd3-70d0-471e-9610-46d5f73148ff req-efe62cdc-b715-49aa-8f51-7989fec61188 service nova] [instance: a167df01-05e4-453d-8800-9c104d912474] Refreshing network info cache for port 10ff9499-257b-4aba-99fa-2ca0aaedc466 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 573.656096] env[63028]: DEBUG nova.scheduler.client.report [None req-7796d56a-98c6-481b-9d55-c9c5a993a07a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 573.681770] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5557dd6d-8871-4b04-902a-5fb89e3ecdba tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 573.804308] env[63028]: DEBUG nova.network.neutron [None req-a4a45f73-e5da-48b3-b2dc-442bd505e719 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] [instance: 03a19e41-1146-4560-8d93-16a23aa952da] Instance cache missing network info. {{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 573.857621] env[63028]: DEBUG nova.compute.manager [None req-74efb235-9e92-4f7d-b153-82f0da18a05e tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] [instance: f80df630-327b-4923-a785-5d2e48fe1f19] Start spawning the instance on the hypervisor. 
{{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 573.889935] env[63028]: DEBUG nova.virt.hardware [None req-74efb235-9e92-4f7d-b153-82f0da18a05e tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 573.890612] env[63028]: DEBUG nova.virt.hardware [None req-74efb235-9e92-4f7d-b153-82f0da18a05e tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 573.890787] env[63028]: DEBUG nova.virt.hardware [None req-74efb235-9e92-4f7d-b153-82f0da18a05e tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 573.891056] env[63028]: DEBUG nova.virt.hardware [None req-74efb235-9e92-4f7d-b153-82f0da18a05e tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 573.891220] env[63028]: DEBUG nova.virt.hardware [None req-74efb235-9e92-4f7d-b153-82f0da18a05e tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 573.891388] env[63028]: DEBUG nova.virt.hardware [None req-74efb235-9e92-4f7d-b153-82f0da18a05e tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 573.891635] env[63028]: DEBUG nova.virt.hardware [None req-74efb235-9e92-4f7d-b153-82f0da18a05e tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 573.891812] env[63028]: DEBUG nova.virt.hardware [None req-74efb235-9e92-4f7d-b153-82f0da18a05e tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 573.891990] env[63028]: DEBUG nova.virt.hardware [None 
req-74efb235-9e92-4f7d-b153-82f0da18a05e tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 573.892173] env[63028]: DEBUG nova.virt.hardware [None req-74efb235-9e92-4f7d-b153-82f0da18a05e tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 573.892346] env[63028]: DEBUG nova.virt.hardware [None req-74efb235-9e92-4f7d-b153-82f0da18a05e tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 573.894791] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0bd1f9d-6880-492c-8b43-16e8f55288cb {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.904502] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16ed5b93-7d5d-4df3-833f-0f28aaa48316 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.930125] env[63028]: DEBUG oslo_concurrency.lockutils [req-72812952-dcd3-4f91-8026-1103b067a7a4 req-84d76dfc-0513-4bcb-a424-35d905223b1f service nova] Releasing lock "refresh_cache-679fca11-7390-4596-ab74-2f82a6cf8858" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 573.938773] env[63028]: DEBUG oslo_vmware.api [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Task: {'id': task-2734849, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 573.967235] env[63028]: DEBUG oslo_vmware.api [None req-fda4047a-9e01-42e1-8149-a49f72d41c4d tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52140d9b-4e97-8e18-082c-515e34b42cbf, 'name': SearchDatastore_Task, 'duration_secs': 0.021062} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 573.967930] env[63028]: DEBUG nova.compute.manager [None req-984f2fe9-26e9-4bb1-8957-bd8258e39ddc tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] [instance: 03a19e41-1146-4560-8d93-16a23aa952da] Instance disappeared during snapshot {{(pid=63028) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4595}} [ 573.970853] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cee277ff-cde5-4bd2-8843-0a19e05f7ca0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.977029] env[63028]: DEBUG oslo_vmware.api [None req-fda4047a-9e01-42e1-8149-a49f72d41c4d tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Waiting for the task: (returnval){ [ 573.977029] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]526bed57-a25b-eeff-9874-66a670abb022" [ 573.977029] env[63028]: _type = "Task" [ 573.977029] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 573.985762] env[63028]: DEBUG oslo_vmware.api [None req-fda4047a-9e01-42e1-8149-a49f72d41c4d tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]526bed57-a25b-eeff-9874-66a670abb022, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 574.065179] env[63028]: DEBUG nova.network.neutron [None req-a4a45f73-e5da-48b3-b2dc-442bd505e719 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] [instance: 03a19e41-1146-4560-8d93-16a23aa952da] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 574.163774] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7796d56a-98c6-481b-9d55-c9c5a993a07a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.336s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 574.164318] env[63028]: DEBUG nova.compute.manager [None req-7796d56a-98c6-481b-9d55-c9c5a993a07a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: e20ed04f-205b-4aa9-b8b6-e352cd237412] Start building networks asynchronously for instance. 
{{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 574.170465] env[63028]: DEBUG oslo_concurrency.lockutils [None req-708d0fa9-8389-4fb9-b91d-cffdbfa9c678 tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.509s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 574.171891] env[63028]: INFO nova.compute.claims [None req-708d0fa9-8389-4fb9-b91d-cffdbfa9c678 tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] [instance: 1eeb96d1-6e03-4192-a9db-955444519fd7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 574.202050] env[63028]: DEBUG nova.compute.manager [None req-984f2fe9-26e9-4bb1-8957-bd8258e39ddc tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] [instance: 03a19e41-1146-4560-8d93-16a23aa952da] Found 0 images (rotation: 2) {{(pid=63028) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 574.285626] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c0eb0318-b4ad-46b7-97d9-49268588965f tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Acquiring lock "d663c2df-ae54-4c50-a70f-e2180700c700" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 574.285863] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c0eb0318-b4ad-46b7-97d9-49268588965f tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Lock "d663c2df-ae54-4c50-a70f-e2180700c700" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 574.311794] env[63028]: DEBUG nova.network.neutron [None req-74efb235-9e92-4f7d-b153-82f0da18a05e tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] [instance: f80df630-327b-4923-a785-5d2e48fe1f19] Successfully created port: 0756fdb5-7e3d-45e1-b815-668c72c5e804 {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 574.446539] env[63028]: DEBUG oslo_vmware.api [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Task: {'id': task-2734849, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.67567} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 574.446539] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] 679fca11-7390-4596-ab74-2f82a6cf8858/679fca11-7390-4596-ab74-2f82a6cf8858.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 574.446539] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] [instance: 679fca11-7390-4596-ab74-2f82a6cf8858] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 574.446539] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4c420c57-1d4a-432d-838d-513396678058 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.458435] env[63028]: DEBUG oslo_vmware.api [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Waiting for the task: (returnval){ [ 574.458435] env[63028]: value = "task-2734850" [ 574.458435] env[63028]: _type = "Task" [ 574.458435] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 574.468731] env[63028]: DEBUG oslo_vmware.api [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Task: {'id': task-2734850, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 574.490253] env[63028]: DEBUG oslo_vmware.api [None req-fda4047a-9e01-42e1-8149-a49f72d41c4d tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]526bed57-a25b-eeff-9874-66a670abb022, 'name': SearchDatastore_Task, 'duration_secs': 0.066813} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 574.490571] env[63028]: DEBUG oslo_concurrency.lockutils [None req-fda4047a-9e01-42e1-8149-a49f72d41c4d tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 574.490856] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-fda4047a-9e01-42e1-8149-a49f72d41c4d tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] 94b1bf30-0f9b-4197-99ff-6631a13ab2d1/94b1bf30-0f9b-4197-99ff-6631a13ab2d1.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 574.491149] env[63028]: DEBUG oslo_concurrency.lockutils [None req-147062de-91cc-4792-8607-76dedea7efea tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 574.491333] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-147062de-91cc-4792-8607-76dedea7efea tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 574.491533] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9eec19eb-80c0-4d2e-83ea-218e3061c081 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.494109] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4f70a45a-33a3-4b85-b3f0-e2c541e89e54 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.500175] env[63028]: DEBUG oslo_vmware.api [None req-fda4047a-9e01-42e1-8149-a49f72d41c4d tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Waiting for the task: (returnval){ [ 574.500175] env[63028]: value = "task-2734851" [ 574.500175] env[63028]: _type = "Task" [ 574.500175] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 574.504770] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-147062de-91cc-4792-8607-76dedea7efea tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 574.505443] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-147062de-91cc-4792-8607-76dedea7efea tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 574.506973] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d5ec1fa1-d0dc-4158-a8d7-63acd27114d9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.512792] env[63028]: DEBUG oslo_vmware.api [None req-fda4047a-9e01-42e1-8149-a49f72d41c4d tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Task: {'id': task-2734851, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 574.516070] env[63028]: DEBUG oslo_vmware.api [None req-147062de-91cc-4792-8607-76dedea7efea tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] Waiting for the task: (returnval){ [ 574.516070] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52df03c2-5f6b-82f7-3b00-7443058f3f8c" [ 574.516070] env[63028]: _type = "Task" [ 574.516070] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 574.527120] env[63028]: DEBUG oslo_vmware.api [None req-147062de-91cc-4792-8607-76dedea7efea tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52df03c2-5f6b-82f7-3b00-7443058f3f8c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 574.568204] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a4a45f73-e5da-48b3-b2dc-442bd505e719 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Releasing lock "refresh_cache-03a19e41-1146-4560-8d93-16a23aa952da" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 574.568667] env[63028]: DEBUG nova.compute.manager [None req-a4a45f73-e5da-48b3-b2dc-442bd505e719 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] [instance: 03a19e41-1146-4560-8d93-16a23aa952da] Start destroying the instance on the hypervisor. 
{{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 574.568864] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-a4a45f73-e5da-48b3-b2dc-442bd505e719 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] [instance: 03a19e41-1146-4560-8d93-16a23aa952da] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 574.569842] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebc51634-a3cb-45ac-965e-7f3f96c7d867 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.579868] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4a45f73-e5da-48b3-b2dc-442bd505e719 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] [instance: 03a19e41-1146-4560-8d93-16a23aa952da] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 574.581022] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1e0a850c-9997-4032-8b7b-0e7c9cdd0492 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.587556] env[63028]: DEBUG oslo_vmware.api [None req-a4a45f73-e5da-48b3-b2dc-442bd505e719 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Waiting for the task: (returnval){ [ 574.587556] env[63028]: value = "task-2734852" [ 574.587556] env[63028]: _type = "Task" [ 574.587556] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 574.603587] env[63028]: DEBUG oslo_vmware.api [None req-a4a45f73-e5da-48b3-b2dc-442bd505e719 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Task: {'id': task-2734852, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 574.684643] env[63028]: DEBUG nova.compute.utils [None req-7796d56a-98c6-481b-9d55-c9c5a993a07a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 574.686749] env[63028]: DEBUG nova.compute.manager [None req-7796d56a-98c6-481b-9d55-c9c5a993a07a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: e20ed04f-205b-4aa9-b8b6-e352cd237412] Allocating IP information in the background. {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 574.687230] env[63028]: DEBUG nova.network.neutron [None req-7796d56a-98c6-481b-9d55-c9c5a993a07a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: e20ed04f-205b-4aa9-b8b6-e352cd237412] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 574.789190] env[63028]: DEBUG nova.compute.manager [None req-c0eb0318-b4ad-46b7-97d9-49268588965f tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] [instance: d663c2df-ae54-4c50-a70f-e2180700c700] Starting instance... 
{{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 574.833994] env[63028]: DEBUG nova.policy [None req-7796d56a-98c6-481b-9d55-c9c5a993a07a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b4dfc1e9e9e74072949517c7b930c147', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '43b7c1be3c4343a4b4f288a355170873', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 574.836703] env[63028]: DEBUG nova.network.neutron [req-49c7bdd3-70d0-471e-9610-46d5f73148ff req-efe62cdc-b715-49aa-8f51-7989fec61188 service nova] [instance: a167df01-05e4-453d-8800-9c104d912474] Updated VIF entry in instance network info cache for port 10ff9499-257b-4aba-99fa-2ca0aaedc466. {{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 574.839537] env[63028]: DEBUG nova.network.neutron [req-49c7bdd3-70d0-471e-9610-46d5f73148ff req-efe62cdc-b715-49aa-8f51-7989fec61188 service nova] [instance: a167df01-05e4-453d-8800-9c104d912474] Updating instance_info_cache with network_info: [{"id": "10ff9499-257b-4aba-99fa-2ca0aaedc466", "address": "fa:16:3e:65:aa:a7", "network": {"id": "47c482bc-2ff1-431d-8910-0bf36def79a2", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.179", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "96661ac8d4f04d6e97eea4809b444133", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56136ef6-99d7-4562-9a9f-d66fec951c5c", "external-id": "nsx-vlan-transportzone-32", "segmentation_id": 32, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap10ff9499-25", "ovs_interfaceid": "10ff9499-257b-4aba-99fa-2ca0aaedc466", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 574.973168] env[63028]: DEBUG oslo_vmware.api [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Task: {'id': task-2734850, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.138628} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 574.973453] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] [instance: 679fca11-7390-4596-ab74-2f82a6cf8858] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 574.974867] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1467868-9198-4c44-8d89-bef5e3b28202 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.006404] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] [instance: 679fca11-7390-4596-ab74-2f82a6cf8858] Reconfiguring VM instance instance-00000003 to attach disk [datastore2] 679fca11-7390-4596-ab74-2f82a6cf8858/679fca11-7390-4596-ab74-2f82a6cf8858.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 575.006668] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2a2c12ec-83a4-4f8d-a6bf-14132e842809 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.036151] env[63028]: DEBUG oslo_vmware.api [None req-fda4047a-9e01-42e1-8149-a49f72d41c4d tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Task: {'id': task-2734851, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 575.041266] env[63028]: DEBUG oslo_vmware.api [None req-147062de-91cc-4792-8607-76dedea7efea tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52df03c2-5f6b-82f7-3b00-7443058f3f8c, 'name': SearchDatastore_Task, 'duration_secs': 0.023373} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 575.044317] env[63028]: DEBUG oslo_vmware.api [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Waiting for the task: (returnval){ [ 575.044317] env[63028]: value = "task-2734853" [ 575.044317] env[63028]: _type = "Task" [ 575.044317] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 575.045287] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-591c5f24-0a99-4585-9eb8-d94dde3de615 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.054275] env[63028]: DEBUG oslo_vmware.api [None req-147062de-91cc-4792-8607-76dedea7efea tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] Waiting for the task: (returnval){ [ 575.054275] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5273e1a7-cadb-c4ff-cdd5-79f218d85316" [ 575.054275] env[63028]: _type = "Task" [ 575.054275] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 575.056971] env[63028]: DEBUG oslo_vmware.api [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Task: {'id': task-2734853, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 575.064591] env[63028]: DEBUG oslo_vmware.api [None req-147062de-91cc-4792-8607-76dedea7efea tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5273e1a7-cadb-c4ff-cdd5-79f218d85316, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 575.090724] env[63028]: DEBUG oslo_concurrency.lockutils [None req-19bb96ef-6a90-46d6-b14e-187dceb20ef8 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Acquiring lock "5a330ed9-c106-49f2-b524-a424e717b5ce" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 575.090878] env[63028]: DEBUG oslo_concurrency.lockutils [None req-19bb96ef-6a90-46d6-b14e-187dceb20ef8 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Lock "5a330ed9-c106-49f2-b524-a424e717b5ce" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 575.101065] env[63028]: DEBUG oslo_vmware.api [None req-a4a45f73-e5da-48b3-b2dc-442bd505e719 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Task: {'id': task-2734852, 'name': PowerOffVM_Task, 'duration_secs': 0.193098} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 575.101065] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4a45f73-e5da-48b3-b2dc-442bd505e719 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] [instance: 03a19e41-1146-4560-8d93-16a23aa952da] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 575.101288] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-a4a45f73-e5da-48b3-b2dc-442bd505e719 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] [instance: 03a19e41-1146-4560-8d93-16a23aa952da] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 575.101857] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fb896299-444a-4d33-ba34-bdbb429da9a4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.137241] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-a4a45f73-e5da-48b3-b2dc-442bd505e719 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] [instance: 03a19e41-1146-4560-8d93-16a23aa952da] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 575.137332] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-a4a45f73-e5da-48b3-b2dc-442bd505e719 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] [instance: 03a19e41-1146-4560-8d93-16a23aa952da] Deleting contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 575.137574] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-a4a45f73-e5da-48b3-b2dc-442bd505e719 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Deleting the datastore file [datastore1] 03a19e41-1146-4560-8d93-16a23aa952da {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 575.137834] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4f9c31a6-6e39-4fb6-beb9-ddacef149cd1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.144228] env[63028]: DEBUG oslo_vmware.api [None req-a4a45f73-e5da-48b3-b2dc-442bd505e719 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Waiting for the task: (returnval){ [ 575.144228] env[63028]: value = "task-2734855" [ 575.144228] env[63028]: _type = "Task" [ 575.144228] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 575.157674] env[63028]: DEBUG oslo_vmware.api [None req-a4a45f73-e5da-48b3-b2dc-442bd505e719 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Task: {'id': task-2734855, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 575.194628] env[63028]: DEBUG nova.compute.manager [None req-7796d56a-98c6-481b-9d55-c9c5a993a07a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: e20ed04f-205b-4aa9-b8b6-e352cd237412] Start building block device mappings for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 575.322844] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c0eb0318-b4ad-46b7-97d9-49268588965f tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 575.341496] env[63028]: DEBUG oslo_concurrency.lockutils [req-49c7bdd3-70d0-471e-9610-46d5f73148ff req-efe62cdc-b715-49aa-8f51-7989fec61188 service nova] Releasing lock "refresh_cache-a167df01-05e4-453d-8800-9c104d912474" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 575.341770] env[63028]: DEBUG nova.compute.manager [req-49c7bdd3-70d0-471e-9610-46d5f73148ff req-efe62cdc-b715-49aa-8f51-7989fec61188 service nova] [instance: f311a533-5c48-410b-ba3b-58f0032c8816] Received event network-vif-plugged-c324d23d-8733-4dee-a740-12bc47cfb838 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 575.341969] env[63028]: DEBUG oslo_concurrency.lockutils [req-49c7bdd3-70d0-471e-9610-46d5f73148ff req-efe62cdc-b715-49aa-8f51-7989fec61188 service nova] Acquiring lock "f311a533-5c48-410b-ba3b-58f0032c8816-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 575.342191] env[63028]: DEBUG oslo_concurrency.lockutils [req-49c7bdd3-70d0-471e-9610-46d5f73148ff req-efe62cdc-b715-49aa-8f51-7989fec61188 service nova] Lock "f311a533-5c48-410b-ba3b-58f0032c8816-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 575.342352] env[63028]: DEBUG oslo_concurrency.lockutils [req-49c7bdd3-70d0-471e-9610-46d5f73148ff req-efe62cdc-b715-49aa-8f51-7989fec61188 service nova] Lock "f311a533-5c48-410b-ba3b-58f0032c8816-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 575.342519] env[63028]: DEBUG nova.compute.manager [req-49c7bdd3-70d0-471e-9610-46d5f73148ff req-efe62cdc-b715-49aa-8f51-7989fec61188 service nova] [instance: f311a533-5c48-410b-ba3b-58f0032c8816] No waiting events found dispatching network-vif-plugged-c324d23d-8733-4dee-a740-12bc47cfb838 {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 575.342685] env[63028]: WARNING nova.compute.manager [req-49c7bdd3-70d0-471e-9610-46d5f73148ff req-efe62cdc-b715-49aa-8f51-7989fec61188 service nova] [instance: f311a533-5c48-410b-ba3b-58f0032c8816] Received unexpected event network-vif-plugged-c324d23d-8733-4dee-a740-12bc47cfb838 for instance with vm_state building and task_state spawning. 
[ 575.342846] env[63028]: DEBUG nova.compute.manager [req-49c7bdd3-70d0-471e-9610-46d5f73148ff req-efe62cdc-b715-49aa-8f51-7989fec61188 service nova] [instance: f311a533-5c48-410b-ba3b-58f0032c8816] Received event network-changed-c324d23d-8733-4dee-a740-12bc47cfb838 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 575.343014] env[63028]: DEBUG nova.compute.manager [req-49c7bdd3-70d0-471e-9610-46d5f73148ff req-efe62cdc-b715-49aa-8f51-7989fec61188 service nova] [instance: f311a533-5c48-410b-ba3b-58f0032c8816] Refreshing instance network info cache due to event network-changed-c324d23d-8733-4dee-a740-12bc47cfb838. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 575.343240] env[63028]: DEBUG oslo_concurrency.lockutils [req-49c7bdd3-70d0-471e-9610-46d5f73148ff req-efe62cdc-b715-49aa-8f51-7989fec61188 service nova] Acquiring lock "refresh_cache-f311a533-5c48-410b-ba3b-58f0032c8816" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 575.343390] env[63028]: DEBUG oslo_concurrency.lockutils [req-49c7bdd3-70d0-471e-9610-46d5f73148ff req-efe62cdc-b715-49aa-8f51-7989fec61188 service nova] Acquired lock "refresh_cache-f311a533-5c48-410b-ba3b-58f0032c8816" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 575.343551] env[63028]: DEBUG nova.network.neutron [req-49c7bdd3-70d0-471e-9610-46d5f73148ff req-efe62cdc-b715-49aa-8f51-7989fec61188 service nova] [instance: f311a533-5c48-410b-ba3b-58f0032c8816] Refreshing network info cache for port c324d23d-8733-4dee-a740-12bc47cfb838 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 575.452363] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6392c5ef-c493-48d6-9f8d-88cde1408e0c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.459974] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-390b1e87-ee37-4a57-a852-88ca3c091759 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.491986] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df933cde-4b02-4e21-a372-ca3989b2d096 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.499976] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9cf8b5a-2e08-4bd3-a70e-75408aff4589 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.516031] env[63028]: DEBUG nova.compute.provider_tree [None req-708d0fa9-8389-4fb9-b91d-cffdbfa9c678 tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 575.524658] env[63028]: DEBUG oslo_vmware.api [None req-fda4047a-9e01-42e1-8149-a49f72d41c4d tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Task: {'id': task-2734851, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 575.558559] env[63028]: DEBUG oslo_vmware.api [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Task: {'id': task-2734853, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 575.570343] env[63028]: DEBUG oslo_vmware.api [None req-147062de-91cc-4792-8607-76dedea7efea tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5273e1a7-cadb-c4ff-cdd5-79f218d85316, 'name': SearchDatastore_Task, 'duration_secs': 0.025604} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 575.570343] env[63028]: DEBUG oslo_concurrency.lockutils [None req-147062de-91cc-4792-8607-76dedea7efea tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 575.570649] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-147062de-91cc-4792-8607-76dedea7efea tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] a167df01-05e4-453d-8800-9c104d912474/a167df01-05e4-453d-8800-9c104d912474.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 575.570970] env[63028]: DEBUG oslo_concurrency.lockutils [None req-51a72729-4a98-4e9c-9e35-60196cf706b3 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 575.571216] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-51a72729-4a98-4e9c-9e35-60196cf706b3 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 575.571690] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4d5cb3f5-2665-4db1-b920-ef138aac6f6e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.575742] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-05d19569-8770-4be5-a440-63c8648a1126 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.581352] env[63028]: DEBUG oslo_vmware.api [None req-147062de-91cc-4792-8607-76dedea7efea tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] Waiting for the task: (returnval){ [ 575.581352] env[63028]: value = "task-2734856" [ 575.581352] 
env[63028]: _type = "Task" [ 575.581352] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 575.582664] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-51a72729-4a98-4e9c-9e35-60196cf706b3 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 575.582734] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-51a72729-4a98-4e9c-9e35-60196cf706b3 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 575.590137] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b1e0ffc6-319f-4078-a58e-ad6015181df1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.598412] env[63028]: DEBUG oslo_vmware.api [None req-51a72729-4a98-4e9c-9e35-60196cf706b3 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Waiting for the task: (returnval){ [ 575.598412] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52c134d4-15be-15b2-8809-6c3bbf89a729" [ 575.598412] env[63028]: _type = "Task" [ 575.598412] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 575.602914] env[63028]: DEBUG oslo_vmware.api [None req-147062de-91cc-4792-8607-76dedea7efea tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] Task: {'id': task-2734856, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 575.613314] env[63028]: DEBUG oslo_vmware.api [None req-51a72729-4a98-4e9c-9e35-60196cf706b3 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52c134d4-15be-15b2-8809-6c3bbf89a729, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 575.657136] env[63028]: DEBUG oslo_vmware.api [None req-a4a45f73-e5da-48b3-b2dc-442bd505e719 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Task: {'id': task-2734855, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 576.005857] env[63028]: DEBUG nova.network.neutron [None req-7796d56a-98c6-481b-9d55-c9c5a993a07a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: e20ed04f-205b-4aa9-b8b6-e352cd237412] Successfully created port: 735ece36-635c-498c-ad00-02b6ef32bba7 {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 576.024457] env[63028]: DEBUG nova.scheduler.client.report [None req-708d0fa9-8389-4fb9-b91d-cffdbfa9c678 tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 576.036455] env[63028]: DEBUG oslo_vmware.api [None req-fda4047a-9e01-42e1-8149-a49f72d41c4d tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Task: {'id': task-2734851, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 576.062585] env[63028]: DEBUG oslo_vmware.api [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Task: {'id': task-2734853, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 576.095293] env[63028]: DEBUG oslo_vmware.api [None req-147062de-91cc-4792-8607-76dedea7efea tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] Task: {'id': task-2734856, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 576.113834] env[63028]: DEBUG oslo_vmware.api [None req-51a72729-4a98-4e9c-9e35-60196cf706b3 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52c134d4-15be-15b2-8809-6c3bbf89a729, 'name': SearchDatastore_Task, 'duration_secs': 0.018433} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 576.118472] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ff72bde4-afa3-4e6a-a115-48ce782a11ae {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.121811] env[63028]: DEBUG oslo_vmware.api [None req-51a72729-4a98-4e9c-9e35-60196cf706b3 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Waiting for the task: (returnval){ [ 576.121811] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52635be1-4785-1dd1-7b77-8834936a075c" [ 576.121811] env[63028]: _type = "Task" [ 576.121811] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 576.130948] env[63028]: DEBUG oslo_vmware.api [None req-51a72729-4a98-4e9c-9e35-60196cf706b3 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52635be1-4785-1dd1-7b77-8834936a075c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 576.157302] env[63028]: DEBUG oslo_vmware.api [None req-a4a45f73-e5da-48b3-b2dc-442bd505e719 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Task: {'id': task-2734855, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.512466} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 576.157732] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-a4a45f73-e5da-48b3-b2dc-442bd505e719 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 576.158060] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-a4a45f73-e5da-48b3-b2dc-442bd505e719 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] [instance: 03a19e41-1146-4560-8d93-16a23aa952da] Deleted contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 576.158165] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-a4a45f73-e5da-48b3-b2dc-442bd505e719 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] [instance: 03a19e41-1146-4560-8d93-16a23aa952da] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 576.158341] env[63028]: INFO nova.compute.manager [None req-a4a45f73-e5da-48b3-b2dc-442bd505e719 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] [instance: 03a19e41-1146-4560-8d93-16a23aa952da] Took 1.59 seconds to destroy the instance on the hypervisor. [ 576.158580] env[63028]: DEBUG oslo.service.loopingcall [None req-a4a45f73-e5da-48b3-b2dc-442bd505e719 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 576.158768] env[63028]: DEBUG nova.compute.manager [-] [instance: 03a19e41-1146-4560-8d93-16a23aa952da] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 576.158858] env[63028]: DEBUG nova.network.neutron [-] [instance: 03a19e41-1146-4560-8d93-16a23aa952da] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 576.205748] env[63028]: DEBUG nova.compute.manager [None req-7796d56a-98c6-481b-9d55-c9c5a993a07a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: e20ed04f-205b-4aa9-b8b6-e352cd237412] Start spawning the instance on the hypervisor. 
{{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 576.242700] env[63028]: DEBUG nova.virt.hardware [None req-7796d56a-98c6-481b-9d55-c9c5a993a07a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 576.243061] env[63028]: DEBUG nova.virt.hardware [None req-7796d56a-98c6-481b-9d55-c9c5a993a07a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 576.244332] env[63028]: DEBUG nova.virt.hardware [None req-7796d56a-98c6-481b-9d55-c9c5a993a07a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 576.244332] env[63028]: DEBUG nova.virt.hardware [None req-7796d56a-98c6-481b-9d55-c9c5a993a07a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 576.244332] env[63028]: DEBUG nova.virt.hardware [None req-7796d56a-98c6-481b-9d55-c9c5a993a07a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 576.244332] env[63028]: DEBUG nova.virt.hardware [None req-7796d56a-98c6-481b-9d55-c9c5a993a07a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 576.244332] env[63028]: DEBUG nova.virt.hardware [None req-7796d56a-98c6-481b-9d55-c9c5a993a07a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 576.245289] env[63028]: DEBUG nova.virt.hardware [None req-7796d56a-98c6-481b-9d55-c9c5a993a07a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 576.245289] env[63028]: DEBUG nova.virt.hardware [None req-7796d56a-98c6-481b-9d55-c9c5a993a07a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 576.245289] env[63028]: DEBUG nova.virt.hardware [None req-7796d56a-98c6-481b-9d55-c9c5a993a07a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 576.245289] env[63028]: DEBUG nova.virt.hardware [None req-7796d56a-98c6-481b-9d55-c9c5a993a07a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 576.245959] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93cd6186-2dd5-441c-8f69-d5f99becbee5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.256993] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ce93ef9-6f48-428a-858c-5e9dc5ad0eeb {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.277677] env[63028]: DEBUG nova.network.neutron [-] [instance: 03a19e41-1146-4560-8d93-16a23aa952da] Instance cache missing network info. {{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 576.535213] env[63028]: DEBUG oslo_concurrency.lockutils [None req-708d0fa9-8389-4fb9-b91d-cffdbfa9c678 tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.364s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 576.535213] env[63028]: DEBUG nova.compute.manager [None req-708d0fa9-8389-4fb9-b91d-cffdbfa9c678 tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] [instance: 1eeb96d1-6e03-4192-a9db-955444519fd7] Start building networks asynchronously for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 576.537836] env[63028]: DEBUG oslo_vmware.api [None req-fda4047a-9e01-42e1-8149-a49f72d41c4d tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Task: {'id': task-2734851, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.697071} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 576.538355] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f2243ffd-3e93-4ac3-8cfa-6c07411b0613 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.113s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 576.540144] env[63028]: INFO nova.compute.claims [None req-f2243ffd-3e93-4ac3-8cfa-6c07411b0613 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] [instance: 2dcdb36f-6c2d-4f70-8aed-27f6511ef3e8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 576.542008] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-fda4047a-9e01-42e1-8149-a49f72d41c4d tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] 94b1bf30-0f9b-4197-99ff-6631a13ab2d1/94b1bf30-0f9b-4197-99ff-6631a13ab2d1.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 576.545577] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-fda4047a-9e01-42e1-8149-a49f72d41c4d tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] [instance: 94b1bf30-0f9b-4197-99ff-6631a13ab2d1] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 576.545577] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b83dc485-d12c-4407-ab02-4315e14f91dd {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.556414] env[63028]: DEBUG oslo_vmware.api [None req-fda4047a-9e01-42e1-8149-a49f72d41c4d tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Waiting for the task: (returnval){ [ 576.556414] env[63028]: value = "task-2734857" [ 576.556414] env[63028]: _type = "Task" [ 576.556414] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 576.563934] env[63028]: DEBUG oslo_vmware.api [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Task: {'id': task-2734853, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 576.569237] env[63028]: DEBUG oslo_vmware.api [None req-fda4047a-9e01-42e1-8149-a49f72d41c4d tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Task: {'id': task-2734857, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 576.600130] env[63028]: DEBUG oslo_vmware.api [None req-147062de-91cc-4792-8607-76dedea7efea tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] Task: {'id': task-2734856, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.900323} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 576.600130] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-147062de-91cc-4792-8607-76dedea7efea tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] a167df01-05e4-453d-8800-9c104d912474/a167df01-05e4-453d-8800-9c104d912474.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 576.600692] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-147062de-91cc-4792-8607-76dedea7efea tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] [instance: a167df01-05e4-453d-8800-9c104d912474] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 576.600692] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9d875cb9-4bdd-4df9-a2d9-c7a834b866a9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.608165] env[63028]: DEBUG oslo_vmware.api [None req-147062de-91cc-4792-8607-76dedea7efea tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] Waiting for the task: (returnval){ [ 576.608165] env[63028]: value = "task-2734858" [ 576.608165] env[63028]: _type = "Task" [ 576.608165] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 576.621239] env[63028]: DEBUG oslo_vmware.api [None req-147062de-91cc-4792-8607-76dedea7efea tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] Task: {'id': task-2734858, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 576.637679] env[63028]: DEBUG oslo_vmware.api [None req-51a72729-4a98-4e9c-9e35-60196cf706b3 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52635be1-4785-1dd1-7b77-8834936a075c, 'name': SearchDatastore_Task, 'duration_secs': 0.055505} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 576.637986] env[63028]: DEBUG oslo_concurrency.lockutils [None req-51a72729-4a98-4e9c-9e35-60196cf706b3 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 576.638294] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-51a72729-4a98-4e9c-9e35-60196cf706b3 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] f311a533-5c48-410b-ba3b-58f0032c8816/f311a533-5c48-410b-ba3b-58f0032c8816.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 576.638536] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-69a5b8d6-fe21-44a6-8b8d-414f79397f22 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.645659] env[63028]: DEBUG oslo_vmware.api [None req-51a72729-4a98-4e9c-9e35-60196cf706b3 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Waiting for the task: (returnval){ [ 576.645659] env[63028]: value = "task-2734859" [ 576.645659] env[63028]: _type = "Task" [ 576.645659] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 576.654606] env[63028]: DEBUG oslo_vmware.api [None req-51a72729-4a98-4e9c-9e35-60196cf706b3 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2734859, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 576.779600] env[63028]: DEBUG nova.network.neutron [-] [instance: 03a19e41-1146-4560-8d93-16a23aa952da] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 576.849340] env[63028]: DEBUG nova.network.neutron [req-49c7bdd3-70d0-471e-9610-46d5f73148ff req-efe62cdc-b715-49aa-8f51-7989fec61188 service nova] [instance: f311a533-5c48-410b-ba3b-58f0032c8816] Updated VIF entry in instance network info cache for port c324d23d-8733-4dee-a740-12bc47cfb838. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 576.849340] env[63028]: DEBUG nova.network.neutron [req-49c7bdd3-70d0-471e-9610-46d5f73148ff req-efe62cdc-b715-49aa-8f51-7989fec61188 service nova] [instance: f311a533-5c48-410b-ba3b-58f0032c8816] Updating instance_info_cache with network_info: [{"id": "c324d23d-8733-4dee-a740-12bc47cfb838", "address": "fa:16:3e:11:c2:3b", "network": {"id": "35eace8d-0404-444c-a1ec-e13fd5687644", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1634949692-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ef9a42771824708832a74238bbe89c0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c330dbdb-ad20-4e7e-8a12-66e4a914a84a", "external-id": "nsx-vlan-transportzone-181", "segmentation_id": 181, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc324d23d-87", "ovs_interfaceid": "c324d23d-8733-4dee-a740-12bc47cfb838", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 577.041595] env[63028]: DEBUG nova.compute.utils [None req-708d0fa9-8389-4fb9-b91d-cffdbfa9c678 tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 577.041595] env[63028]: DEBUG nova.compute.manager [None req-708d0fa9-8389-4fb9-b91d-cffdbfa9c678 tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] [instance: 1eeb96d1-6e03-4192-a9db-955444519fd7] Not allocating networking since 'none' was specified. {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 577.057116] env[63028]: DEBUG oslo_vmware.api [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Task: {'id': task-2734853, 'name': ReconfigVM_Task, 'duration_secs': 1.610169} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 577.061762] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] [instance: 679fca11-7390-4596-ab74-2f82a6cf8858] Reconfigured VM instance instance-00000003 to attach disk [datastore2] 679fca11-7390-4596-ab74-2f82a6cf8858/679fca11-7390-4596-ab74-2f82a6cf8858.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 577.062762] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b2f85222-7672-421c-b31e-0dcdc4ce1304 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.071964] env[63028]: DEBUG oslo_concurrency.lockutils [None req-70c27873-d5f4-48d7-97d8-490423e085b3 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Acquiring lock "0dbafad1-ab21-439d-bc8e-e447ac33304e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 577.072241] env[63028]: DEBUG oslo_concurrency.lockutils [None req-70c27873-d5f4-48d7-97d8-490423e085b3 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Lock "0dbafad1-ab21-439d-bc8e-e447ac33304e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 577.072477] env[63028]: DEBUG oslo_vmware.api [None req-fda4047a-9e01-42e1-8149-a49f72d41c4d tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Task: {'id': task-2734857, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072129} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 577.073797] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-fda4047a-9e01-42e1-8149-a49f72d41c4d tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] [instance: 94b1bf30-0f9b-4197-99ff-6631a13ab2d1] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 577.074409] env[63028]: DEBUG oslo_vmware.api [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Waiting for the task: (returnval){ [ 577.074409] env[63028]: value = "task-2734860" [ 577.074409] env[63028]: _type = "Task" [ 577.074409] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 577.075108] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4849cd5-f612-4c17-b73d-3905cf491fcf {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.100718] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-fda4047a-9e01-42e1-8149-a49f72d41c4d tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] [instance: 94b1bf30-0f9b-4197-99ff-6631a13ab2d1] Reconfiguring VM instance instance-00000005 to attach disk [datastore2] 94b1bf30-0f9b-4197-99ff-6631a13ab2d1/94b1bf30-0f9b-4197-99ff-6631a13ab2d1.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 577.104546] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-25e0c284-f143-4640-b5ac-077755fdc19b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.118465] env[63028]: DEBUG oslo_vmware.api [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Task: {'id': task-2734860, 'name': Rename_Task} progress is 10%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 577.128209] env[63028]: DEBUG oslo_vmware.api [None req-147062de-91cc-4792-8607-76dedea7efea tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] Task: {'id': task-2734858, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.130579} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 577.129353] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-147062de-91cc-4792-8607-76dedea7efea tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] [instance: a167df01-05e4-453d-8800-9c104d912474] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 577.130464] env[63028]: DEBUG oslo_vmware.api [None req-fda4047a-9e01-42e1-8149-a49f72d41c4d tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Waiting for the task: (returnval){ [ 577.130464] env[63028]: value = "task-2734861" [ 577.130464] env[63028]: _type = "Task" [ 577.130464] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 577.131197] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0151df8f-8f57-4c3b-83bd-a9126c71ee9d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.141924] env[63028]: DEBUG oslo_vmware.api [None req-fda4047a-9e01-42e1-8149-a49f72d41c4d tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Task: {'id': task-2734861, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 577.162906] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-147062de-91cc-4792-8607-76dedea7efea tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] [instance: a167df01-05e4-453d-8800-9c104d912474] Reconfiguring VM instance instance-00000004 to attach disk [datastore2] a167df01-05e4-453d-8800-9c104d912474/a167df01-05e4-453d-8800-9c104d912474.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 577.164916] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a98be3c7-14e2-444d-9513-f55feba6bf36 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.185568] env[63028]: DEBUG oslo_vmware.api [None req-51a72729-4a98-4e9c-9e35-60196cf706b3 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2734859, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 577.187086] env[63028]: DEBUG oslo_vmware.api [None req-147062de-91cc-4792-8607-76dedea7efea tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] Waiting for the task: (returnval){ [ 577.187086] env[63028]: value = "task-2734862" [ 577.187086] env[63028]: _type = "Task" [ 577.187086] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 577.194997] env[63028]: DEBUG oslo_vmware.api [None req-147062de-91cc-4792-8607-76dedea7efea tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] Task: {'id': task-2734862, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 577.283607] env[63028]: INFO nova.compute.manager [-] [instance: 03a19e41-1146-4560-8d93-16a23aa952da] Took 1.12 seconds to deallocate network for instance. [ 577.354190] env[63028]: DEBUG oslo_concurrency.lockutils [req-49c7bdd3-70d0-471e-9610-46d5f73148ff req-efe62cdc-b715-49aa-8f51-7989fec61188 service nova] Releasing lock "refresh_cache-f311a533-5c48-410b-ba3b-58f0032c8816" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 577.546405] env[63028]: DEBUG nova.compute.manager [None req-708d0fa9-8389-4fb9-b91d-cffdbfa9c678 tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] [instance: 1eeb96d1-6e03-4192-a9db-955444519fd7] Start building block device mappings for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 577.596415] env[63028]: DEBUG oslo_vmware.api [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Task: {'id': task-2734860, 'name': Rename_Task, 'duration_secs': 0.342464} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 577.600869] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] [instance: 679fca11-7390-4596-ab74-2f82a6cf8858] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 577.600869] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5c3316f4-b586-44d7-8cf5-ae4ec668771c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.609134] env[63028]: DEBUG oslo_vmware.api [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Waiting for the task: (returnval){ [ 577.609134] env[63028]: value = "task-2734863" [ 577.609134] env[63028]: _type = "Task" [ 577.609134] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 577.621645] env[63028]: DEBUG oslo_vmware.api [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Task: {'id': task-2734863, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 577.648294] env[63028]: DEBUG oslo_vmware.api [None req-fda4047a-9e01-42e1-8149-a49f72d41c4d tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Task: {'id': task-2734861, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 577.670393] env[63028]: DEBUG oslo_vmware.api [None req-51a72729-4a98-4e9c-9e35-60196cf706b3 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2734859, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.952914} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 577.672721] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-51a72729-4a98-4e9c-9e35-60196cf706b3 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] f311a533-5c48-410b-ba3b-58f0032c8816/f311a533-5c48-410b-ba3b-58f0032c8816.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 577.672956] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-51a72729-4a98-4e9c-9e35-60196cf706b3 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: f311a533-5c48-410b-ba3b-58f0032c8816] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 577.673561] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7764d4e3-57d7-487d-98f2-691993ffdcbf {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.682521] env[63028]: DEBUG oslo_vmware.api [None req-51a72729-4a98-4e9c-9e35-60196cf706b3 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Waiting for the task: (returnval){ [ 577.682521] env[63028]: value = "task-2734864" [ 577.682521] env[63028]: _type = "Task" [ 577.682521] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 577.690416] env[63028]: DEBUG oslo_vmware.api [None req-51a72729-4a98-4e9c-9e35-60196cf706b3 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2734864, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 577.703166] env[63028]: DEBUG oslo_vmware.api [None req-147062de-91cc-4792-8607-76dedea7efea tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] Task: {'id': task-2734862, 'name': ReconfigVM_Task, 'duration_secs': 0.47703} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 577.703166] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-147062de-91cc-4792-8607-76dedea7efea tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] [instance: a167df01-05e4-453d-8800-9c104d912474] Reconfigured VM instance instance-00000004 to attach disk [datastore2] a167df01-05e4-453d-8800-9c104d912474/a167df01-05e4-453d-8800-9c104d912474.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 577.703815] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7faaa76b-f51b-42df-bc2b-f0e3fa2301cb {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.710827] env[63028]: DEBUG oslo_vmware.api [None req-147062de-91cc-4792-8607-76dedea7efea tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] Waiting for the task: (returnval){ [ 577.710827] env[63028]: value = "task-2734865" [ 577.710827] env[63028]: _type = "Task" [ 577.710827] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 577.727068] env[63028]: DEBUG oslo_vmware.api [None req-147062de-91cc-4792-8607-76dedea7efea tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] Task: {'id': task-2734865, 'name': Rename_Task} progress is 10%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 577.792177] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a4a45f73-e5da-48b3-b2dc-442bd505e719 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 577.827081] env[63028]: DEBUG nova.network.neutron [None req-74efb235-9e92-4f7d-b153-82f0da18a05e tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] [instance: f80df630-327b-4923-a785-5d2e48fe1f19] Successfully updated port: 0756fdb5-7e3d-45e1-b815-668c72c5e804 {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 577.875287] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff907f38-a964-495e-9082-3b896c0c5a03 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.890401] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39c973e6-6d1e-4906-8211-d4607bd1079a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.930661] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97b3c969-22c5-4baf-973b-aa80d1e7f35f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.939829] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b44523bf-527c-4573-8cf9-6ac9dd1f83d9 {{(pid=63028) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.956231] env[63028]: DEBUG nova.compute.provider_tree [None req-f2243ffd-3e93-4ac3-8cfa-6c07411b0613 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 577.993735] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d380ac33-3a3f-419f-b4d1-ad7edb67b04d tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Acquiring lock "c9cc1ac7-06c6-415b-86ce-daf4849bfc05" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 577.993986] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d380ac33-3a3f-419f-b4d1-ad7edb67b04d tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Lock "c9cc1ac7-06c6-415b-86ce-daf4849bfc05" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 578.131061] env[63028]: DEBUG oslo_vmware.api [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Task: {'id': task-2734863, 'name': PowerOnVM_Task} progress is 1%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 578.148428] env[63028]: DEBUG oslo_vmware.api [None req-fda4047a-9e01-42e1-8149-a49f72d41c4d tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Task: {'id': task-2734861, 'name': ReconfigVM_Task, 'duration_secs': 0.808015} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 578.148968] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-fda4047a-9e01-42e1-8149-a49f72d41c4d tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] [instance: 94b1bf30-0f9b-4197-99ff-6631a13ab2d1] Reconfigured VM instance instance-00000005 to attach disk [datastore2] 94b1bf30-0f9b-4197-99ff-6631a13ab2d1/94b1bf30-0f9b-4197-99ff-6631a13ab2d1.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 578.150867] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6d9cb415-9c37-4a22-a9ce-3eeb0c290816 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.159156] env[63028]: DEBUG oslo_vmware.api [None req-fda4047a-9e01-42e1-8149-a49f72d41c4d tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Waiting for the task: (returnval){ [ 578.159156] env[63028]: value = "task-2734866" [ 578.159156] env[63028]: _type = "Task" [ 578.159156] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 578.169212] env[63028]: DEBUG oslo_vmware.api [None req-fda4047a-9e01-42e1-8149-a49f72d41c4d tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Task: {'id': task-2734866, 'name': Rename_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 578.193908] env[63028]: DEBUG oslo_vmware.api [None req-51a72729-4a98-4e9c-9e35-60196cf706b3 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2734864, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 578.224376] env[63028]: DEBUG oslo_vmware.api [None req-147062de-91cc-4792-8607-76dedea7efea tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] Task: {'id': task-2734865, 'name': Rename_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 578.334723] env[63028]: DEBUG oslo_concurrency.lockutils [None req-74efb235-9e92-4f7d-b153-82f0da18a05e tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] Acquiring lock "refresh_cache-f80df630-327b-4923-a785-5d2e48fe1f19" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 578.334903] env[63028]: DEBUG oslo_concurrency.lockutils [None req-74efb235-9e92-4f7d-b153-82f0da18a05e tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] Acquired lock "refresh_cache-f80df630-327b-4923-a785-5d2e48fe1f19" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 578.335072] env[63028]: DEBUG nova.network.neutron [None req-74efb235-9e92-4f7d-b153-82f0da18a05e tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] [instance: f80df630-327b-4923-a785-5d2e48fe1f19] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 578.458658] env[63028]: DEBUG nova.scheduler.client.report [None req-f2243ffd-3e93-4ac3-8cfa-6c07411b0613 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 578.568769] env[63028]: DEBUG nova.compute.manager [None req-708d0fa9-8389-4fb9-b91d-cffdbfa9c678 tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] [instance: 1eeb96d1-6e03-4192-a9db-955444519fd7] Start spawning the instance on the hypervisor. 
{{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 578.610638] env[63028]: DEBUG nova.virt.hardware [None req-708d0fa9-8389-4fb9-b91d-cffdbfa9c678 tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 578.610638] env[63028]: DEBUG nova.virt.hardware [None req-708d0fa9-8389-4fb9-b91d-cffdbfa9c678 tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 578.610638] env[63028]: DEBUG nova.virt.hardware [None req-708d0fa9-8389-4fb9-b91d-cffdbfa9c678 tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 578.610902] env[63028]: DEBUG nova.virt.hardware [None req-708d0fa9-8389-4fb9-b91d-cffdbfa9c678 tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 578.610902] env[63028]: DEBUG nova.virt.hardware [None req-708d0fa9-8389-4fb9-b91d-cffdbfa9c678 tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 578.610902] env[63028]: DEBUG nova.virt.hardware [None req-708d0fa9-8389-4fb9-b91d-cffdbfa9c678 tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 578.611149] env[63028]: DEBUG nova.virt.hardware [None req-708d0fa9-8389-4fb9-b91d-cffdbfa9c678 tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 578.611401] env[63028]: DEBUG nova.virt.hardware [None req-708d0fa9-8389-4fb9-b91d-cffdbfa9c678 tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 
578.611611] env[63028]: DEBUG nova.virt.hardware [None req-708d0fa9-8389-4fb9-b91d-cffdbfa9c678 tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 578.611768] env[63028]: DEBUG nova.virt.hardware [None req-708d0fa9-8389-4fb9-b91d-cffdbfa9c678 tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 578.611951] env[63028]: DEBUG nova.virt.hardware [None req-708d0fa9-8389-4fb9-b91d-cffdbfa9c678 tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 578.613431] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95237894-5b68-482c-891f-15fa1bb64009 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.629167] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4b7d0a7d-24c8-4fd0-b2fc-d929be4c8481 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Acquiring lock "c7a3f2c6-8368-49cc-9737-ea1d836f1783" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 578.629167] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4b7d0a7d-24c8-4fd0-b2fc-d929be4c8481 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Lock "c7a3f2c6-8368-49cc-9737-ea1d836f1783" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 578.631090] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ea8989b-fc10-4071-9ae2-77e9b0817f02 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.641078] env[63028]: DEBUG oslo_vmware.api [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Task: {'id': task-2734863, 'name': PowerOnVM_Task} progress is 1%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 578.654201] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-708d0fa9-8389-4fb9-b91d-cffdbfa9c678 tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] [instance: 1eeb96d1-6e03-4192-a9db-955444519fd7] Instance VIF info [] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 578.660453] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-708d0fa9-8389-4fb9-b91d-cffdbfa9c678 tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] Creating folder: Project (a7006f93e6fb43a487f357c2de5e1cd8). Parent ref: group-v550570. {{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 578.661510] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-658b7fd5-affa-4444-a594-8f88fea386ea {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.671820] env[63028]: DEBUG oslo_vmware.api [None req-fda4047a-9e01-42e1-8149-a49f72d41c4d tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Task: {'id': task-2734866, 'name': Rename_Task, 'duration_secs': 0.179865} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 578.672229] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-fda4047a-9e01-42e1-8149-a49f72d41c4d tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] [instance: 94b1bf30-0f9b-4197-99ff-6631a13ab2d1] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 578.672608] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f106660b-b011-4e4b-848c-da6f7aba80a4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.677722] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-708d0fa9-8389-4fb9-b91d-cffdbfa9c678 tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] Created folder: Project (a7006f93e6fb43a487f357c2de5e1cd8) in parent group-v550570. [ 578.677976] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-708d0fa9-8389-4fb9-b91d-cffdbfa9c678 tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] Creating folder: Instances. Parent ref: group-v550590. {{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 578.679677] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-dcb175dc-968f-44ff-89c6-b34ee8b98955 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.681557] env[63028]: DEBUG oslo_vmware.api [None req-fda4047a-9e01-42e1-8149-a49f72d41c4d tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Waiting for the task: (returnval){ [ 578.681557] env[63028]: value = "task-2734868" [ 578.681557] env[63028]: _type = "Task" [ 578.681557] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 578.690519] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-708d0fa9-8389-4fb9-b91d-cffdbfa9c678 tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] Created folder: Instances in parent group-v550590. [ 578.691689] env[63028]: DEBUG oslo.service.loopingcall [None req-708d0fa9-8389-4fb9-b91d-cffdbfa9c678 tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 578.694955] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1eeb96d1-6e03-4192-a9db-955444519fd7] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 578.695272] env[63028]: DEBUG oslo_vmware.api [None req-fda4047a-9e01-42e1-8149-a49f72d41c4d tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Task: {'id': task-2734868, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 578.695483] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2b667ad3-1c08-4821-8f9f-2f5e3d97d31e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.711818] env[63028]: DEBUG oslo_vmware.api [None req-51a72729-4a98-4e9c-9e35-60196cf706b3 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2734864, 'name': ExtendVirtualDisk_Task, 'duration_secs': 1.012604} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 578.712660] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-51a72729-4a98-4e9c-9e35-60196cf706b3 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: f311a533-5c48-410b-ba3b-58f0032c8816] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 578.713860] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf764647-610d-4cc8-bc4e-e6267cc23ba7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.718726] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 578.718726] env[63028]: value = "task-2734870" [ 578.718726] env[63028]: _type = "Task" [ 578.718726] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 578.744974] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-51a72729-4a98-4e9c-9e35-60196cf706b3 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: f311a533-5c48-410b-ba3b-58f0032c8816] Reconfiguring VM instance instance-00000006 to attach disk [datastore2] f311a533-5c48-410b-ba3b-58f0032c8816/f311a533-5c48-410b-ba3b-58f0032c8816.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 578.750725] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ae0f044a-93fb-4947-92f4-eca76f2c2738 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.765182] env[63028]: DEBUG oslo_vmware.api [None req-147062de-91cc-4792-8607-76dedea7efea tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] Task: {'id': task-2734865, 'name': Rename_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 578.769312] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2734870, 'name': CreateVM_Task} progress is 15%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 578.774829] env[63028]: DEBUG oslo_vmware.api [None req-51a72729-4a98-4e9c-9e35-60196cf706b3 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Waiting for the task: (returnval){ [ 578.774829] env[63028]: value = "task-2734871" [ 578.774829] env[63028]: _type = "Task" [ 578.774829] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 578.786362] env[63028]: DEBUG oslo_vmware.api [None req-51a72729-4a98-4e9c-9e35-60196cf706b3 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2734871, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 578.831045] env[63028]: DEBUG nova.compute.manager [req-3a853502-ea0c-4b75-8308-06f92b5f785e req-24494eae-637e-4f6d-86cf-5c06e5f726d0 service nova] [instance: f80df630-327b-4923-a785-5d2e48fe1f19] Received event network-vif-plugged-0756fdb5-7e3d-45e1-b815-668c72c5e804 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 578.831045] env[63028]: DEBUG oslo_concurrency.lockutils [req-3a853502-ea0c-4b75-8308-06f92b5f785e req-24494eae-637e-4f6d-86cf-5c06e5f726d0 service nova] Acquiring lock "f80df630-327b-4923-a785-5d2e48fe1f19-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 578.831045] env[63028]: DEBUG oslo_concurrency.lockutils [req-3a853502-ea0c-4b75-8308-06f92b5f785e req-24494eae-637e-4f6d-86cf-5c06e5f726d0 service nova] Lock "f80df630-327b-4923-a785-5d2e48fe1f19-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 578.831299] env[63028]: DEBUG oslo_concurrency.lockutils [req-3a853502-ea0c-4b75-8308-06f92b5f785e req-24494eae-637e-4f6d-86cf-5c06e5f726d0 service nova] Lock "f80df630-327b-4923-a785-5d2e48fe1f19-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 578.831616] env[63028]: DEBUG nova.compute.manager [req-3a853502-ea0c-4b75-8308-06f92b5f785e req-24494eae-637e-4f6d-86cf-5c06e5f726d0 service nova] [instance: f80df630-327b-4923-a785-5d2e48fe1f19] No waiting events found dispatching network-vif-plugged-0756fdb5-7e3d-45e1-b815-668c72c5e804 {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 578.831616] env[63028]: WARNING nova.compute.manager [req-3a853502-ea0c-4b75-8308-06f92b5f785e req-24494eae-637e-4f6d-86cf-5c06e5f726d0 service nova] [instance: f80df630-327b-4923-a785-5d2e48fe1f19] Received unexpected event network-vif-plugged-0756fdb5-7e3d-45e1-b815-668c72c5e804 for instance with vm_state building and task_state spawning. [ 578.908978] env[63028]: DEBUG nova.network.neutron [None req-74efb235-9e92-4f7d-b153-82f0da18a05e tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] [instance: f80df630-327b-4923-a785-5d2e48fe1f19] Instance cache missing network info. {{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 578.965842] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f2243ffd-3e93-4ac3-8cfa-6c07411b0613 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.427s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 578.966441] env[63028]: DEBUG nova.compute.manager [None req-f2243ffd-3e93-4ac3-8cfa-6c07411b0613 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] [instance: 2dcdb36f-6c2d-4f70-8aed-27f6511ef3e8] Start building networks asynchronously for instance. 
{{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 578.973022] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5557dd6d-8871-4b04-902a-5fb89e3ecdba tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.289s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 578.973022] env[63028]: INFO nova.compute.claims [None req-5557dd6d-8871-4b04-902a-5fb89e3ecdba tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] [instance: 67440140-a619-41f2-98fe-eff23e8ad8a5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 579.124636] env[63028]: DEBUG oslo_vmware.api [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Task: {'id': task-2734863, 'name': PowerOnVM_Task} progress is 64%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 579.144577] env[63028]: DEBUG nova.network.neutron [None req-7796d56a-98c6-481b-9d55-c9c5a993a07a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: e20ed04f-205b-4aa9-b8b6-e352cd237412] Successfully updated port: 735ece36-635c-498c-ad00-02b6ef32bba7 {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 579.192396] env[63028]: DEBUG oslo_vmware.api [None req-fda4047a-9e01-42e1-8149-a49f72d41c4d tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Task: {'id': task-2734868, 'name': PowerOnVM_Task, 'duration_secs': 0.466855} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 579.192980] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-fda4047a-9e01-42e1-8149-a49f72d41c4d tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] [instance: 94b1bf30-0f9b-4197-99ff-6631a13ab2d1] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 579.193719] env[63028]: INFO nova.compute.manager [None req-fda4047a-9e01-42e1-8149-a49f72d41c4d tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] [instance: 94b1bf30-0f9b-4197-99ff-6631a13ab2d1] Took 10.63 seconds to spawn the instance on the hypervisor. 
[ 579.194318] env[63028]: DEBUG nova.compute.manager [None req-fda4047a-9e01-42e1-8149-a49f72d41c4d tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] [instance: 94b1bf30-0f9b-4197-99ff-6631a13ab2d1] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 579.194730] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4962de9-b42d-4586-86e5-01aa1fb2ec2b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.235757] env[63028]: DEBUG oslo_vmware.api [None req-147062de-91cc-4792-8607-76dedea7efea tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] Task: {'id': task-2734865, 'name': Rename_Task, 'duration_secs': 1.058848} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 579.242239] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-147062de-91cc-4792-8607-76dedea7efea tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] [instance: a167df01-05e4-453d-8800-9c104d912474] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 579.243397] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2734870, 'name': CreateVM_Task, 'duration_secs': 0.347345} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 579.243397] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-727b853d-b1cc-4ea1-9f25-1a92869e8777 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.244744] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1eeb96d1-6e03-4192-a9db-955444519fd7] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 579.245204] env[63028]: DEBUG oslo_concurrency.lockutils [None req-708d0fa9-8389-4fb9-b91d-cffdbfa9c678 tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 579.245360] env[63028]: DEBUG oslo_concurrency.lockutils [None req-708d0fa9-8389-4fb9-b91d-cffdbfa9c678 tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 579.245653] env[63028]: DEBUG oslo_concurrency.lockutils [None req-708d0fa9-8389-4fb9-b91d-cffdbfa9c678 tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 579.246233] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-da1635b9-30ae-4334-87ee-673bde4dbe30 {{(pid=63028) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.250594] env[63028]: DEBUG oslo_vmware.api [None req-708d0fa9-8389-4fb9-b91d-cffdbfa9c678 tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] Waiting for the task: (returnval){ [ 579.250594] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52ebe69b-7432-b259-0484-9bb1688e1423" [ 579.250594] env[63028]: _type = "Task" [ 579.250594] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 579.251765] env[63028]: DEBUG oslo_vmware.api [None req-147062de-91cc-4792-8607-76dedea7efea tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] Waiting for the task: (returnval){ [ 579.251765] env[63028]: value = "task-2734872" [ 579.251765] env[63028]: _type = "Task" [ 579.251765] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 579.264071] env[63028]: DEBUG oslo_vmware.api [None req-708d0fa9-8389-4fb9-b91d-cffdbfa9c678 tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52ebe69b-7432-b259-0484-9bb1688e1423, 'name': SearchDatastore_Task, 'duration_secs': 0.009334} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 579.267110] env[63028]: DEBUG oslo_concurrency.lockutils [None req-708d0fa9-8389-4fb9-b91d-cffdbfa9c678 tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 579.267292] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-708d0fa9-8389-4fb9-b91d-cffdbfa9c678 tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] [instance: 1eeb96d1-6e03-4192-a9db-955444519fd7] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 579.267529] env[63028]: DEBUG oslo_concurrency.lockutils [None req-708d0fa9-8389-4fb9-b91d-cffdbfa9c678 tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 579.267676] env[63028]: DEBUG oslo_concurrency.lockutils [None req-708d0fa9-8389-4fb9-b91d-cffdbfa9c678 tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 579.267851] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-708d0fa9-8389-4fb9-b91d-cffdbfa9c678 tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] Creating directory with path 
[datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 579.268127] env[63028]: DEBUG oslo_vmware.api [None req-147062de-91cc-4792-8607-76dedea7efea tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] Task: {'id': task-2734872, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 579.268671] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b17a7753-66b2-4e30-9808-eaf073596d18 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.280625] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-708d0fa9-8389-4fb9-b91d-cffdbfa9c678 tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 579.280832] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-708d0fa9-8389-4fb9-b91d-cffdbfa9c678 tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 579.281910] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d4debd55-1149-4e97-b972-b50531dc5c18 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.287157] env[63028]: DEBUG oslo_vmware.api [None req-51a72729-4a98-4e9c-9e35-60196cf706b3 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2734871, 'name': ReconfigVM_Task, 'duration_secs': 0.298967} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 579.287578] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-51a72729-4a98-4e9c-9e35-60196cf706b3 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: f311a533-5c48-410b-ba3b-58f0032c8816] Reconfigured VM instance instance-00000006 to attach disk [datastore2] f311a533-5c48-410b-ba3b-58f0032c8816/f311a533-5c48-410b-ba3b-58f0032c8816.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 579.288296] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7d4ca4d4-891e-498e-b18c-253339e76bcc {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.290791] env[63028]: DEBUG oslo_vmware.api [None req-708d0fa9-8389-4fb9-b91d-cffdbfa9c678 tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] Waiting for the task: (returnval){ [ 579.290791] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52fefd7c-0ce4-7183-6a0a-1a11019e9002" [ 579.290791] env[63028]: _type = "Task" [ 579.290791] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 579.295980] env[63028]: DEBUG oslo_vmware.api [None req-51a72729-4a98-4e9c-9e35-60196cf706b3 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Waiting for the task: (returnval){ [ 579.295980] env[63028]: value = "task-2734873" [ 579.295980] env[63028]: _type = "Task" [ 579.295980] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 579.302445] env[63028]: DEBUG oslo_vmware.api [None req-708d0fa9-8389-4fb9-b91d-cffdbfa9c678 tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52fefd7c-0ce4-7183-6a0a-1a11019e9002, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 579.303301] env[63028]: DEBUG nova.network.neutron [None req-74efb235-9e92-4f7d-b153-82f0da18a05e tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] [instance: f80df630-327b-4923-a785-5d2e48fe1f19] Updating instance_info_cache with network_info: [{"id": "0756fdb5-7e3d-45e1-b815-668c72c5e804", "address": "fa:16:3e:db:8f:bc", "network": {"id": "968b8886-204c-4bbb-b91b-d4a4bd882174", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-2079634116-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bb987fc85bab4863a97bfabd29b5b141", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2ede0e6-8d7a-4018-bb37-25bf388e9867", "external-id": "nsx-vlan-transportzone-945", "segmentation_id": 945, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0756fdb5-7e", "ovs_interfaceid": "0756fdb5-7e3d-45e1-b815-668c72c5e804", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 579.311455] env[63028]: DEBUG oslo_vmware.api [None req-51a72729-4a98-4e9c-9e35-60196cf706b3 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2734873, 'name': Rename_Task} progress is 10%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 579.472395] env[63028]: DEBUG nova.compute.utils [None req-f2243ffd-3e93-4ac3-8cfa-6c07411b0613 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 579.477698] env[63028]: DEBUG nova.compute.manager [None req-f2243ffd-3e93-4ac3-8cfa-6c07411b0613 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] [instance: 2dcdb36f-6c2d-4f70-8aed-27f6511ef3e8] Not allocating networking since 'none' was specified. 
{{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 579.627113] env[63028]: DEBUG oslo_vmware.api [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Task: {'id': task-2734863, 'name': PowerOnVM_Task, 'duration_secs': 1.929775} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 579.627418] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] [instance: 679fca11-7390-4596-ab74-2f82a6cf8858] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 579.628205] env[63028]: INFO nova.compute.manager [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] [instance: 679fca11-7390-4596-ab74-2f82a6cf8858] Took 17.84 seconds to spawn the instance on the hypervisor. [ 579.628205] env[63028]: DEBUG nova.compute.manager [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] [instance: 679fca11-7390-4596-ab74-2f82a6cf8858] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 579.628602] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d48bfdd-07a4-47e4-9837-97b6243cec20 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.649716] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7796d56a-98c6-481b-9d55-c9c5a993a07a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Acquiring lock "refresh_cache-e20ed04f-205b-4aa9-b8b6-e352cd237412" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 579.649883] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7796d56a-98c6-481b-9d55-c9c5a993a07a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Acquired lock "refresh_cache-e20ed04f-205b-4aa9-b8b6-e352cd237412" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 579.650086] env[63028]: DEBUG nova.network.neutron [None req-7796d56a-98c6-481b-9d55-c9c5a993a07a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: e20ed04f-205b-4aa9-b8b6-e352cd237412] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 579.712614] env[63028]: INFO nova.compute.manager [None req-fda4047a-9e01-42e1-8149-a49f72d41c4d tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] [instance: 94b1bf30-0f9b-4197-99ff-6631a13ab2d1] Took 19.23 seconds to build instance. [ 579.769181] env[63028]: DEBUG oslo_vmware.api [None req-147062de-91cc-4792-8607-76dedea7efea tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] Task: {'id': task-2734872, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 579.804999] env[63028]: DEBUG oslo_vmware.api [None req-708d0fa9-8389-4fb9-b91d-cffdbfa9c678 tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52fefd7c-0ce4-7183-6a0a-1a11019e9002, 'name': SearchDatastore_Task, 'duration_secs': 0.008943} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 579.805786] env[63028]: DEBUG oslo_concurrency.lockutils [None req-74efb235-9e92-4f7d-b153-82f0da18a05e tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] Releasing lock "refresh_cache-f80df630-327b-4923-a785-5d2e48fe1f19" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 579.806087] env[63028]: DEBUG nova.compute.manager [None req-74efb235-9e92-4f7d-b153-82f0da18a05e tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] [instance: f80df630-327b-4923-a785-5d2e48fe1f19] Instance network_info: |[{"id": "0756fdb5-7e3d-45e1-b815-668c72c5e804", "address": "fa:16:3e:db:8f:bc", "network": {"id": "968b8886-204c-4bbb-b91b-d4a4bd882174", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-2079634116-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bb987fc85bab4863a97bfabd29b5b141", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2ede0e6-8d7a-4018-bb37-25bf388e9867", "external-id": "nsx-vlan-transportzone-945", "segmentation_id": 945, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0756fdb5-7e", "ovs_interfaceid": "0756fdb5-7e3d-45e1-b815-668c72c5e804", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 579.806324] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2d60da62-dcd4-4d81-ac33-bf157abe47dd {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.811770] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-74efb235-9e92-4f7d-b153-82f0da18a05e tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] [instance: f80df630-327b-4923-a785-5d2e48fe1f19] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:db:8f:bc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b2ede0e6-8d7a-4018-bb37-25bf388e9867', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0756fdb5-7e3d-45e1-b815-668c72c5e804', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 579.819600] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-74efb235-9e92-4f7d-b153-82f0da18a05e 
tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] Creating folder: Project (bb987fc85bab4863a97bfabd29b5b141). Parent ref: group-v550570. {{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 579.819600] env[63028]: DEBUG oslo_vmware.api [None req-51a72729-4a98-4e9c-9e35-60196cf706b3 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2734873, 'name': Rename_Task, 'duration_secs': 0.143312} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 579.819752] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1c3d1829-42b4-4fa7-8286-576bf164174d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.821982] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-51a72729-4a98-4e9c-9e35-60196cf706b3 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: f311a533-5c48-410b-ba3b-58f0032c8816] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 579.822663] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-02f05074-6728-4dae-ac76-d55b456760dd {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.826280] env[63028]: DEBUG oslo_vmware.api [None req-708d0fa9-8389-4fb9-b91d-cffdbfa9c678 tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] Waiting for the task: (returnval){ [ 579.826280] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52e21bdf-5127-c4dc-1d5c-2c17a371003b" [ 579.826280] env[63028]: _type = "Task" [ 579.826280] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 579.831444] env[63028]: DEBUG oslo_vmware.api [None req-51a72729-4a98-4e9c-9e35-60196cf706b3 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Waiting for the task: (returnval){ [ 579.831444] env[63028]: value = "task-2734875" [ 579.831444] env[63028]: _type = "Task" [ 579.831444] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 579.834587] env[63028]: DEBUG oslo_vmware.api [None req-708d0fa9-8389-4fb9-b91d-cffdbfa9c678 tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52e21bdf-5127-c4dc-1d5c-2c17a371003b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 579.838886] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-74efb235-9e92-4f7d-b153-82f0da18a05e tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] Created folder: Project (bb987fc85bab4863a97bfabd29b5b141) in parent group-v550570. [ 579.839091] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-74efb235-9e92-4f7d-b153-82f0da18a05e tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] Creating folder: Instances. Parent ref: group-v550593. 
{{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 579.839339] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-29f20530-20a5-497b-b987-4a4cbf43733f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.849757] env[63028]: DEBUG oslo_vmware.api [None req-51a72729-4a98-4e9c-9e35-60196cf706b3 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2734875, 'name': PowerOnVM_Task} progress is 33%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 579.851163] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-74efb235-9e92-4f7d-b153-82f0da18a05e tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] Created folder: Instances in parent group-v550593. [ 579.851400] env[63028]: DEBUG oslo.service.loopingcall [None req-74efb235-9e92-4f7d-b153-82f0da18a05e tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 579.851583] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f80df630-327b-4923-a785-5d2e48fe1f19] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 579.851787] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1fbc4f2b-fa83-40bb-a9c2-f69a8c8a8f46 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.870693] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 579.870693] env[63028]: value = "task-2734877" [ 579.870693] env[63028]: _type = "Task" [ 579.870693] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 579.879373] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2734877, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 579.887965] env[63028]: DEBUG nova.compute.manager [req-bf006ec5-26fd-45d7-a52a-aaf5a79710f4 req-71bf9558-072f-47c0-94c3-b47e78984329 service nova] [instance: e20ed04f-205b-4aa9-b8b6-e352cd237412] Received event network-vif-plugged-735ece36-635c-498c-ad00-02b6ef32bba7 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 579.888227] env[63028]: DEBUG oslo_concurrency.lockutils [req-bf006ec5-26fd-45d7-a52a-aaf5a79710f4 req-71bf9558-072f-47c0-94c3-b47e78984329 service nova] Acquiring lock "e20ed04f-205b-4aa9-b8b6-e352cd237412-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 579.888418] env[63028]: DEBUG oslo_concurrency.lockutils [req-bf006ec5-26fd-45d7-a52a-aaf5a79710f4 req-71bf9558-072f-47c0-94c3-b47e78984329 service nova] Lock "e20ed04f-205b-4aa9-b8b6-e352cd237412-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 579.888583] env[63028]: DEBUG oslo_concurrency.lockutils [req-bf006ec5-26fd-45d7-a52a-aaf5a79710f4 req-71bf9558-072f-47c0-94c3-b47e78984329 service nova] Lock "e20ed04f-205b-4aa9-b8b6-e352cd237412-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 579.888785] env[63028]: DEBUG nova.compute.manager [req-bf006ec5-26fd-45d7-a52a-aaf5a79710f4 req-71bf9558-072f-47c0-94c3-b47e78984329 service nova] [instance: e20ed04f-205b-4aa9-b8b6-e352cd237412] No waiting events found dispatching network-vif-plugged-735ece36-635c-498c-ad00-02b6ef32bba7 {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 579.888971] env[63028]: WARNING nova.compute.manager [req-bf006ec5-26fd-45d7-a52a-aaf5a79710f4 req-71bf9558-072f-47c0-94c3-b47e78984329 service nova] [instance: e20ed04f-205b-4aa9-b8b6-e352cd237412] Received unexpected event network-vif-plugged-735ece36-635c-498c-ad00-02b6ef32bba7 for instance with vm_state building and task_state spawning. [ 579.981110] env[63028]: DEBUG nova.compute.manager [None req-f2243ffd-3e93-4ac3-8cfa-6c07411b0613 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] [instance: 2dcdb36f-6c2d-4f70-8aed-27f6511ef3e8] Start building block device mappings for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 580.170834] env[63028]: INFO nova.compute.manager [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] [instance: 679fca11-7390-4596-ab74-2f82a6cf8858] Took 24.25 seconds to build instance. 
[ 580.215100] env[63028]: DEBUG oslo_concurrency.lockutils [None req-fda4047a-9e01-42e1-8149-a49f72d41c4d tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Lock "94b1bf30-0f9b-4197-99ff-6631a13ab2d1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.741s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 580.250135] env[63028]: DEBUG nova.network.neutron [None req-7796d56a-98c6-481b-9d55-c9c5a993a07a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: e20ed04f-205b-4aa9-b8b6-e352cd237412] Instance cache missing network info. {{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 580.271428] env[63028]: DEBUG oslo_vmware.api [None req-147062de-91cc-4792-8607-76dedea7efea tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] Task: {'id': task-2734872, 'name': PowerOnVM_Task, 'duration_secs': 0.89922} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 580.275821] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-147062de-91cc-4792-8607-76dedea7efea tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] [instance: a167df01-05e4-453d-8800-9c104d912474] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 580.276057] env[63028]: INFO nova.compute.manager [None req-147062de-91cc-4792-8607-76dedea7efea tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] [instance: a167df01-05e4-453d-8800-9c104d912474] Took 16.22 seconds to spawn the instance on the hypervisor. [ 580.276240] env[63028]: DEBUG nova.compute.manager [None req-147062de-91cc-4792-8607-76dedea7efea tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] [instance: a167df01-05e4-453d-8800-9c104d912474] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 580.284286] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b28bb000-8177-4b03-84a5-f24ae9c1852e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.347639] env[63028]: DEBUG oslo_vmware.api [None req-708d0fa9-8389-4fb9-b91d-cffdbfa9c678 tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52e21bdf-5127-c4dc-1d5c-2c17a371003b, 'name': SearchDatastore_Task, 'duration_secs': 0.011837} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 580.348354] env[63028]: DEBUG oslo_concurrency.lockutils [None req-708d0fa9-8389-4fb9-b91d-cffdbfa9c678 tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 580.348690] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-708d0fa9-8389-4fb9-b91d-cffdbfa9c678 tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] 1eeb96d1-6e03-4192-a9db-955444519fd7/1eeb96d1-6e03-4192-a9db-955444519fd7.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 580.349242] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6202fd4f-8b96-4f0d-80a4-3af09862e960 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.355829] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddcd40cf-5800-4109-8e93-57d5c75c5b71 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.358619] env[63028]: DEBUG oslo_vmware.api [None req-51a72729-4a98-4e9c-9e35-60196cf706b3 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2734875, 'name': PowerOnVM_Task} progress is 88%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 580.369926] env[63028]: DEBUG oslo_vmware.api [None req-708d0fa9-8389-4fb9-b91d-cffdbfa9c678 tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] Waiting for the task: (returnval){ [ 580.369926] env[63028]: value = "task-2734878" [ 580.369926] env[63028]: _type = "Task" [ 580.369926] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 580.370424] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3671833-4d4c-4c14-9113-294b36431f9d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.391396] env[63028]: DEBUG oslo_vmware.api [None req-708d0fa9-8389-4fb9-b91d-cffdbfa9c678 tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] Task: {'id': task-2734878, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 580.391597] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2734877, 'name': CreateVM_Task, 'duration_secs': 0.457226} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 580.423244] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f80df630-327b-4923-a785-5d2e48fe1f19] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 580.424063] env[63028]: DEBUG oslo_concurrency.lockutils [None req-74efb235-9e92-4f7d-b153-82f0da18a05e tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 580.424262] env[63028]: DEBUG oslo_concurrency.lockutils [None req-74efb235-9e92-4f7d-b153-82f0da18a05e tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 580.424564] env[63028]: DEBUG oslo_concurrency.lockutils [None req-74efb235-9e92-4f7d-b153-82f0da18a05e tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 580.425822] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a39656e2-7f89-48b0-b409-c0c591d7629d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.428273] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6f28d962-62c5-4298-a9f2-0499fe887069 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.435829] env[63028]: DEBUG oslo_vmware.api [None req-74efb235-9e92-4f7d-b153-82f0da18a05e tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] Waiting for the task: (returnval){ [ 580.435829] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52ba2696-3e58-bfd9-3110-d8d886298ec6" [ 580.435829] env[63028]: _type = "Task" [ 580.435829] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 580.437139] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18ca7b75-257d-454c-9ceb-0426e17b05bc {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.453988] env[63028]: DEBUG nova.compute.provider_tree [None req-5557dd6d-8871-4b04-902a-5fb89e3ecdba tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 580.458641] env[63028]: DEBUG oslo_vmware.api [None req-74efb235-9e92-4f7d-b153-82f0da18a05e tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52ba2696-3e58-bfd9-3110-d8d886298ec6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 580.675438] env[63028]: DEBUG oslo_concurrency.lockutils [None req-79897f00-0a7d-4bd4-8430-d0614e82dc58 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Lock "679fca11-7390-4596-ab74-2f82a6cf8858" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.769s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 580.720756] env[63028]: DEBUG nova.compute.manager [None req-19bb96ef-6a90-46d6-b14e-187dceb20ef8 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: 5a330ed9-c106-49f2-b524-a424e717b5ce] Starting instance... 
{{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 580.735468] env[63028]: DEBUG nova.network.neutron [None req-7796d56a-98c6-481b-9d55-c9c5a993a07a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: e20ed04f-205b-4aa9-b8b6-e352cd237412] Updating instance_info_cache with network_info: [{"id": "735ece36-635c-498c-ad00-02b6ef32bba7", "address": "fa:16:3e:de:8d:69", "network": {"id": "103ef17f-0425-48ae-98df-250d0d0dd66a", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-13124710-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43b7c1be3c4343a4b4f288a355170873", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3261e15f-7e45-4516-acfd-341bab16e3cf", "external-id": "nsx-vlan-transportzone-783", "segmentation_id": 783, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap735ece36-63", "ovs_interfaceid": "735ece36-635c-498c-ad00-02b6ef32bba7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 580.813215] env[63028]: INFO nova.compute.manager [None req-147062de-91cc-4792-8607-76dedea7efea tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] [instance: a167df01-05e4-453d-8800-9c104d912474] Took 21.48 seconds to build instance. [ 580.850373] env[63028]: DEBUG oslo_vmware.api [None req-51a72729-4a98-4e9c-9e35-60196cf706b3 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2734875, 'name': PowerOnVM_Task, 'duration_secs': 0.910059} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 580.850564] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-51a72729-4a98-4e9c-9e35-60196cf706b3 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: f311a533-5c48-410b-ba3b-58f0032c8816] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 580.850879] env[63028]: INFO nova.compute.manager [None req-51a72729-4a98-4e9c-9e35-60196cf706b3 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: f311a533-5c48-410b-ba3b-58f0032c8816] Took 14.53 seconds to spawn the instance on the hypervisor. 
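The "Updating instance_info_cache with network_info:" entry above logs the instance's VIF list as a JSON-style structure. The sketch below shows one way the useful fields (port id, MAC, fixed IPs, tap device) could be read out of such a blob; the literal is abbreviated from that entry, and the parsing approach is only an illustration for working with these records, not code from Nova.

import json

# Abbreviated from the network_info logged above.
network_info_json = '''
[{"id": "735ece36-635c-498c-ad00-02b6ef32bba7",
  "address": "fa:16:3e:de:8d:69",
  "network": {"id": "103ef17f-0425-48ae-98df-250d0d0dd66a",
              "bridge": "br-int",
              "subnets": [{"cidr": "192.168.128.0/28",
                           "ips": [{"address": "192.168.128.7", "type": "fixed"}]}]},
  "type": "ovs",
  "devname": "tap735ece36-63",
  "active": true}]
'''

for vif in json.loads(network_info_json):
    # Collect every fixed IP across all subnets attached to this VIF.
    fixed_ips = [ip["address"]
                 for subnet in vif["network"]["subnets"]
                 for ip in subnet["ips"]]
    print(f'{vif["id"]} {vif["address"]} -> {", ".join(fixed_ips)} on {vif["devname"]}')
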
[ 580.851259] env[63028]: DEBUG nova.compute.manager [None req-51a72729-4a98-4e9c-9e35-60196cf706b3 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: f311a533-5c48-410b-ba3b-58f0032c8816] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 580.852030] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-694ebaf3-cf41-4ecc-ac2c-b14e855d4720 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.890520] env[63028]: DEBUG oslo_vmware.api [None req-708d0fa9-8389-4fb9-b91d-cffdbfa9c678 tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] Task: {'id': task-2734878, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 580.961854] env[63028]: DEBUG nova.scheduler.client.report [None req-5557dd6d-8871-4b04-902a-5fb89e3ecdba tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 580.965550] env[63028]: DEBUG oslo_vmware.api [None req-74efb235-9e92-4f7d-b153-82f0da18a05e tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52ba2696-3e58-bfd9-3110-d8d886298ec6, 'name': SearchDatastore_Task, 'duration_secs': 0.032062} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 580.965867] env[63028]: DEBUG oslo_concurrency.lockutils [None req-74efb235-9e92-4f7d-b153-82f0da18a05e tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 580.966134] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-74efb235-9e92-4f7d-b153-82f0da18a05e tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] [instance: f80df630-327b-4923-a785-5d2e48fe1f19] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 580.966428] env[63028]: DEBUG oslo_concurrency.lockutils [None req-74efb235-9e92-4f7d-b153-82f0da18a05e tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 580.966646] env[63028]: DEBUG oslo_concurrency.lockutils [None req-74efb235-9e92-4f7d-b153-82f0da18a05e tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 580.966930] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-74efb235-9e92-4f7d-b153-82f0da18a05e tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 580.967558] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-29269427-da60-4db8-9e8f-7ac0a542dfe4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.978044] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-74efb235-9e92-4f7d-b153-82f0da18a05e tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 580.980018] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-74efb235-9e92-4f7d-b153-82f0da18a05e tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 580.980018] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cd6bce3d-7d19-43c1-8f59-a81fe77ee884 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.988259] env[63028]: DEBUG oslo_vmware.api [None req-74efb235-9e92-4f7d-b153-82f0da18a05e tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] Waiting for the task: (returnval){ [ 580.988259] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]522fc957-56dd-1d80-1ae8-ea4b01b1e8dc" [ 580.988259] env[63028]: _type = "Task" [ 580.988259] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 581.000844] env[63028]: DEBUG nova.compute.manager [None req-f2243ffd-3e93-4ac3-8cfa-6c07411b0613 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] [instance: 2dcdb36f-6c2d-4f70-8aed-27f6511ef3e8] Start spawning the instance on the hypervisor. {{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 581.002791] env[63028]: DEBUG oslo_vmware.api [None req-74efb235-9e92-4f7d-b153-82f0da18a05e tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]522fc957-56dd-1d80-1ae8-ea4b01b1e8dc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 581.038532] env[63028]: DEBUG nova.virt.hardware [None req-f2243ffd-3e93-4ac3-8cfa-6c07411b0613 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 581.039016] env[63028]: DEBUG nova.virt.hardware [None req-f2243ffd-3e93-4ac3-8cfa-6c07411b0613 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 581.039327] env[63028]: DEBUG nova.virt.hardware [None req-f2243ffd-3e93-4ac3-8cfa-6c07411b0613 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 581.039608] env[63028]: DEBUG nova.virt.hardware [None req-f2243ffd-3e93-4ac3-8cfa-6c07411b0613 tempest-ServerShowV247Test-962587091 
tempest-ServerShowV247Test-962587091-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 581.043024] env[63028]: DEBUG nova.virt.hardware [None req-f2243ffd-3e93-4ac3-8cfa-6c07411b0613 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 581.043024] env[63028]: DEBUG nova.virt.hardware [None req-f2243ffd-3e93-4ac3-8cfa-6c07411b0613 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 581.043024] env[63028]: DEBUG nova.virt.hardware [None req-f2243ffd-3e93-4ac3-8cfa-6c07411b0613 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 581.043024] env[63028]: DEBUG nova.virt.hardware [None req-f2243ffd-3e93-4ac3-8cfa-6c07411b0613 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 581.043024] env[63028]: DEBUG nova.virt.hardware [None req-f2243ffd-3e93-4ac3-8cfa-6c07411b0613 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 581.043474] env[63028]: DEBUG nova.virt.hardware [None req-f2243ffd-3e93-4ac3-8cfa-6c07411b0613 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 581.043474] env[63028]: DEBUG nova.virt.hardware [None req-f2243ffd-3e93-4ac3-8cfa-6c07411b0613 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 581.043474] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65c9dc08-33fd-4a2d-b649-92f2964143db {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.052456] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b42c101c-d67f-4796-b796-58706a360d62 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.073266] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-f2243ffd-3e93-4ac3-8cfa-6c07411b0613 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] [instance: 2dcdb36f-6c2d-4f70-8aed-27f6511ef3e8] Instance VIF info [] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 581.080206] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2243ffd-3e93-4ac3-8cfa-6c07411b0613 
tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Creating folder: Project (4ade631b328246c3a38037adf62f6cc2). Parent ref: group-v550570. {{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 581.083043] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-49ab9f51-244b-4b1e-b062-1b5036bed2cd {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.093831] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-f2243ffd-3e93-4ac3-8cfa-6c07411b0613 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Created folder: Project (4ade631b328246c3a38037adf62f6cc2) in parent group-v550570. [ 581.094245] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2243ffd-3e93-4ac3-8cfa-6c07411b0613 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Creating folder: Instances. Parent ref: group-v550596. {{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 581.094245] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-dcb84bfe-80e0-4a01-88d8-76e870c90bde {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.106681] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-f2243ffd-3e93-4ac3-8cfa-6c07411b0613 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Created folder: Instances in parent group-v550596. [ 581.106681] env[63028]: DEBUG oslo.service.loopingcall [None req-f2243ffd-3e93-4ac3-8cfa-6c07411b0613 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 581.106843] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2dcdb36f-6c2d-4f70-8aed-27f6511ef3e8] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 581.107526] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d431ba3e-be4b-4581-ad34-8948d9b0b4de {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.126255] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 581.126255] env[63028]: value = "task-2734881" [ 581.126255] env[63028]: _type = "Task" [ 581.126255] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 581.136458] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2734881, 'name': CreateVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 581.179138] env[63028]: DEBUG nova.compute.manager [None req-70c27873-d5f4-48d7-97d8-490423e085b3 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] [instance: 0dbafad1-ab21-439d-bc8e-e447ac33304e] Starting instance... 
{{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 581.239037] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7796d56a-98c6-481b-9d55-c9c5a993a07a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Releasing lock "refresh_cache-e20ed04f-205b-4aa9-b8b6-e352cd237412" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 581.239037] env[63028]: DEBUG nova.compute.manager [None req-7796d56a-98c6-481b-9d55-c9c5a993a07a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: e20ed04f-205b-4aa9-b8b6-e352cd237412] Instance network_info: |[{"id": "735ece36-635c-498c-ad00-02b6ef32bba7", "address": "fa:16:3e:de:8d:69", "network": {"id": "103ef17f-0425-48ae-98df-250d0d0dd66a", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-13124710-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43b7c1be3c4343a4b4f288a355170873", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3261e15f-7e45-4516-acfd-341bab16e3cf", "external-id": "nsx-vlan-transportzone-783", "segmentation_id": 783, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap735ece36-63", "ovs_interfaceid": "735ece36-635c-498c-ad00-02b6ef32bba7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 581.239260] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-7796d56a-98c6-481b-9d55-c9c5a993a07a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: e20ed04f-205b-4aa9-b8b6-e352cd237412] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:de:8d:69', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3261e15f-7e45-4516-acfd-341bab16e3cf', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '735ece36-635c-498c-ad00-02b6ef32bba7', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 581.246372] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-7796d56a-98c6-481b-9d55-c9c5a993a07a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Creating folder: Project (43b7c1be3c4343a4b4f288a355170873). Parent ref: group-v550570. 
{{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 581.247335] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e623befe-335d-4240-9c08-6e48d1910826 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.254143] env[63028]: DEBUG oslo_concurrency.lockutils [None req-19bb96ef-6a90-46d6-b14e-187dceb20ef8 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 581.259830] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-7796d56a-98c6-481b-9d55-c9c5a993a07a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Created folder: Project (43b7c1be3c4343a4b4f288a355170873) in parent group-v550570. [ 581.260022] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-7796d56a-98c6-481b-9d55-c9c5a993a07a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Creating folder: Instances. Parent ref: group-v550599. {{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 581.260258] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fd2811ef-82d2-4344-97c9-79f9a32ef03b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.269702] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-7796d56a-98c6-481b-9d55-c9c5a993a07a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Created folder: Instances in parent group-v550599. [ 581.269980] env[63028]: DEBUG oslo.service.loopingcall [None req-7796d56a-98c6-481b-9d55-c9c5a993a07a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 581.270143] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e20ed04f-205b-4aa9-b8b6-e352cd237412] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 581.270348] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d0252a45-d8c7-4321-9f0f-5cf17718df2d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.292171] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 581.292171] env[63028]: value = "task-2734884" [ 581.292171] env[63028]: _type = "Task" [ 581.292171] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 581.299826] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2734884, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 581.314576] env[63028]: DEBUG oslo_concurrency.lockutils [None req-147062de-91cc-4792-8607-76dedea7efea tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] Lock "a167df01-05e4-453d-8800-9c104d912474" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.998s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 581.375082] env[63028]: INFO nova.compute.manager [None req-51a72729-4a98-4e9c-9e35-60196cf706b3 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: f311a533-5c48-410b-ba3b-58f0032c8816] Took 20.90 seconds to build instance. [ 581.391341] env[63028]: DEBUG oslo_vmware.api [None req-708d0fa9-8389-4fb9-b91d-cffdbfa9c678 tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] Task: {'id': task-2734878, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.692306} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 581.391341] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-708d0fa9-8389-4fb9-b91d-cffdbfa9c678 tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] 1eeb96d1-6e03-4192-a9db-955444519fd7/1eeb96d1-6e03-4192-a9db-955444519fd7.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 581.391521] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-708d0fa9-8389-4fb9-b91d-cffdbfa9c678 tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] [instance: 1eeb96d1-6e03-4192-a9db-955444519fd7] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 581.391783] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f9ba4912-f9a0-4bb5-a017-e121ec358b20 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.400896] env[63028]: DEBUG oslo_vmware.api [None req-708d0fa9-8389-4fb9-b91d-cffdbfa9c678 tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] Waiting for the task: (returnval){ [ 581.400896] env[63028]: value = "task-2734885" [ 581.400896] env[63028]: _type = "Task" [ 581.400896] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 581.413850] env[63028]: DEBUG oslo_vmware.api [None req-708d0fa9-8389-4fb9-b91d-cffdbfa9c678 tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] Task: {'id': task-2734885, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 581.467117] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5557dd6d-8871-4b04-902a-5fb89e3ecdba tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.496s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 581.468016] env[63028]: DEBUG nova.compute.manager [None req-5557dd6d-8871-4b04-902a-5fb89e3ecdba tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] [instance: 67440140-a619-41f2-98fe-eff23e8ad8a5] Start building networks asynchronously for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 581.472644] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c0eb0318-b4ad-46b7-97d9-49268588965f tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.149s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 581.477452] env[63028]: INFO nova.compute.claims [None req-c0eb0318-b4ad-46b7-97d9-49268588965f tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] [instance: d663c2df-ae54-4c50-a70f-e2180700c700] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 581.518451] env[63028]: DEBUG oslo_vmware.api [None req-74efb235-9e92-4f7d-b153-82f0da18a05e tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]522fc957-56dd-1d80-1ae8-ea4b01b1e8dc, 'name': SearchDatastore_Task, 'duration_secs': 0.025426} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 581.518451] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bf74bf59-3e46-4ff1-9c4f-8f69346bbae5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.526713] env[63028]: DEBUG oslo_vmware.api [None req-74efb235-9e92-4f7d-b153-82f0da18a05e tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] Waiting for the task: (returnval){ [ 581.526713] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52435d9a-a2d0-0ec3-65c3-55f6df40fe9d" [ 581.526713] env[63028]: _type = "Task" [ 581.526713] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 581.539501] env[63028]: DEBUG oslo_vmware.api [None req-74efb235-9e92-4f7d-b153-82f0da18a05e tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52435d9a-a2d0-0ec3-65c3-55f6df40fe9d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 581.647444] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2734881, 'name': CreateVM_Task, 'duration_secs': 0.500418} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 581.649522] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2dcdb36f-6c2d-4f70-8aed-27f6511ef3e8] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 581.649522] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f2243ffd-3e93-4ac3-8cfa-6c07411b0613 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 581.649522] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f2243ffd-3e93-4ac3-8cfa-6c07411b0613 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 581.649522] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f2243ffd-3e93-4ac3-8cfa-6c07411b0613 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 581.649522] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-253596d4-772f-402c-b88d-08f007eff950 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.662544] env[63028]: DEBUG oslo_concurrency.lockutils [None req-fa9cf96e-1fa7-41d8-8fc5-4ef011c3de54 tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] Acquiring lock "4a782483-c24e-44db-b697-856c69cc4a13" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 581.662752] env[63028]: DEBUG oslo_concurrency.lockutils [None req-fa9cf96e-1fa7-41d8-8fc5-4ef011c3de54 tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] Lock "4a782483-c24e-44db-b697-856c69cc4a13" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 581.666125] env[63028]: DEBUG oslo_vmware.api [None req-f2243ffd-3e93-4ac3-8cfa-6c07411b0613 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Waiting for the task: (returnval){ [ 581.666125] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52efaf92-45ef-2bac-00bd-7c79dcc2c6dc" [ 581.666125] env[63028]: _type = "Task" [ 581.666125] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 581.674973] env[63028]: DEBUG oslo_vmware.api [None req-f2243ffd-3e93-4ac3-8cfa-6c07411b0613 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52efaf92-45ef-2bac-00bd-7c79dcc2c6dc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 581.713671] env[63028]: DEBUG oslo_concurrency.lockutils [None req-70c27873-d5f4-48d7-97d8-490423e085b3 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 581.806219] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2734884, 'name': CreateVM_Task, 'duration_secs': 0.470401} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 581.806415] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e20ed04f-205b-4aa9-b8b6-e352cd237412] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 581.807274] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7796d56a-98c6-481b-9d55-c9c5a993a07a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 581.818995] env[63028]: DEBUG nova.compute.manager [None req-d380ac33-3a3f-419f-b4d1-ad7edb67b04d tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] [instance: c9cc1ac7-06c6-415b-86ce-daf4849bfc05] Starting instance... {{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 581.877973] env[63028]: DEBUG oslo_concurrency.lockutils [None req-51a72729-4a98-4e9c-9e35-60196cf706b3 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Lock "f311a533-5c48-410b-ba3b-58f0032c8816" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.413s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 581.887779] env[63028]: DEBUG nova.compute.manager [req-8fde8cf3-ac09-41f5-8f71-d41386f73d7e req-859549e6-8dab-420c-9946-7ffc02ab43a7 service nova] [instance: f80df630-327b-4923-a785-5d2e48fe1f19] Received event network-changed-0756fdb5-7e3d-45e1-b815-668c72c5e804 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 581.888076] env[63028]: DEBUG nova.compute.manager [req-8fde8cf3-ac09-41f5-8f71-d41386f73d7e req-859549e6-8dab-420c-9946-7ffc02ab43a7 service nova] [instance: f80df630-327b-4923-a785-5d2e48fe1f19] Refreshing instance network info cache due to event network-changed-0756fdb5-7e3d-45e1-b815-668c72c5e804. 
{{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 581.888309] env[63028]: DEBUG oslo_concurrency.lockutils [req-8fde8cf3-ac09-41f5-8f71-d41386f73d7e req-859549e6-8dab-420c-9946-7ffc02ab43a7 service nova] Acquiring lock "refresh_cache-f80df630-327b-4923-a785-5d2e48fe1f19" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 581.888465] env[63028]: DEBUG oslo_concurrency.lockutils [req-8fde8cf3-ac09-41f5-8f71-d41386f73d7e req-859549e6-8dab-420c-9946-7ffc02ab43a7 service nova] Acquired lock "refresh_cache-f80df630-327b-4923-a785-5d2e48fe1f19" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 581.888629] env[63028]: DEBUG nova.network.neutron [req-8fde8cf3-ac09-41f5-8f71-d41386f73d7e req-859549e6-8dab-420c-9946-7ffc02ab43a7 service nova] [instance: f80df630-327b-4923-a785-5d2e48fe1f19] Refreshing network info cache for port 0756fdb5-7e3d-45e1-b815-668c72c5e804 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 581.916989] env[63028]: DEBUG oslo_vmware.api [None req-708d0fa9-8389-4fb9-b91d-cffdbfa9c678 tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] Task: {'id': task-2734885, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072215} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 581.917298] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-708d0fa9-8389-4fb9-b91d-cffdbfa9c678 tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] [instance: 1eeb96d1-6e03-4192-a9db-955444519fd7] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 581.918144] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-654d847b-7032-49d8-9f1d-fa2063dd33e5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.942994] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-708d0fa9-8389-4fb9-b91d-cffdbfa9c678 tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] [instance: 1eeb96d1-6e03-4192-a9db-955444519fd7] Reconfiguring VM instance instance-00000009 to attach disk [datastore1] 1eeb96d1-6e03-4192-a9db-955444519fd7/1eeb96d1-6e03-4192-a9db-955444519fd7.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 581.943854] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cc679cea-9cd9-4f79-a52b-e17f58418a5c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.966577] env[63028]: DEBUG oslo_vmware.api [None req-708d0fa9-8389-4fb9-b91d-cffdbfa9c678 tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] Waiting for the task: (returnval){ [ 581.966577] env[63028]: value = "task-2734886" [ 581.966577] env[63028]: _type = "Task" [ 581.966577] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 581.973529] env[63028]: DEBUG nova.compute.utils [None req-5557dd6d-8871-4b04-902a-5fb89e3ecdba tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 581.976682] env[63028]: DEBUG nova.compute.manager [None req-5557dd6d-8871-4b04-902a-5fb89e3ecdba tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] [instance: 67440140-a619-41f2-98fe-eff23e8ad8a5] Allocating IP information in the background. {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 581.976900] env[63028]: DEBUG nova.network.neutron [None req-5557dd6d-8871-4b04-902a-5fb89e3ecdba tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] [instance: 67440140-a619-41f2-98fe-eff23e8ad8a5] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 581.988824] env[63028]: DEBUG oslo_vmware.api [None req-708d0fa9-8389-4fb9-b91d-cffdbfa9c678 tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] Task: {'id': task-2734886, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 582.046066] env[63028]: DEBUG oslo_vmware.api [None req-74efb235-9e92-4f7d-b153-82f0da18a05e tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52435d9a-a2d0-0ec3-65c3-55f6df40fe9d, 'name': SearchDatastore_Task, 'duration_secs': 0.014618} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 582.046548] env[63028]: DEBUG oslo_concurrency.lockutils [None req-74efb235-9e92-4f7d-b153-82f0da18a05e tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 582.046986] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-74efb235-9e92-4f7d-b153-82f0da18a05e tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] f80df630-327b-4923-a785-5d2e48fe1f19/f80df630-327b-4923-a785-5d2e48fe1f19.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 582.047386] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0124926e-7420-4210-ab3d-1c35af8e2c9a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.056805] env[63028]: DEBUG oslo_vmware.api [None req-74efb235-9e92-4f7d-b153-82f0da18a05e tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] Waiting for the task: (returnval){ [ 582.056805] env[63028]: value = "task-2734887" [ 582.056805] env[63028]: _type = "Task" [ 582.056805] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 582.067644] env[63028]: DEBUG oslo_vmware.api [None req-74efb235-9e92-4f7d-b153-82f0da18a05e tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] Task: {'id': task-2734887, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 582.176595] env[63028]: DEBUG oslo_vmware.api [None req-f2243ffd-3e93-4ac3-8cfa-6c07411b0613 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52efaf92-45ef-2bac-00bd-7c79dcc2c6dc, 'name': SearchDatastore_Task, 'duration_secs': 0.0133} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 582.176922] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f2243ffd-3e93-4ac3-8cfa-6c07411b0613 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 582.177239] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-f2243ffd-3e93-4ac3-8cfa-6c07411b0613 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] [instance: 2dcdb36f-6c2d-4f70-8aed-27f6511ef3e8] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 582.177406] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f2243ffd-3e93-4ac3-8cfa-6c07411b0613 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 582.177551] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f2243ffd-3e93-4ac3-8cfa-6c07411b0613 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 582.178157] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-f2243ffd-3e93-4ac3-8cfa-6c07411b0613 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 582.178157] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7796d56a-98c6-481b-9d55-c9c5a993a07a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 582.178350] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7796d56a-98c6-481b-9d55-c9c5a993a07a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 582.178569] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2d054764-20cb-49b2-bc73-a264c1ae95e0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.180742] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-90f53419-00e7-4054-9eb8-09e8fd0d8aa5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.188331] env[63028]: DEBUG nova.policy [None 
req-5557dd6d-8871-4b04-902a-5fb89e3ecdba tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9985d6dbdf464c99811064f6bdd790ea', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1650f0c2f7654117abc43478061bbc00', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 582.191436] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-f2243ffd-3e93-4ac3-8cfa-6c07411b0613 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 582.192359] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-f2243ffd-3e93-4ac3-8cfa-6c07411b0613 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 582.192455] env[63028]: DEBUG oslo_vmware.api [None req-7796d56a-98c6-481b-9d55-c9c5a993a07a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Waiting for the task: (returnval){ [ 582.192455] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]521bb7e2-62ae-fb1e-c366-950a11bc4cc3" [ 582.192455] env[63028]: _type = "Task" [ 582.192455] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 582.192816] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-093ef1ab-a895-43b1-b813-676147c1d4f3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.213344] env[63028]: DEBUG oslo_vmware.api [None req-7796d56a-98c6-481b-9d55-c9c5a993a07a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]521bb7e2-62ae-fb1e-c366-950a11bc4cc3, 'name': SearchDatastore_Task, 'duration_secs': 0.016593} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 582.213344] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7796d56a-98c6-481b-9d55-c9c5a993a07a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 582.213537] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-7796d56a-98c6-481b-9d55-c9c5a993a07a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: e20ed04f-205b-4aa9-b8b6-e352cd237412] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 582.213764] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7796d56a-98c6-481b-9d55-c9c5a993a07a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 582.214083] env[63028]: DEBUG oslo_vmware.api [None req-f2243ffd-3e93-4ac3-8cfa-6c07411b0613 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Waiting for the task: (returnval){ [ 582.214083] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]520abc5f-074b-0b66-29d2-0e23467e8ce5" [ 582.214083] env[63028]: _type = "Task" [ 582.214083] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 582.229077] env[63028]: DEBUG oslo_vmware.api [None req-f2243ffd-3e93-4ac3-8cfa-6c07411b0613 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]520abc5f-074b-0b66-29d2-0e23467e8ce5, 'name': SearchDatastore_Task, 'duration_secs': 0.018421} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 582.230116] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a5285b0b-f786-4216-b715-6de5f4933538 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.236035] env[63028]: DEBUG oslo_vmware.api [None req-f2243ffd-3e93-4ac3-8cfa-6c07411b0613 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Waiting for the task: (returnval){ [ 582.236035] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5283817c-f04f-6dfe-5bef-38915942bdcf" [ 582.236035] env[63028]: _type = "Task" [ 582.236035] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 582.248216] env[63028]: DEBUG oslo_vmware.api [None req-f2243ffd-3e93-4ac3-8cfa-6c07411b0613 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5283817c-f04f-6dfe-5bef-38915942bdcf, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 582.356262] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d380ac33-3a3f-419f-b4d1-ad7edb67b04d tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 582.381583] env[63028]: DEBUG nova.compute.manager [None req-4b7d0a7d-24c8-4fd0-b2fc-d929be4c8481 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] [instance: c7a3f2c6-8368-49cc-9737-ea1d836f1783] Starting instance... {{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 582.481745] env[63028]: DEBUG nova.compute.manager [None req-5557dd6d-8871-4b04-902a-5fb89e3ecdba tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] [instance: 67440140-a619-41f2-98fe-eff23e8ad8a5] Start building block device mappings for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 582.486361] env[63028]: DEBUG oslo_vmware.api [None req-708d0fa9-8389-4fb9-b91d-cffdbfa9c678 tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] Task: {'id': task-2734886, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 582.574416] env[63028]: DEBUG oslo_vmware.api [None req-74efb235-9e92-4f7d-b153-82f0da18a05e tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] Task: {'id': task-2734887, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 582.722617] env[63028]: INFO nova.compute.manager [None req-c538ab0a-f047-4352-93f7-b849465565d9 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] [instance: 94b1bf30-0f9b-4197-99ff-6631a13ab2d1] Rebuilding instance [ 582.760542] env[63028]: DEBUG oslo_vmware.api [None req-f2243ffd-3e93-4ac3-8cfa-6c07411b0613 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5283817c-f04f-6dfe-5bef-38915942bdcf, 'name': SearchDatastore_Task, 'duration_secs': 0.010405} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 582.760542] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f2243ffd-3e93-4ac3-8cfa-6c07411b0613 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 582.760542] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2243ffd-3e93-4ac3-8cfa-6c07411b0613 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] 2dcdb36f-6c2d-4f70-8aed-27f6511ef3e8/2dcdb36f-6c2d-4f70-8aed-27f6511ef3e8.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 582.764740] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7796d56a-98c6-481b-9d55-c9c5a993a07a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 582.764882] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-7796d56a-98c6-481b-9d55-c9c5a993a07a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 582.765153] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4182d5b3-32cd-449e-a90f-be8ee0f30766 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.769511] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3cb98654-dd47-4394-b2d5-87e7b91bc41c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.783827] env[63028]: DEBUG oslo_vmware.api [None req-f2243ffd-3e93-4ac3-8cfa-6c07411b0613 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Waiting for the task: (returnval){ [ 582.783827] env[63028]: value = "task-2734888" [ 582.783827] env[63028]: _type = "Task" [ 582.783827] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 582.786669] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-7796d56a-98c6-481b-9d55-c9c5a993a07a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 582.786925] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-7796d56a-98c6-481b-9d55-c9c5a993a07a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 582.793052] env[63028]: DEBUG nova.compute.manager [None req-c538ab0a-f047-4352-93f7-b849465565d9 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] [instance: 94b1bf30-0f9b-4197-99ff-6631a13ab2d1] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 582.797990] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d05bf5e5-22d6-4960-98ae-0d43534c636d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.801564] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1a2d2f1-d5e2-4461-b6e7-8c6689dde3e3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.812976] env[63028]: DEBUG oslo_vmware.api [None req-f2243ffd-3e93-4ac3-8cfa-6c07411b0613 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Task: {'id': task-2734888, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 582.819176] env[63028]: DEBUG oslo_vmware.api [None req-7796d56a-98c6-481b-9d55-c9c5a993a07a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Waiting for the task: (returnval){ [ 582.819176] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5271f397-5d34-e865-ca4f-ce7336ec41c4" [ 582.819176] env[63028]: _type = "Task" [ 582.819176] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 582.833662] env[63028]: DEBUG oslo_vmware.api [None req-7796d56a-98c6-481b-9d55-c9c5a993a07a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5271f397-5d34-e865-ca4f-ce7336ec41c4, 'name': SearchDatastore_Task, 'duration_secs': 0.012647} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 582.835243] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-569b4b7b-7bab-438b-8e4f-d199398bb736 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.840792] env[63028]: DEBUG oslo_vmware.api [None req-7796d56a-98c6-481b-9d55-c9c5a993a07a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Waiting for the task: (returnval){ [ 582.840792] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52b3b957-46dd-08df-d6fe-dc8836f6f075" [ 582.840792] env[63028]: _type = "Task" [ 582.840792] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 582.853736] env[63028]: DEBUG oslo_vmware.api [None req-7796d56a-98c6-481b-9d55-c9c5a993a07a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52b3b957-46dd-08df-d6fe-dc8836f6f075, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 582.872244] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cec809b-577c-48f2-b83c-fc8d13733408 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.880750] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92d19e3e-3dd9-4610-a366-95fb96af39fe {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.935255] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4b7d0a7d-24c8-4fd0-b2fc-d929be4c8481 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 582.935255] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-915d15cc-14b1-4090-99b0-4d9f1abea40d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.942913] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d2194f8-1a51-42b8-ba95-fd530c88bf86 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.958959] env[63028]: DEBUG nova.compute.provider_tree [None req-c0eb0318-b4ad-46b7-97d9-49268588965f tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 582.961001] env[63028]: DEBUG nova.network.neutron [req-8fde8cf3-ac09-41f5-8f71-d41386f73d7e req-859549e6-8dab-420c-9946-7ffc02ab43a7 service nova] [instance: f80df630-327b-4923-a785-5d2e48fe1f19] Updated VIF entry in instance network info cache for port 0756fdb5-7e3d-45e1-b815-668c72c5e804. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 582.961330] env[63028]: DEBUG nova.network.neutron [req-8fde8cf3-ac09-41f5-8f71-d41386f73d7e req-859549e6-8dab-420c-9946-7ffc02ab43a7 service nova] [instance: f80df630-327b-4923-a785-5d2e48fe1f19] Updating instance_info_cache with network_info: [{"id": "0756fdb5-7e3d-45e1-b815-668c72c5e804", "address": "fa:16:3e:db:8f:bc", "network": {"id": "968b8886-204c-4bbb-b91b-d4a4bd882174", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-2079634116-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bb987fc85bab4863a97bfabd29b5b141", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2ede0e6-8d7a-4018-bb37-25bf388e9867", "external-id": "nsx-vlan-transportzone-945", "segmentation_id": 945, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0756fdb5-7e", "ovs_interfaceid": "0756fdb5-7e3d-45e1-b815-668c72c5e804", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 582.980285] env[63028]: DEBUG oslo_vmware.api [None req-708d0fa9-8389-4fb9-b91d-cffdbfa9c678 tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] Task: {'id': task-2734886, 'name': ReconfigVM_Task, 'duration_secs': 0.644892} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 582.980557] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-708d0fa9-8389-4fb9-b91d-cffdbfa9c678 tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] [instance: 1eeb96d1-6e03-4192-a9db-955444519fd7] Reconfigured VM instance instance-00000009 to attach disk [datastore1] 1eeb96d1-6e03-4192-a9db-955444519fd7/1eeb96d1-6e03-4192-a9db-955444519fd7.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 582.981230] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3a4953d7-69e4-4991-a70c-e91be5bae0ee {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.991727] env[63028]: DEBUG oslo_vmware.api [None req-708d0fa9-8389-4fb9-b91d-cffdbfa9c678 tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] Waiting for the task: (returnval){ [ 582.991727] env[63028]: value = "task-2734889" [ 582.991727] env[63028]: _type = "Task" [ 582.991727] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 583.002446] env[63028]: DEBUG oslo_vmware.api [None req-708d0fa9-8389-4fb9-b91d-cffdbfa9c678 tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] Task: {'id': task-2734889, 'name': Rename_Task} progress is 6%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 583.042330] env[63028]: DEBUG nova.network.neutron [None req-5557dd6d-8871-4b04-902a-5fb89e3ecdba tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] [instance: 67440140-a619-41f2-98fe-eff23e8ad8a5] Successfully created port: f7a43b4b-f49a-4b79-b488-55fd4852195c {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 583.070222] env[63028]: DEBUG oslo_vmware.api [None req-74efb235-9e92-4f7d-b153-82f0da18a05e tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] Task: {'id': task-2734887, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.720453} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 583.070523] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-74efb235-9e92-4f7d-b153-82f0da18a05e tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] f80df630-327b-4923-a785-5d2e48fe1f19/f80df630-327b-4923-a785-5d2e48fe1f19.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 583.070801] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-74efb235-9e92-4f7d-b153-82f0da18a05e tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] [instance: f80df630-327b-4923-a785-5d2e48fe1f19] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 583.071088] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d6425eab-bc2a-4d30-bf9b-fcfc3b8654a4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.082437] env[63028]: DEBUG oslo_vmware.api [None req-74efb235-9e92-4f7d-b153-82f0da18a05e tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] Waiting for the task: (returnval){ [ 583.082437] env[63028]: value = "task-2734890" [ 583.082437] env[63028]: _type = "Task" [ 583.082437] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 583.084704] env[63028]: DEBUG nova.compute.manager [req-a7d549bb-eaae-4dd6-b739-a704dcd34bd1 req-f1f89478-4360-4628-a13c-1d4334b2ae36 service nova] [instance: e20ed04f-205b-4aa9-b8b6-e352cd237412] Received event network-changed-735ece36-635c-498c-ad00-02b6ef32bba7 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 583.084957] env[63028]: DEBUG nova.compute.manager [req-a7d549bb-eaae-4dd6-b739-a704dcd34bd1 req-f1f89478-4360-4628-a13c-1d4334b2ae36 service nova] [instance: e20ed04f-205b-4aa9-b8b6-e352cd237412] Refreshing instance network info cache due to event network-changed-735ece36-635c-498c-ad00-02b6ef32bba7. 
{{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 583.085530] env[63028]: DEBUG oslo_concurrency.lockutils [req-a7d549bb-eaae-4dd6-b739-a704dcd34bd1 req-f1f89478-4360-4628-a13c-1d4334b2ae36 service nova] Acquiring lock "refresh_cache-e20ed04f-205b-4aa9-b8b6-e352cd237412" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 583.085530] env[63028]: DEBUG oslo_concurrency.lockutils [req-a7d549bb-eaae-4dd6-b739-a704dcd34bd1 req-f1f89478-4360-4628-a13c-1d4334b2ae36 service nova] Acquired lock "refresh_cache-e20ed04f-205b-4aa9-b8b6-e352cd237412" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 583.085792] env[63028]: DEBUG nova.network.neutron [req-a7d549bb-eaae-4dd6-b739-a704dcd34bd1 req-f1f89478-4360-4628-a13c-1d4334b2ae36 service nova] [instance: e20ed04f-205b-4aa9-b8b6-e352cd237412] Refreshing network info cache for port 735ece36-635c-498c-ad00-02b6ef32bba7 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 583.101968] env[63028]: DEBUG oslo_vmware.api [None req-74efb235-9e92-4f7d-b153-82f0da18a05e tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] Task: {'id': task-2734890, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 583.298477] env[63028]: DEBUG oslo_vmware.api [None req-f2243ffd-3e93-4ac3-8cfa-6c07411b0613 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Task: {'id': task-2734888, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 583.356902] env[63028]: DEBUG oslo_vmware.api [None req-7796d56a-98c6-481b-9d55-c9c5a993a07a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52b3b957-46dd-08df-d6fe-dc8836f6f075, 'name': SearchDatastore_Task, 'duration_secs': 0.01244} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 583.357224] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7796d56a-98c6-481b-9d55-c9c5a993a07a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 583.357644] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-7796d56a-98c6-481b-9d55-c9c5a993a07a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] e20ed04f-205b-4aa9-b8b6-e352cd237412/e20ed04f-205b-4aa9-b8b6-e352cd237412.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 583.357765] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-08beb594-cfbf-491a-8f74-568353417daf {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.371162] env[63028]: DEBUG oslo_vmware.api [None req-7796d56a-98c6-481b-9d55-c9c5a993a07a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Waiting for the task: (returnval){ [ 583.371162] env[63028]: value = "task-2734891" [ 583.371162] env[63028]: _type = "Task" [ 583.371162] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 583.387580] env[63028]: DEBUG oslo_vmware.api [None req-7796d56a-98c6-481b-9d55-c9c5a993a07a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Task: {'id': task-2734891, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 583.403102] env[63028]: DEBUG oslo_concurrency.lockutils [None req-75180ae5-0e94-4a4f-a14c-d6517b6ebb49 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Acquiring lock "f311a533-5c48-410b-ba3b-58f0032c8816" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 583.403102] env[63028]: DEBUG oslo_concurrency.lockutils [None req-75180ae5-0e94-4a4f-a14c-d6517b6ebb49 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Lock "f311a533-5c48-410b-ba3b-58f0032c8816" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 583.403509] env[63028]: DEBUG nova.compute.manager [None req-75180ae5-0e94-4a4f-a14c-d6517b6ebb49 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: f311a533-5c48-410b-ba3b-58f0032c8816] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 583.404600] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5c495ef-4753-4853-8e63-8f930b480713 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.412926] env[63028]: DEBUG nova.compute.manager [None req-75180ae5-0e94-4a4f-a14c-d6517b6ebb49 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: f311a533-5c48-410b-ba3b-58f0032c8816] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=63028) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 583.413338] env[63028]: DEBUG nova.objects.instance [None req-75180ae5-0e94-4a4f-a14c-d6517b6ebb49 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Lazy-loading 'flavor' on Instance uuid f311a533-5c48-410b-ba3b-58f0032c8816 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 583.464572] env[63028]: DEBUG nova.scheduler.client.report [None req-c0eb0318-b4ad-46b7-97d9-49268588965f tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 583.472947] env[63028]: DEBUG oslo_concurrency.lockutils [req-8fde8cf3-ac09-41f5-8f71-d41386f73d7e req-859549e6-8dab-420c-9946-7ffc02ab43a7 service nova] Releasing lock "refresh_cache-f80df630-327b-4923-a785-5d2e48fe1f19" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 583.493203] env[63028]: DEBUG nova.compute.manager [None req-5557dd6d-8871-4b04-902a-5fb89e3ecdba 
tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] [instance: 67440140-a619-41f2-98fe-eff23e8ad8a5] Start spawning the instance on the hypervisor. {{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 583.510051] env[63028]: DEBUG oslo_vmware.api [None req-708d0fa9-8389-4fb9-b91d-cffdbfa9c678 tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] Task: {'id': task-2734889, 'name': Rename_Task, 'duration_secs': 0.200389} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 583.510051] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-708d0fa9-8389-4fb9-b91d-cffdbfa9c678 tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] [instance: 1eeb96d1-6e03-4192-a9db-955444519fd7] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 583.510051] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-32c9f8d0-0833-4a2c-a1fb-1d14cddc7472 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.517319] env[63028]: DEBUG oslo_vmware.api [None req-708d0fa9-8389-4fb9-b91d-cffdbfa9c678 tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] Waiting for the task: (returnval){ [ 583.517319] env[63028]: value = "task-2734892" [ 583.517319] env[63028]: _type = "Task" [ 583.517319] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 583.528560] env[63028]: DEBUG oslo_vmware.api [None req-708d0fa9-8389-4fb9-b91d-cffdbfa9c678 tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] Task: {'id': task-2734892, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 583.540018] env[63028]: DEBUG nova.virt.hardware [None req-5557dd6d-8871-4b04-902a-5fb89e3ecdba tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 583.540018] env[63028]: DEBUG nova.virt.hardware [None req-5557dd6d-8871-4b04-902a-5fb89e3ecdba tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 583.540018] env[63028]: DEBUG nova.virt.hardware [None req-5557dd6d-8871-4b04-902a-5fb89e3ecdba tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 583.540253] env[63028]: DEBUG nova.virt.hardware [None req-5557dd6d-8871-4b04-902a-5fb89e3ecdba tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 583.540253] env[63028]: DEBUG nova.virt.hardware [None req-5557dd6d-8871-4b04-902a-5fb89e3ecdba tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 583.540253] env[63028]: DEBUG nova.virt.hardware [None req-5557dd6d-8871-4b04-902a-5fb89e3ecdba tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 583.540253] env[63028]: DEBUG nova.virt.hardware [None req-5557dd6d-8871-4b04-902a-5fb89e3ecdba tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 583.540253] env[63028]: DEBUG nova.virt.hardware [None req-5557dd6d-8871-4b04-902a-5fb89e3ecdba tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 
583.540386] env[63028]: DEBUG nova.virt.hardware [None req-5557dd6d-8871-4b04-902a-5fb89e3ecdba tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 583.540707] env[63028]: DEBUG nova.virt.hardware [None req-5557dd6d-8871-4b04-902a-5fb89e3ecdba tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 583.541221] env[63028]: DEBUG nova.virt.hardware [None req-5557dd6d-8871-4b04-902a-5fb89e3ecdba tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 583.544135] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-867f4d72-e27f-4d68-8380-9d7968921c94 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.553055] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-808ef7b8-18e7-4d2c-8a9c-93708b74a050 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.594749] env[63028]: DEBUG oslo_vmware.api [None req-74efb235-9e92-4f7d-b153-82f0da18a05e tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] Task: {'id': task-2734890, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.14203} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 583.595561] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-74efb235-9e92-4f7d-b153-82f0da18a05e tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] [instance: f80df630-327b-4923-a785-5d2e48fe1f19] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 583.598836] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c36a86b-e33e-4c51-a93b-287093ebae44 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.631903] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-74efb235-9e92-4f7d-b153-82f0da18a05e tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] [instance: f80df630-327b-4923-a785-5d2e48fe1f19] Reconfiguring VM instance instance-00000007 to attach disk [datastore2] f80df630-327b-4923-a785-5d2e48fe1f19/f80df630-327b-4923-a785-5d2e48fe1f19.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 583.631903] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-37a69b20-69a0-4376-8585-f2fee790975e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.651908] env[63028]: DEBUG oslo_vmware.api [None req-74efb235-9e92-4f7d-b153-82f0da18a05e tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] Waiting for the task: (returnval){ [ 583.651908] env[63028]: value = "task-2734893" [ 583.651908] env[63028]: _type = "Task" [ 583.651908] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 583.663527] env[63028]: DEBUG oslo_vmware.api [None req-74efb235-9e92-4f7d-b153-82f0da18a05e tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] Task: {'id': task-2734893, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 583.800879] env[63028]: DEBUG oslo_vmware.api [None req-f2243ffd-3e93-4ac3-8cfa-6c07411b0613 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Task: {'id': task-2734888, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.647515} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 583.801319] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2243ffd-3e93-4ac3-8cfa-6c07411b0613 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] 2dcdb36f-6c2d-4f70-8aed-27f6511ef3e8/2dcdb36f-6c2d-4f70-8aed-27f6511ef3e8.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 583.802998] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-f2243ffd-3e93-4ac3-8cfa-6c07411b0613 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] [instance: 2dcdb36f-6c2d-4f70-8aed-27f6511ef3e8] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 583.802998] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-07bf1a79-d0a3-4dfb-b347-4e839a30ea8b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.812898] env[63028]: DEBUG oslo_vmware.api [None req-f2243ffd-3e93-4ac3-8cfa-6c07411b0613 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Waiting for the task: (returnval){ [ 583.812898] env[63028]: value = "task-2734894" [ 583.812898] env[63028]: _type = "Task" [ 583.812898] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 583.828161] env[63028]: DEBUG oslo_vmware.api [None req-f2243ffd-3e93-4ac3-8cfa-6c07411b0613 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Task: {'id': task-2734894, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 583.828161] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-c538ab0a-f047-4352-93f7-b849465565d9 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] [instance: 94b1bf30-0f9b-4197-99ff-6631a13ab2d1] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 583.828345] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d993d94e-2796-4a54-911d-f7706f750f25 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.841352] env[63028]: DEBUG oslo_vmware.api [None req-c538ab0a-f047-4352-93f7-b849465565d9 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Waiting for the task: (returnval){ [ 583.841352] env[63028]: value = "task-2734895" [ 583.841352] env[63028]: _type = "Task" [ 583.841352] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 583.856363] env[63028]: DEBUG oslo_vmware.api [None req-c538ab0a-f047-4352-93f7-b849465565d9 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Task: {'id': task-2734895, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 583.886126] env[63028]: DEBUG oslo_vmware.api [None req-7796d56a-98c6-481b-9d55-c9c5a993a07a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Task: {'id': task-2734891, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 583.971569] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c0eb0318-b4ad-46b7-97d9-49268588965f tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.500s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 583.972226] env[63028]: DEBUG nova.compute.manager [None req-c0eb0318-b4ad-46b7-97d9-49268588965f tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] [instance: d663c2df-ae54-4c50-a70f-e2180700c700] Start building networks asynchronously for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 583.977515] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a4a45f73-e5da-48b3-b2dc-442bd505e719 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.185s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 583.978039] env[63028]: DEBUG nova.objects.instance [None req-a4a45f73-e5da-48b3-b2dc-442bd505e719 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Lazy-loading 'resources' on Instance uuid 03a19e41-1146-4560-8d93-16a23aa952da {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 584.035468] env[63028]: DEBUG oslo_vmware.api [None req-708d0fa9-8389-4fb9-b91d-cffdbfa9c678 tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] Task: {'id': task-2734892, 'name': PowerOnVM_Task} progress is 1%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 584.140229] env[63028]: DEBUG nova.network.neutron [req-a7d549bb-eaae-4dd6-b739-a704dcd34bd1 req-f1f89478-4360-4628-a13c-1d4334b2ae36 service nova] [instance: e20ed04f-205b-4aa9-b8b6-e352cd237412] Updated VIF entry in instance network info cache for port 735ece36-635c-498c-ad00-02b6ef32bba7. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 584.140654] env[63028]: DEBUG nova.network.neutron [req-a7d549bb-eaae-4dd6-b739-a704dcd34bd1 req-f1f89478-4360-4628-a13c-1d4334b2ae36 service nova] [instance: e20ed04f-205b-4aa9-b8b6-e352cd237412] Updating instance_info_cache with network_info: [{"id": "735ece36-635c-498c-ad00-02b6ef32bba7", "address": "fa:16:3e:de:8d:69", "network": {"id": "103ef17f-0425-48ae-98df-250d0d0dd66a", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-13124710-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43b7c1be3c4343a4b4f288a355170873", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3261e15f-7e45-4516-acfd-341bab16e3cf", "external-id": "nsx-vlan-transportzone-783", "segmentation_id": 783, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap735ece36-63", "ovs_interfaceid": "735ece36-635c-498c-ad00-02b6ef32bba7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 584.164767] env[63028]: DEBUG oslo_vmware.api [None req-74efb235-9e92-4f7d-b153-82f0da18a05e tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] Task: {'id': task-2734893, 'name': ReconfigVM_Task, 'duration_secs': 0.461723} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 584.166610] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-74efb235-9e92-4f7d-b153-82f0da18a05e tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] [instance: f80df630-327b-4923-a785-5d2e48fe1f19] Reconfigured VM instance instance-00000007 to attach disk [datastore2] f80df630-327b-4923-a785-5d2e48fe1f19/f80df630-327b-4923-a785-5d2e48fe1f19.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 584.166610] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9497b4f9-49cc-42eb-a19b-09231933a087 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.174852] env[63028]: DEBUG oslo_vmware.api [None req-74efb235-9e92-4f7d-b153-82f0da18a05e tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] Waiting for the task: (returnval){ [ 584.174852] env[63028]: value = "task-2734896" [ 584.174852] env[63028]: _type = "Task" [ 584.174852] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 584.190947] env[63028]: DEBUG oslo_vmware.api [None req-74efb235-9e92-4f7d-b153-82f0da18a05e tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] Task: {'id': task-2734896, 'name': Rename_Task} progress is 6%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 584.194890] env[63028]: DEBUG oslo_concurrency.lockutils [None req-0ae73018-2ad8-453d-b0a6-c1359a822bd9 tempest-DeleteServersAdminTestJSON-1646188334 tempest-DeleteServersAdminTestJSON-1646188334-project-admin] Acquiring lock "679fca11-7390-4596-ab74-2f82a6cf8858" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 584.195465] env[63028]: DEBUG oslo_concurrency.lockutils [None req-0ae73018-2ad8-453d-b0a6-c1359a822bd9 tempest-DeleteServersAdminTestJSON-1646188334 tempest-DeleteServersAdminTestJSON-1646188334-project-admin] Lock "679fca11-7390-4596-ab74-2f82a6cf8858" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 584.196115] env[63028]: DEBUG oslo_concurrency.lockutils [None req-0ae73018-2ad8-453d-b0a6-c1359a822bd9 tempest-DeleteServersAdminTestJSON-1646188334 tempest-DeleteServersAdminTestJSON-1646188334-project-admin] Acquiring lock "679fca11-7390-4596-ab74-2f82a6cf8858-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 584.196115] env[63028]: DEBUG oslo_concurrency.lockutils [None req-0ae73018-2ad8-453d-b0a6-c1359a822bd9 tempest-DeleteServersAdminTestJSON-1646188334 tempest-DeleteServersAdminTestJSON-1646188334-project-admin] Lock "679fca11-7390-4596-ab74-2f82a6cf8858-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 584.196669] env[63028]: DEBUG oslo_concurrency.lockutils [None req-0ae73018-2ad8-453d-b0a6-c1359a822bd9 tempest-DeleteServersAdminTestJSON-1646188334 tempest-DeleteServersAdminTestJSON-1646188334-project-admin] Lock "679fca11-7390-4596-ab74-2f82a6cf8858-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 584.199453] env[63028]: INFO nova.compute.manager [None req-0ae73018-2ad8-453d-b0a6-c1359a822bd9 tempest-DeleteServersAdminTestJSON-1646188334 tempest-DeleteServersAdminTestJSON-1646188334-project-admin] [instance: 679fca11-7390-4596-ab74-2f82a6cf8858] Terminating instance [ 584.324399] env[63028]: DEBUG oslo_vmware.api [None req-f2243ffd-3e93-4ac3-8cfa-6c07411b0613 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Task: {'id': task-2734894, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.127984} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 584.324679] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-f2243ffd-3e93-4ac3-8cfa-6c07411b0613 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] [instance: 2dcdb36f-6c2d-4f70-8aed-27f6511ef3e8] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 584.325480] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-690a4f81-b5ed-4e90-98bb-532dec28219e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.345704] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-f2243ffd-3e93-4ac3-8cfa-6c07411b0613 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] [instance: 2dcdb36f-6c2d-4f70-8aed-27f6511ef3e8] Reconfiguring VM instance instance-0000000a to attach disk [datastore1] 2dcdb36f-6c2d-4f70-8aed-27f6511ef3e8/2dcdb36f-6c2d-4f70-8aed-27f6511ef3e8.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 584.346486] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-92e723ff-0d80-4902-b89d-5d88dce0398d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.371801] env[63028]: DEBUG oslo_vmware.api [None req-c538ab0a-f047-4352-93f7-b849465565d9 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Task: {'id': task-2734895, 'name': PowerOffVM_Task, 'duration_secs': 0.145301} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 584.372790] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-c538ab0a-f047-4352-93f7-b849465565d9 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] [instance: 94b1bf30-0f9b-4197-99ff-6631a13ab2d1] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 584.372881] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c538ab0a-f047-4352-93f7-b849465565d9 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] [instance: 94b1bf30-0f9b-4197-99ff-6631a13ab2d1] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 584.373307] env[63028]: DEBUG oslo_vmware.api [None req-f2243ffd-3e93-4ac3-8cfa-6c07411b0613 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Waiting for the task: (returnval){ [ 584.373307] env[63028]: value = "task-2734897" [ 584.373307] env[63028]: _type = "Task" [ 584.373307] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 584.373957] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1c7c12a-b6b8-446a-97ff-160cacd86c8a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.389642] env[63028]: DEBUG oslo_vmware.api [None req-7796d56a-98c6-481b-9d55-c9c5a993a07a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Task: {'id': task-2734891, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.663864} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 584.395420] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-7796d56a-98c6-481b-9d55-c9c5a993a07a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] e20ed04f-205b-4aa9-b8b6-e352cd237412/e20ed04f-205b-4aa9-b8b6-e352cd237412.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 584.395606] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-7796d56a-98c6-481b-9d55-c9c5a993a07a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: e20ed04f-205b-4aa9-b8b6-e352cd237412] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 584.395946] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c538ab0a-f047-4352-93f7-b849465565d9 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] [instance: 94b1bf30-0f9b-4197-99ff-6631a13ab2d1] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 584.396188] env[63028]: DEBUG oslo_vmware.api [None req-f2243ffd-3e93-4ac3-8cfa-6c07411b0613 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Task: {'id': task-2734897, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 584.396441] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-84c2ae2f-4878-4e15-8b79-66a148e52521 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.399091] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-942124a6-93cb-4f1a-8cbc-7193463c6cfa {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.406440] env[63028]: DEBUG oslo_vmware.api [None req-7796d56a-98c6-481b-9d55-c9c5a993a07a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Waiting for the task: (returnval){ [ 584.406440] env[63028]: value = "task-2734899" [ 584.406440] env[63028]: _type = "Task" [ 584.406440] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 584.414304] env[63028]: DEBUG oslo_vmware.api [None req-7796d56a-98c6-481b-9d55-c9c5a993a07a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Task: {'id': task-2734899, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 584.426269] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-75180ae5-0e94-4a4f-a14c-d6517b6ebb49 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: f311a533-5c48-410b-ba3b-58f0032c8816] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 584.426664] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c538ab0a-f047-4352-93f7-b849465565d9 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] [instance: 94b1bf30-0f9b-4197-99ff-6631a13ab2d1] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 584.426787] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c538ab0a-f047-4352-93f7-b849465565d9 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] [instance: 94b1bf30-0f9b-4197-99ff-6631a13ab2d1] Deleting contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 584.426989] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-c538ab0a-f047-4352-93f7-b849465565d9 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Deleting the datastore file [datastore2] 94b1bf30-0f9b-4197-99ff-6631a13ab2d1 {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 584.427243] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6cca3de2-0488-4dc7-b749-b938709360fc {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.429141] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b88e57e7-adbc-4363-8bd4-e69a3aa72fcb {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.435843] env[63028]: DEBUG oslo_vmware.api [None req-75180ae5-0e94-4a4f-a14c-d6517b6ebb49 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Waiting for the task: (returnval){ [ 584.435843] env[63028]: value = "task-2734900" [ 584.435843] env[63028]: _type = "Task" [ 584.435843] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 584.437135] env[63028]: DEBUG oslo_vmware.api [None req-c538ab0a-f047-4352-93f7-b849465565d9 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Waiting for the task: (returnval){ [ 584.437135] env[63028]: value = "task-2734901" [ 584.437135] env[63028]: _type = "Task" [ 584.437135] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 584.449646] env[63028]: DEBUG oslo_vmware.api [None req-75180ae5-0e94-4a4f-a14c-d6517b6ebb49 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2734900, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 584.452991] env[63028]: DEBUG oslo_vmware.api [None req-c538ab0a-f047-4352-93f7-b849465565d9 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Task: {'id': task-2734901, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 584.485552] env[63028]: DEBUG nova.compute.utils [None req-c0eb0318-b4ad-46b7-97d9-49268588965f tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 584.487153] env[63028]: DEBUG nova.compute.manager [None req-c0eb0318-b4ad-46b7-97d9-49268588965f tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] [instance: d663c2df-ae54-4c50-a70f-e2180700c700] Allocating IP information in the background. {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 584.487328] env[63028]: DEBUG nova.network.neutron [None req-c0eb0318-b4ad-46b7-97d9-49268588965f tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] [instance: d663c2df-ae54-4c50-a70f-e2180700c700] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 584.534875] env[63028]: DEBUG oslo_vmware.api [None req-708d0fa9-8389-4fb9-b91d-cffdbfa9c678 tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] Task: {'id': task-2734892, 'name': PowerOnVM_Task} progress is 64%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 584.645517] env[63028]: DEBUG oslo_concurrency.lockutils [req-a7d549bb-eaae-4dd6-b739-a704dcd34bd1 req-f1f89478-4360-4628-a13c-1d4334b2ae36 service nova] Releasing lock "refresh_cache-e20ed04f-205b-4aa9-b8b6-e352cd237412" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 584.671553] env[63028]: DEBUG nova.policy [None req-c0eb0318-b4ad-46b7-97d9-49268588965f tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '80457ede6ae84c748d67e550a68387e4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e9b6ca6cccb940f0a516e265a721fd03', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 584.690841] env[63028]: DEBUG oslo_vmware.api [None req-74efb235-9e92-4f7d-b153-82f0da18a05e tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] Task: {'id': task-2734896, 'name': Rename_Task, 'duration_secs': 0.19154} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 584.690841] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-74efb235-9e92-4f7d-b153-82f0da18a05e tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] [instance: f80df630-327b-4923-a785-5d2e48fe1f19] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 584.690841] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b8a4c643-bd34-461e-a132-4a1408a8f188 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.702308] env[63028]: DEBUG oslo_vmware.api [None req-74efb235-9e92-4f7d-b153-82f0da18a05e tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] Waiting for the task: (returnval){ [ 584.702308] env[63028]: value = "task-2734902" [ 584.702308] env[63028]: _type = "Task" [ 584.702308] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 584.710735] env[63028]: DEBUG nova.compute.manager [None req-0ae73018-2ad8-453d-b0a6-c1359a822bd9 tempest-DeleteServersAdminTestJSON-1646188334 tempest-DeleteServersAdminTestJSON-1646188334-project-admin] [instance: 679fca11-7390-4596-ab74-2f82a6cf8858] Start destroying the instance on the hypervisor. 
{{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 584.711096] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-0ae73018-2ad8-453d-b0a6-c1359a822bd9 tempest-DeleteServersAdminTestJSON-1646188334 tempest-DeleteServersAdminTestJSON-1646188334-project-admin] [instance: 679fca11-7390-4596-ab74-2f82a6cf8858] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 584.711926] env[63028]: DEBUG oslo_vmware.api [None req-74efb235-9e92-4f7d-b153-82f0da18a05e tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] Task: {'id': task-2734902, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 584.712696] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d30f3739-8238-473a-beaa-c3495eda5a13 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.721844] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ae73018-2ad8-453d-b0a6-c1359a822bd9 tempest-DeleteServersAdminTestJSON-1646188334 tempest-DeleteServersAdminTestJSON-1646188334-project-admin] [instance: 679fca11-7390-4596-ab74-2f82a6cf8858] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 584.724035] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0ca60830-99c8-4856-97db-25851c8affae {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.729646] env[63028]: DEBUG oslo_vmware.api [None req-0ae73018-2ad8-453d-b0a6-c1359a822bd9 tempest-DeleteServersAdminTestJSON-1646188334 tempest-DeleteServersAdminTestJSON-1646188334-project-admin] Waiting for the task: (returnval){ [ 584.729646] env[63028]: value = "task-2734903" [ 584.729646] env[63028]: _type = "Task" [ 584.729646] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 584.738631] env[63028]: DEBUG oslo_vmware.api [None req-0ae73018-2ad8-453d-b0a6-c1359a822bd9 tempest-DeleteServersAdminTestJSON-1646188334 tempest-DeleteServersAdminTestJSON-1646188334-project-admin] Task: {'id': task-2734903, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 584.890492] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c53ace91-e3ca-434a-9184-18e1be83d4f7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.897386] env[63028]: DEBUG oslo_vmware.api [None req-f2243ffd-3e93-4ac3-8cfa-6c07411b0613 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Task: {'id': task-2734897, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 584.902723] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bb6e61b-6ab0-4347-9085-ad8da3a9b856 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.954139] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24a5e8c5-c9d2-4100-b5b4-ea9909c20a35 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.957368] env[63028]: DEBUG oslo_vmware.api [None req-7796d56a-98c6-481b-9d55-c9c5a993a07a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Task: {'id': task-2734899, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.32547} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 584.957368] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-7796d56a-98c6-481b-9d55-c9c5a993a07a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: e20ed04f-205b-4aa9-b8b6-e352cd237412] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 584.958837] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05cfca04-c43a-42ce-934b-0008e2f7bcfd {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.969709] env[63028]: DEBUG oslo_vmware.api [None req-75180ae5-0e94-4a4f-a14c-d6517b6ebb49 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2734900, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 584.969971] env[63028]: DEBUG oslo_vmware.api [None req-c538ab0a-f047-4352-93f7-b849465565d9 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Task: {'id': task-2734901, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.27594} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 584.971362] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-c538ab0a-f047-4352-93f7-b849465565d9 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 584.971546] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c538ab0a-f047-4352-93f7-b849465565d9 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] [instance: 94b1bf30-0f9b-4197-99ff-6631a13ab2d1] Deleted contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 584.971876] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c538ab0a-f047-4352-93f7-b849465565d9 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] [instance: 94b1bf30-0f9b-4197-99ff-6631a13ab2d1] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 584.975181] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0764d9c3-1f53-4128-bc2b-d41135832e5b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.992909] env[63028]: DEBUG nova.compute.manager [None req-c0eb0318-b4ad-46b7-97d9-49268588965f tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] [instance: d663c2df-ae54-4c50-a70f-e2180700c700] Start building block device mappings for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 584.995813] env[63028]: DEBUG nova.compute.provider_tree [None req-a4a45f73-e5da-48b3-b2dc-442bd505e719 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 585.015408] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-7796d56a-98c6-481b-9d55-c9c5a993a07a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: e20ed04f-205b-4aa9-b8b6-e352cd237412] Reconfiguring VM instance instance-00000008 to attach disk [datastore1] e20ed04f-205b-4aa9-b8b6-e352cd237412/e20ed04f-205b-4aa9-b8b6-e352cd237412.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 585.016153] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5d9f0b40-6900-4440-9ec0-4f94520849aa {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.042912] env[63028]: DEBUG oslo_vmware.api [None req-708d0fa9-8389-4fb9-b91d-cffdbfa9c678 tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] Task: {'id': task-2734892, 'name': PowerOnVM_Task, 'duration_secs': 1.25543} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 585.044505] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-708d0fa9-8389-4fb9-b91d-cffdbfa9c678 tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] [instance: 1eeb96d1-6e03-4192-a9db-955444519fd7] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 585.044816] env[63028]: INFO nova.compute.manager [None req-708d0fa9-8389-4fb9-b91d-cffdbfa9c678 tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] [instance: 1eeb96d1-6e03-4192-a9db-955444519fd7] Took 6.48 seconds to spawn the instance on the hypervisor. [ 585.045049] env[63028]: DEBUG nova.compute.manager [None req-708d0fa9-8389-4fb9-b91d-cffdbfa9c678 tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] [instance: 1eeb96d1-6e03-4192-a9db-955444519fd7] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 585.046672] env[63028]: DEBUG oslo_vmware.api [None req-7796d56a-98c6-481b-9d55-c9c5a993a07a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Waiting for the task: (returnval){ [ 585.046672] env[63028]: value = "task-2734904" [ 585.046672] env[63028]: _type = "Task" [ 585.046672] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 585.048116] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f02d226b-eb3d-4faa-aa89-cdb85e746baf {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.188790] env[63028]: DEBUG oslo_concurrency.lockutils [None req-78a1acb4-c1f1-436a-833a-acfe219f578d tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] Acquiring lock "44fca05f-51db-4252-bcf8-6bcad37a6147" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 585.189036] env[63028]: DEBUG oslo_concurrency.lockutils [None req-78a1acb4-c1f1-436a-833a-acfe219f578d tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] Lock "44fca05f-51db-4252-bcf8-6bcad37a6147" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 585.212967] env[63028]: DEBUG oslo_vmware.api [None req-74efb235-9e92-4f7d-b153-82f0da18a05e tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] Task: {'id': task-2734902, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 585.245161] env[63028]: DEBUG oslo_vmware.api [None req-0ae73018-2ad8-453d-b0a6-c1359a822bd9 tempest-DeleteServersAdminTestJSON-1646188334 tempest-DeleteServersAdminTestJSON-1646188334-project-admin] Task: {'id': task-2734903, 'name': PowerOffVM_Task, 'duration_secs': 0.409377} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 585.246739] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ae73018-2ad8-453d-b0a6-c1359a822bd9 tempest-DeleteServersAdminTestJSON-1646188334 tempest-DeleteServersAdminTestJSON-1646188334-project-admin] [instance: 679fca11-7390-4596-ab74-2f82a6cf8858] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 585.247041] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-0ae73018-2ad8-453d-b0a6-c1359a822bd9 tempest-DeleteServersAdminTestJSON-1646188334 tempest-DeleteServersAdminTestJSON-1646188334-project-admin] [instance: 679fca11-7390-4596-ab74-2f82a6cf8858] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 585.253752] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2224a12d-a3dd-4135-a824-b48ec5fad729 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.270025] env[63028]: DEBUG nova.network.neutron [None req-5557dd6d-8871-4b04-902a-5fb89e3ecdba tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] [instance: 67440140-a619-41f2-98fe-eff23e8ad8a5] Successfully updated port: f7a43b4b-f49a-4b79-b488-55fd4852195c {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 585.333714] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-0ae73018-2ad8-453d-b0a6-c1359a822bd9 tempest-DeleteServersAdminTestJSON-1646188334 tempest-DeleteServersAdminTestJSON-1646188334-project-admin] [instance: 679fca11-7390-4596-ab74-2f82a6cf8858] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 585.334023] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-0ae73018-2ad8-453d-b0a6-c1359a822bd9 tempest-DeleteServersAdminTestJSON-1646188334 tempest-DeleteServersAdminTestJSON-1646188334-project-admin] [instance: 679fca11-7390-4596-ab74-2f82a6cf8858] Deleting contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 585.335825] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ae73018-2ad8-453d-b0a6-c1359a822bd9 tempest-DeleteServersAdminTestJSON-1646188334 tempest-DeleteServersAdminTestJSON-1646188334-project-admin] Deleting the datastore file [datastore2] 679fca11-7390-4596-ab74-2f82a6cf8858 {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 585.335825] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-16064217-80da-4de0-bca5-85c18bdff276 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.342809] env[63028]: DEBUG oslo_vmware.api [None req-0ae73018-2ad8-453d-b0a6-c1359a822bd9 tempest-DeleteServersAdminTestJSON-1646188334 tempest-DeleteServersAdminTestJSON-1646188334-project-admin] Waiting for the task: (returnval){ [ 585.342809] env[63028]: value = "task-2734906" [ 585.342809] env[63028]: _type = "Task" [ 585.342809] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 585.357997] env[63028]: DEBUG oslo_vmware.api [None req-0ae73018-2ad8-453d-b0a6-c1359a822bd9 tempest-DeleteServersAdminTestJSON-1646188334 tempest-DeleteServersAdminTestJSON-1646188334-project-admin] Task: {'id': task-2734906, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 585.397537] env[63028]: DEBUG oslo_vmware.api [None req-f2243ffd-3e93-4ac3-8cfa-6c07411b0613 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Task: {'id': task-2734897, 'name': ReconfigVM_Task, 'duration_secs': 0.77393} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 585.397537] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-f2243ffd-3e93-4ac3-8cfa-6c07411b0613 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] [instance: 2dcdb36f-6c2d-4f70-8aed-27f6511ef3e8] Reconfigured VM instance instance-0000000a to attach disk [datastore1] 2dcdb36f-6c2d-4f70-8aed-27f6511ef3e8/2dcdb36f-6c2d-4f70-8aed-27f6511ef3e8.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 585.397537] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bbb0356f-2e66-4211-a310-808e9da01bb8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.404159] env[63028]: DEBUG oslo_vmware.api [None req-f2243ffd-3e93-4ac3-8cfa-6c07411b0613 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Waiting for the task: (returnval){ [ 585.404159] env[63028]: value = "task-2734907" [ 585.404159] env[63028]: _type = "Task" [ 585.404159] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 585.412974] env[63028]: DEBUG oslo_vmware.api [None req-f2243ffd-3e93-4ac3-8cfa-6c07411b0613 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Task: {'id': task-2734907, 'name': Rename_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 585.454539] env[63028]: DEBUG oslo_vmware.api [None req-75180ae5-0e94-4a4f-a14c-d6517b6ebb49 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2734900, 'name': PowerOffVM_Task, 'duration_secs': 0.792112} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 585.454539] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-75180ae5-0e94-4a4f-a14c-d6517b6ebb49 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: f311a533-5c48-410b-ba3b-58f0032c8816] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 585.454924] env[63028]: DEBUG nova.compute.manager [None req-75180ae5-0e94-4a4f-a14c-d6517b6ebb49 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: f311a533-5c48-410b-ba3b-58f0032c8816] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 585.455458] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d57fb69-72fa-45ba-85c2-b084a44a12d9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.521873] env[63028]: DEBUG nova.scheduler.client.report [None req-a4a45f73-e5da-48b3-b2dc-442bd505e719 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 585.562668] env[63028]: DEBUG oslo_vmware.api [None req-7796d56a-98c6-481b-9d55-c9c5a993a07a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Task: {'id': task-2734904, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 585.573472] env[63028]: INFO nova.compute.manager [None req-708d0fa9-8389-4fb9-b91d-cffdbfa9c678 tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] [instance: 1eeb96d1-6e03-4192-a9db-955444519fd7] Took 14.93 seconds to build instance. [ 585.719329] env[63028]: DEBUG oslo_vmware.api [None req-74efb235-9e92-4f7d-b153-82f0da18a05e tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] Task: {'id': task-2734902, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 585.759219] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a5c7e971-5f6a-4ff0-9cde-4fb9929da353 tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] Acquiring lock "a167df01-05e4-453d-8800-9c104d912474" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 585.762276] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a5c7e971-5f6a-4ff0-9cde-4fb9929da353 tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] Lock "a167df01-05e4-453d-8800-9c104d912474" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.002s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 585.762276] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a5c7e971-5f6a-4ff0-9cde-4fb9929da353 tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] Acquiring lock "a167df01-05e4-453d-8800-9c104d912474-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 585.762276] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a5c7e971-5f6a-4ff0-9cde-4fb9929da353 tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] Lock "a167df01-05e4-453d-8800-9c104d912474-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 585.762276] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a5c7e971-5f6a-4ff0-9cde-4fb9929da353 tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] Lock "a167df01-05e4-453d-8800-9c104d912474-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 585.763883] env[63028]: INFO nova.compute.manager [None req-a5c7e971-5f6a-4ff0-9cde-4fb9929da353 tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] [instance: a167df01-05e4-453d-8800-9c104d912474] Terminating instance [ 585.773101] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5557dd6d-8871-4b04-902a-5fb89e3ecdba tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Acquiring lock "refresh_cache-67440140-a619-41f2-98fe-eff23e8ad8a5" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 585.776021] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5557dd6d-8871-4b04-902a-5fb89e3ecdba tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Acquired lock "refresh_cache-67440140-a619-41f2-98fe-eff23e8ad8a5" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 585.776021] env[63028]: 
DEBUG nova.network.neutron [None req-5557dd6d-8871-4b04-902a-5fb89e3ecdba tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] [instance: 67440140-a619-41f2-98fe-eff23e8ad8a5] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 585.853640] env[63028]: DEBUG oslo_vmware.api [None req-0ae73018-2ad8-453d-b0a6-c1359a822bd9 tempest-DeleteServersAdminTestJSON-1646188334 tempest-DeleteServersAdminTestJSON-1646188334-project-admin] Task: {'id': task-2734906, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 585.912513] env[63028]: DEBUG oslo_vmware.api [None req-f2243ffd-3e93-4ac3-8cfa-6c07411b0613 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Task: {'id': task-2734907, 'name': Rename_Task, 'duration_secs': 0.215672} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 585.912931] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2243ffd-3e93-4ac3-8cfa-6c07411b0613 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] [instance: 2dcdb36f-6c2d-4f70-8aed-27f6511ef3e8] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 585.913305] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-25ddaf3b-2065-4212-96b3-5b61dcb3c431 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.916453] env[63028]: DEBUG nova.network.neutron [None req-c0eb0318-b4ad-46b7-97d9-49268588965f tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] [instance: d663c2df-ae54-4c50-a70f-e2180700c700] Successfully created port: fea60f3c-e539-418e-abfc-a7a41c223938 {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 585.919668] env[63028]: DEBUG oslo_vmware.api [None req-f2243ffd-3e93-4ac3-8cfa-6c07411b0613 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Waiting for the task: (returnval){ [ 585.919668] env[63028]: value = "task-2734908" [ 585.919668] env[63028]: _type = "Task" [ 585.919668] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 585.928101] env[63028]: DEBUG oslo_vmware.api [None req-f2243ffd-3e93-4ac3-8cfa-6c07411b0613 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Task: {'id': task-2734908, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 585.971939] env[63028]: DEBUG oslo_concurrency.lockutils [None req-75180ae5-0e94-4a4f-a14c-d6517b6ebb49 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Lock "f311a533-5c48-410b-ba3b-58f0032c8816" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.569s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 586.023879] env[63028]: DEBUG nova.virt.hardware [None req-c538ab0a-f047-4352-93f7-b849465565d9 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 586.023879] env[63028]: DEBUG nova.virt.hardware [None req-c538ab0a-f047-4352-93f7-b849465565d9 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 586.023879] env[63028]: DEBUG nova.virt.hardware [None req-c538ab0a-f047-4352-93f7-b849465565d9 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 586.023879] env[63028]: DEBUG nova.virt.hardware [None req-c538ab0a-f047-4352-93f7-b849465565d9 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 586.024144] env[63028]: DEBUG nova.virt.hardware [None req-c538ab0a-f047-4352-93f7-b849465565d9 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 586.024144] env[63028]: DEBUG nova.virt.hardware [None req-c538ab0a-f047-4352-93f7-b849465565d9 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 586.024528] env[63028]: DEBUG nova.virt.hardware [None req-c538ab0a-f047-4352-93f7-b849465565d9 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:573}} [ 586.024841] env[63028]: DEBUG nova.virt.hardware [None req-c538ab0a-f047-4352-93f7-b849465565d9 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 586.025142] env[63028]: DEBUG nova.virt.hardware [None req-c538ab0a-f047-4352-93f7-b849465565d9 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 586.025588] env[63028]: DEBUG nova.virt.hardware [None req-c538ab0a-f047-4352-93f7-b849465565d9 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 586.025874] env[63028]: DEBUG nova.virt.hardware [None req-c538ab0a-f047-4352-93f7-b849465565d9 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 586.027382] env[63028]: DEBUG nova.compute.manager [None req-c0eb0318-b4ad-46b7-97d9-49268588965f tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] [instance: d663c2df-ae54-4c50-a70f-e2180700c700] Start spawning the instance on the hypervisor. {{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 586.029921] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a4a45f73-e5da-48b3-b2dc-442bd505e719 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.053s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 586.032217] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55d55b6b-e208-4f24-b2d8-f64582ec6630 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.036213] env[63028]: DEBUG oslo_concurrency.lockutils [None req-19bb96ef-6a90-46d6-b14e-187dceb20ef8 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.782s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 586.038246] env[63028]: INFO nova.compute.claims [None req-19bb96ef-6a90-46d6-b14e-187dceb20ef8 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: 5a330ed9-c106-49f2-b524-a424e717b5ce] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 586.050311] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa2e5ee0-9b08-473f-baa0-1270bd70f0e1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.067276] env[63028]: DEBUG nova.virt.hardware [None req-c0eb0318-b4ad-46b7-97d9-49268588965f 
tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 586.067651] env[63028]: DEBUG nova.virt.hardware [None req-c0eb0318-b4ad-46b7-97d9-49268588965f tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 586.068418] env[63028]: DEBUG nova.virt.hardware [None req-c0eb0318-b4ad-46b7-97d9-49268588965f tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 586.068718] env[63028]: DEBUG nova.virt.hardware [None req-c0eb0318-b4ad-46b7-97d9-49268588965f tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 586.068970] env[63028]: DEBUG nova.virt.hardware [None req-c0eb0318-b4ad-46b7-97d9-49268588965f tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 586.069268] env[63028]: DEBUG nova.virt.hardware [None req-c0eb0318-b4ad-46b7-97d9-49268588965f tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 586.069571] env[63028]: DEBUG nova.virt.hardware [None req-c0eb0318-b4ad-46b7-97d9-49268588965f tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 586.069784] env[63028]: DEBUG nova.virt.hardware [None req-c0eb0318-b4ad-46b7-97d9-49268588965f tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 586.070038] env[63028]: DEBUG nova.virt.hardware [None req-c0eb0318-b4ad-46b7-97d9-49268588965f tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Got 1 possible topologies {{(pid=63028) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 586.070635] env[63028]: DEBUG nova.virt.hardware [None req-c0eb0318-b4ad-46b7-97d9-49268588965f tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 586.070635] env[63028]: DEBUG nova.virt.hardware [None req-c0eb0318-b4ad-46b7-97d9-49268588965f tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 586.071827] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ec067d9-c00a-4a8e-aa92-8e98ac6f5cb4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.079989] env[63028]: INFO nova.scheduler.client.report [None req-a4a45f73-e5da-48b3-b2dc-442bd505e719 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Deleted allocations for instance 03a19e41-1146-4560-8d93-16a23aa952da [ 586.094704] env[63028]: DEBUG oslo_concurrency.lockutils [None req-708d0fa9-8389-4fb9-b91d-cffdbfa9c678 tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] Lock "1eeb96d1-6e03-4192-a9db-955444519fd7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.471s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 586.094985] env[63028]: DEBUG oslo_vmware.api [None req-7796d56a-98c6-481b-9d55-c9c5a993a07a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Task: {'id': task-2734904, 'name': ReconfigVM_Task, 'duration_secs': 0.615336} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 586.095535] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c538ab0a-f047-4352-93f7-b849465565d9 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] [instance: 94b1bf30-0f9b-4197-99ff-6631a13ab2d1] Instance VIF info [] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 586.102145] env[63028]: DEBUG oslo.service.loopingcall [None req-c538ab0a-f047-4352-93f7-b849465565d9 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 586.108519] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-7796d56a-98c6-481b-9d55-c9c5a993a07a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: e20ed04f-205b-4aa9-b8b6-e352cd237412] Reconfigured VM instance instance-00000008 to attach disk [datastore1] e20ed04f-205b-4aa9-b8b6-e352cd237412/e20ed04f-205b-4aa9-b8b6-e352cd237412.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 586.109171] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 94b1bf30-0f9b-4197-99ff-6631a13ab2d1] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 586.113693] env[63028]: DEBUG nova.compute.manager [req-1678c6a9-6915-4cc3-bfd9-4303709a0c4d req-76213b8b-c4ad-4153-8247-d002d2ea89e3 service nova] [instance: 67440140-a619-41f2-98fe-eff23e8ad8a5] Received event network-vif-plugged-f7a43b4b-f49a-4b79-b488-55fd4852195c {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 586.113826] env[63028]: DEBUG oslo_concurrency.lockutils [req-1678c6a9-6915-4cc3-bfd9-4303709a0c4d req-76213b8b-c4ad-4153-8247-d002d2ea89e3 service nova] Acquiring lock "67440140-a619-41f2-98fe-eff23e8ad8a5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 586.114115] env[63028]: DEBUG oslo_concurrency.lockutils [req-1678c6a9-6915-4cc3-bfd9-4303709a0c4d req-76213b8b-c4ad-4153-8247-d002d2ea89e3 service nova] Lock "67440140-a619-41f2-98fe-eff23e8ad8a5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 586.114291] env[63028]: DEBUG oslo_concurrency.lockutils [req-1678c6a9-6915-4cc3-bfd9-4303709a0c4d req-76213b8b-c4ad-4153-8247-d002d2ea89e3 service nova] Lock "67440140-a619-41f2-98fe-eff23e8ad8a5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 586.114472] env[63028]: DEBUG nova.compute.manager [req-1678c6a9-6915-4cc3-bfd9-4303709a0c4d req-76213b8b-c4ad-4153-8247-d002d2ea89e3 service nova] [instance: 67440140-a619-41f2-98fe-eff23e8ad8a5] No waiting events found dispatching network-vif-plugged-f7a43b4b-f49a-4b79-b488-55fd4852195c {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 586.114617] env[63028]: WARNING nova.compute.manager [req-1678c6a9-6915-4cc3-bfd9-4303709a0c4d req-76213b8b-c4ad-4153-8247-d002d2ea89e3 service nova] [instance: 67440140-a619-41f2-98fe-eff23e8ad8a5] Received unexpected event network-vif-plugged-f7a43b4b-f49a-4b79-b488-55fd4852195c for instance with vm_state building and task_state spawning. 
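[annotation, not part of the captured log] The repeated Acquiring / "acquired" / "released" triples above, for example around the "67440140-a619-41f2-98fe-eff23e8ad8a5-events" lock taken by pop_instance_event, are emitted by oslo.concurrency's lockutils whenever a named in-process lock is entered and left. A minimal sketch of that pattern, with illustrative lock names and function bodies (this is not Nova's actual code):

    from oslo_concurrency import lockutils

    # Decorator form: all callers sharing this lock name are serialized,
    # producing "Acquiring"/"acquired"/"released" log lines like those above.
    @lockutils.synchronized('67440140-a619-41f2-98fe-eff23e8ad8a5-events')
    def _pop_event():
        pass  # illustrative: mutate per-instance event state under the lock

    # Context-manager form, equivalent for ad-hoc critical sections.
    def clear_events(instance_uuid):
        with lockutils.lock('%s-events' % instance_uuid):
            pass  # illustrative: clear the instance's pending events here

    _pop_event()
    clear_events('679fca11-7390-4596-ab74-2f82a6cf8858')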
[ 586.118504] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b9d8eb29-5be2-4648-a8d5-c2713c32ec75 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.120218] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c89bc437-9520-41c3-8f82-e0bb82cd9c02 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.135359] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0bfc9c3-28a8-4976-91b0-1a0ea1ab43ed {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.156188] env[63028]: DEBUG oslo_vmware.api [None req-7796d56a-98c6-481b-9d55-c9c5a993a07a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Waiting for the task: (returnval){ [ 586.156188] env[63028]: value = "task-2734909" [ 586.156188] env[63028]: _type = "Task" [ 586.156188] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 586.156188] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 586.156188] env[63028]: value = "task-2734910" [ 586.156188] env[63028]: _type = "Task" [ 586.156188] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 586.167661] env[63028]: DEBUG oslo_vmware.api [None req-7796d56a-98c6-481b-9d55-c9c5a993a07a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Task: {'id': task-2734909, 'name': Rename_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 586.171331] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2734910, 'name': CreateVM_Task} progress is 6%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 586.217040] env[63028]: DEBUG oslo_vmware.api [None req-74efb235-9e92-4f7d-b153-82f0da18a05e tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] Task: {'id': task-2734902, 'name': PowerOnVM_Task, 'duration_secs': 1.184143} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 586.217040] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-74efb235-9e92-4f7d-b153-82f0da18a05e tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] [instance: f80df630-327b-4923-a785-5d2e48fe1f19] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 586.217040] env[63028]: INFO nova.compute.manager [None req-74efb235-9e92-4f7d-b153-82f0da18a05e tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] [instance: f80df630-327b-4923-a785-5d2e48fe1f19] Took 12.36 seconds to spawn the instance on the hypervisor. 
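[annotation, not part of the captured log] The "Invoking VirtualMachine.Rename_Task", "Waiting for the task ... to complete" and "progress is N%" entries above come from oslo.vmware's session layer: a vSphere task is started with invoke_api() and then polled by wait_for_task() until it finishes (the duration_secs value is reported on completion). A rough sketch of that call pattern, using placeholder endpoint and credentials and assuming a vm_ref managed-object reference obtained from an earlier property lookup:

    from oslo_vmware import api as vmware_api

    def power_on(session, vm_ref):
        # Starting the task returns immediately with a task reference.
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        # Blocks, re-reading task state on task_poll_interval until it
        # reports success (or raises), yielding the "progress is N%" /
        # "completed successfully" log lines seen above.
        return session.wait_for_task(task)

    # Placeholder vCenter and credentials; constructing the session
    # opens a connection, so this only runs against a reachable vCenter.
    session = vmware_api.VMwareAPISession(
        'vc1.example.test', 'administrator', 'secret',
        api_retry_count=10, task_poll_interval=0.5)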
[ 586.217216] env[63028]: DEBUG nova.compute.manager [None req-74efb235-9e92-4f7d-b153-82f0da18a05e tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] [instance: f80df630-327b-4923-a785-5d2e48fe1f19] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 586.218441] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af22fae5-6d71-4257-92a7-9bcb9cabb306 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.274183] env[63028]: DEBUG nova.compute.manager [None req-a5c7e971-5f6a-4ff0-9cde-4fb9929da353 tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] [instance: a167df01-05e4-453d-8800-9c104d912474] Start destroying the instance on the hypervisor. {{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 586.274183] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-a5c7e971-5f6a-4ff0-9cde-4fb9929da353 tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] [instance: a167df01-05e4-453d-8800-9c104d912474] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 586.274183] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15927d0c-dc20-4937-ba72-602804e698f8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.287951] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-a5c7e971-5f6a-4ff0-9cde-4fb9929da353 tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] [instance: a167df01-05e4-453d-8800-9c104d912474] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 586.288613] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-64f16832-6230-4e39-8523-83353618ff5b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.298025] env[63028]: DEBUG oslo_vmware.api [None req-a5c7e971-5f6a-4ff0-9cde-4fb9929da353 tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] Waiting for the task: (returnval){ [ 586.298025] env[63028]: value = "task-2734911" [ 586.298025] env[63028]: _type = "Task" [ 586.298025] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 586.309338] env[63028]: DEBUG oslo_vmware.api [None req-a5c7e971-5f6a-4ff0-9cde-4fb9929da353 tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] Task: {'id': task-2734911, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 586.353655] env[63028]: DEBUG oslo_vmware.api [None req-0ae73018-2ad8-453d-b0a6-c1359a822bd9 tempest-DeleteServersAdminTestJSON-1646188334 tempest-DeleteServersAdminTestJSON-1646188334-project-admin] Task: {'id': task-2734906, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.906516} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 586.353901] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ae73018-2ad8-453d-b0a6-c1359a822bd9 tempest-DeleteServersAdminTestJSON-1646188334 tempest-DeleteServersAdminTestJSON-1646188334-project-admin] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 586.354166] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-0ae73018-2ad8-453d-b0a6-c1359a822bd9 tempest-DeleteServersAdminTestJSON-1646188334 tempest-DeleteServersAdminTestJSON-1646188334-project-admin] [instance: 679fca11-7390-4596-ab74-2f82a6cf8858] Deleted contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 586.354954] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-0ae73018-2ad8-453d-b0a6-c1359a822bd9 tempest-DeleteServersAdminTestJSON-1646188334 tempest-DeleteServersAdminTestJSON-1646188334-project-admin] [instance: 679fca11-7390-4596-ab74-2f82a6cf8858] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 586.354954] env[63028]: INFO nova.compute.manager [None req-0ae73018-2ad8-453d-b0a6-c1359a822bd9 tempest-DeleteServersAdminTestJSON-1646188334 tempest-DeleteServersAdminTestJSON-1646188334-project-admin] [instance: 679fca11-7390-4596-ab74-2f82a6cf8858] Took 1.64 seconds to destroy the instance on the hypervisor. [ 586.354954] env[63028]: DEBUG oslo.service.loopingcall [None req-0ae73018-2ad8-453d-b0a6-c1359a822bd9 tempest-DeleteServersAdminTestJSON-1646188334 tempest-DeleteServersAdminTestJSON-1646188334-project-admin] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 586.354954] env[63028]: DEBUG nova.compute.manager [-] [instance: 679fca11-7390-4596-ab74-2f82a6cf8858] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 586.354954] env[63028]: DEBUG nova.network.neutron [-] [instance: 679fca11-7390-4596-ab74-2f82a6cf8858] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 586.431647] env[63028]: DEBUG oslo_vmware.api [None req-f2243ffd-3e93-4ac3-8cfa-6c07411b0613 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Task: {'id': task-2734908, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 586.435891] env[63028]: DEBUG nova.network.neutron [None req-5557dd6d-8871-4b04-902a-5fb89e3ecdba tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] [instance: 67440140-a619-41f2-98fe-eff23e8ad8a5] Instance cache missing network info. {{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 586.616533] env[63028]: DEBUG nova.compute.manager [None req-fa9cf96e-1fa7-41d8-8fc5-4ef011c3de54 tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] [instance: 4a782483-c24e-44db-b697-856c69cc4a13] Starting instance... 
{{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 586.625783] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a4a45f73-e5da-48b3-b2dc-442bd505e719 tempest-ServersAaction247Test-270802812 tempest-ServersAaction247Test-270802812-project-member] Lock "03a19e41-1146-4560-8d93-16a23aa952da" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.909s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 586.676080] env[63028]: DEBUG oslo_vmware.api [None req-7796d56a-98c6-481b-9d55-c9c5a993a07a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Task: {'id': task-2734909, 'name': Rename_Task, 'duration_secs': 0.184915} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 586.680978] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-7796d56a-98c6-481b-9d55-c9c5a993a07a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: e20ed04f-205b-4aa9-b8b6-e352cd237412] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 586.681505] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2734910, 'name': CreateVM_Task, 'duration_secs': 0.294165} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 586.681760] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-948fa821-beaf-4431-8301-a58eca6d9f04 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.683363] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 94b1bf30-0f9b-4197-99ff-6631a13ab2d1] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 586.684044] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c538ab0a-f047-4352-93f7-b849465565d9 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 586.684220] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c538ab0a-f047-4352-93f7-b849465565d9 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 586.684530] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c538ab0a-f047-4352-93f7-b849465565d9 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 586.685337] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-31808bd1-447e-445d-9515-472c61aef1c2 {{(pid=63028) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.689649] env[63028]: DEBUG oslo_vmware.api [None req-7796d56a-98c6-481b-9d55-c9c5a993a07a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Waiting for the task: (returnval){ [ 586.689649] env[63028]: value = "task-2734912" [ 586.689649] env[63028]: _type = "Task" [ 586.689649] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 586.690976] env[63028]: DEBUG oslo_vmware.api [None req-c538ab0a-f047-4352-93f7-b849465565d9 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Waiting for the task: (returnval){ [ 586.690976] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5268542b-20a5-cac5-bc4e-59a42bad2576" [ 586.690976] env[63028]: _type = "Task" [ 586.690976] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 586.702680] env[63028]: DEBUG oslo_vmware.api [None req-7796d56a-98c6-481b-9d55-c9c5a993a07a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Task: {'id': task-2734912, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 586.704907] env[63028]: DEBUG oslo_vmware.api [None req-c538ab0a-f047-4352-93f7-b849465565d9 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5268542b-20a5-cac5-bc4e-59a42bad2576, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 586.739216] env[63028]: INFO nova.compute.manager [None req-74efb235-9e92-4f7d-b153-82f0da18a05e tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] [instance: f80df630-327b-4923-a785-5d2e48fe1f19] Took 19.59 seconds to build instance. [ 586.809202] env[63028]: DEBUG oslo_vmware.api [None req-a5c7e971-5f6a-4ff0-9cde-4fb9929da353 tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] Task: {'id': task-2734911, 'name': PowerOffVM_Task, 'duration_secs': 0.177461} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 586.809638] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-a5c7e971-5f6a-4ff0-9cde-4fb9929da353 tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] [instance: a167df01-05e4-453d-8800-9c104d912474] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 586.809744] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-a5c7e971-5f6a-4ff0-9cde-4fb9929da353 tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] [instance: a167df01-05e4-453d-8800-9c104d912474] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 586.810948] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a0eae0de-511b-4386-9336-b98b8a5a1310 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.864722] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-a5c7e971-5f6a-4ff0-9cde-4fb9929da353 tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] [instance: a167df01-05e4-453d-8800-9c104d912474] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 586.866225] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-a5c7e971-5f6a-4ff0-9cde-4fb9929da353 tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] [instance: a167df01-05e4-453d-8800-9c104d912474] Deleting contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 586.866225] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-a5c7e971-5f6a-4ff0-9cde-4fb9929da353 tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] Deleting the datastore file [datastore2] a167df01-05e4-453d-8800-9c104d912474 {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 586.866225] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c610614f-6de9-4728-884a-9ffa89bf2d4c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.872192] env[63028]: DEBUG oslo_vmware.api [None req-a5c7e971-5f6a-4ff0-9cde-4fb9929da353 tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] Waiting for the task: (returnval){ [ 586.872192] env[63028]: value = "task-2734914" [ 586.872192] env[63028]: _type = "Task" [ 586.872192] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 586.882095] env[63028]: DEBUG oslo_vmware.api [None req-a5c7e971-5f6a-4ff0-9cde-4fb9929da353 tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] Task: {'id': task-2734914, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 586.937119] env[63028]: DEBUG oslo_vmware.api [None req-f2243ffd-3e93-4ac3-8cfa-6c07411b0613 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Task: {'id': task-2734908, 'name': PowerOnVM_Task, 'duration_secs': 0.618916} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 586.937629] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2243ffd-3e93-4ac3-8cfa-6c07411b0613 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] [instance: 2dcdb36f-6c2d-4f70-8aed-27f6511ef3e8] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 586.937629] env[63028]: INFO nova.compute.manager [None req-f2243ffd-3e93-4ac3-8cfa-6c07411b0613 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] [instance: 2dcdb36f-6c2d-4f70-8aed-27f6511ef3e8] Took 5.94 seconds to spawn the instance on the hypervisor. [ 586.937694] env[63028]: DEBUG nova.compute.manager [None req-f2243ffd-3e93-4ac3-8cfa-6c07411b0613 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] [instance: 2dcdb36f-6c2d-4f70-8aed-27f6511ef3e8] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 586.939572] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41a77568-c477-42f1-a7f9-6c8fde6b4aef {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.133986] env[63028]: DEBUG nova.network.neutron [None req-5557dd6d-8871-4b04-902a-5fb89e3ecdba tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] [instance: 67440140-a619-41f2-98fe-eff23e8ad8a5] Updating instance_info_cache with network_info: [{"id": "f7a43b4b-f49a-4b79-b488-55fd4852195c", "address": "fa:16:3e:ac:e7:06", "network": {"id": "47c482bc-2ff1-431d-8910-0bf36def79a2", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.116", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "96661ac8d4f04d6e97eea4809b444133", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56136ef6-99d7-4562-9a9f-d66fec951c5c", "external-id": "nsx-vlan-transportzone-32", "segmentation_id": 32, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7a43b4b-f4", "ovs_interfaceid": "f7a43b4b-f49a-4b79-b488-55fd4852195c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 587.143872] env[63028]: DEBUG nova.compute.manager [None req-8ae7fcc8-834d-4c37-b231-bcf51ec54e90 tempest-ServerDiagnosticsV248Test-1492762669 tempest-ServerDiagnosticsV248Test-1492762669-project-admin] [instance: 1eeb96d1-6e03-4192-a9db-955444519fd7] Checking state 
{{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 587.145381] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59f23c7b-9234-4e8d-8858-fce7d8c68637 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.158900] env[63028]: INFO nova.compute.manager [None req-8ae7fcc8-834d-4c37-b231-bcf51ec54e90 tempest-ServerDiagnosticsV248Test-1492762669 tempest-ServerDiagnosticsV248Test-1492762669-project-admin] [instance: 1eeb96d1-6e03-4192-a9db-955444519fd7] Retrieving diagnostics [ 587.159989] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ba89af2-0dcc-48ae-83f0-2c6b5a924ea4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.163512] env[63028]: DEBUG oslo_concurrency.lockutils [None req-fa9cf96e-1fa7-41d8-8fc5-4ef011c3de54 tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 587.209766] env[63028]: DEBUG oslo_vmware.api [None req-c538ab0a-f047-4352-93f7-b849465565d9 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5268542b-20a5-cac5-bc4e-59a42bad2576, 'name': SearchDatastore_Task, 'duration_secs': 0.020114} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 587.216909] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c538ab0a-f047-4352-93f7-b849465565d9 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 587.217191] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c538ab0a-f047-4352-93f7-b849465565d9 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] [instance: 94b1bf30-0f9b-4197-99ff-6631a13ab2d1] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 587.217422] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c538ab0a-f047-4352-93f7-b849465565d9 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 587.217563] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c538ab0a-f047-4352-93f7-b849465565d9 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 587.217759] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-c538ab0a-f047-4352-93f7-b849465565d9 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 587.218628] env[63028]: DEBUG oslo_vmware.api [None req-7796d56a-98c6-481b-9d55-c9c5a993a07a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Task: {'id': task-2734912, 'name': PowerOnVM_Task} progress is 88%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 587.221358] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-760d6f98-2c8d-49d9-89ef-31f39620bb9f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.232372] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-c538ab0a-f047-4352-93f7-b849465565d9 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 587.232608] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c538ab0a-f047-4352-93f7-b849465565d9 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 587.233992] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d65f0b03-21a1-4100-8777-4ccc06ac64c9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.239933] env[63028]: DEBUG oslo_vmware.api [None req-c538ab0a-f047-4352-93f7-b849465565d9 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Waiting for the task: (returnval){ [ 587.239933] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5223f90d-9f35-3ce8-d708-3d749edab462" [ 587.239933] env[63028]: _type = "Task" [ 587.239933] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 587.246534] env[63028]: DEBUG oslo_concurrency.lockutils [None req-74efb235-9e92-4f7d-b153-82f0da18a05e tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] Lock "f80df630-327b-4923-a785-5d2e48fe1f19" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.114s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 587.261163] env[63028]: DEBUG oslo_vmware.api [None req-c538ab0a-f047-4352-93f7-b849465565d9 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5223f90d-9f35-3ce8-d708-3d749edab462, 'name': SearchDatastore_Task, 'duration_secs': 0.009549} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 587.262099] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-50d5ea02-40a0-416d-ad8a-22007d24b865 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.283562] env[63028]: DEBUG oslo_vmware.api [None req-c538ab0a-f047-4352-93f7-b849465565d9 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Waiting for the task: (returnval){ [ 587.283562] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52871a9e-54ec-c023-7b73-eee9a9411e47" [ 587.283562] env[63028]: _type = "Task" [ 587.283562] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 587.299914] env[63028]: DEBUG oslo_vmware.api [None req-c538ab0a-f047-4352-93f7-b849465565d9 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52871a9e-54ec-c023-7b73-eee9a9411e47, 'name': SearchDatastore_Task, 'duration_secs': 0.009601} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 587.304519] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c538ab0a-f047-4352-93f7-b849465565d9 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 587.305354] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-c538ab0a-f047-4352-93f7-b849465565d9 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] 94b1bf30-0f9b-4197-99ff-6631a13ab2d1/94b1bf30-0f9b-4197-99ff-6631a13ab2d1.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 587.308342] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b774d73e-77a1-490d-8866-db355c5c5261 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.316936] env[63028]: DEBUG oslo_vmware.api [None req-c538ab0a-f047-4352-93f7-b849465565d9 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Waiting for the task: (returnval){ [ 587.316936] env[63028]: value = "task-2734915" [ 587.316936] env[63028]: _type = "Task" [ 587.316936] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 587.333678] env[63028]: DEBUG oslo_vmware.api [None req-c538ab0a-f047-4352-93f7-b849465565d9 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Task: {'id': task-2734915, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 587.386255] env[63028]: DEBUG oslo_vmware.api [None req-a5c7e971-5f6a-4ff0-9cde-4fb9929da353 tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] Task: {'id': task-2734914, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.141749} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 587.386816] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-a5c7e971-5f6a-4ff0-9cde-4fb9929da353 tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 587.386816] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-a5c7e971-5f6a-4ff0-9cde-4fb9929da353 tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] [instance: a167df01-05e4-453d-8800-9c104d912474] Deleted contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 587.387017] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-a5c7e971-5f6a-4ff0-9cde-4fb9929da353 tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] [instance: a167df01-05e4-453d-8800-9c104d912474] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 587.388118] env[63028]: INFO nova.compute.manager [None req-a5c7e971-5f6a-4ff0-9cde-4fb9929da353 tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] [instance: a167df01-05e4-453d-8800-9c104d912474] Took 1.11 seconds to destroy the instance on the hypervisor. [ 587.388118] env[63028]: DEBUG oslo.service.loopingcall [None req-a5c7e971-5f6a-4ff0-9cde-4fb9929da353 tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 587.388118] env[63028]: DEBUG nova.compute.manager [-] [instance: a167df01-05e4-453d-8800-9c104d912474] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 587.388118] env[63028]: DEBUG nova.network.neutron [-] [instance: a167df01-05e4-453d-8800-9c104d912474] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 587.454763] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c9e1ad7-fce6-4bc3-951d-2f6f04a8da68 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.466660] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c22f06c-2a18-410e-b5c9-47660673240e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.470338] env[63028]: INFO nova.compute.manager [None req-f2243ffd-3e93-4ac3-8cfa-6c07411b0613 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] [instance: 2dcdb36f-6c2d-4f70-8aed-27f6511ef3e8] Took 15.08 seconds to build instance. [ 587.501511] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c67a94c7-65f7-4933-b333-278fc88626d4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.510606] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be5f4ab8-3803-4873-bfc8-52e17d40c626 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.526323] env[63028]: DEBUG nova.compute.provider_tree [None req-19bb96ef-6a90-46d6-b14e-187dceb20ef8 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 587.639252] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5557dd6d-8871-4b04-902a-5fb89e3ecdba tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Releasing lock "refresh_cache-67440140-a619-41f2-98fe-eff23e8ad8a5" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 587.639635] env[63028]: DEBUG nova.compute.manager [None req-5557dd6d-8871-4b04-902a-5fb89e3ecdba tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] [instance: 67440140-a619-41f2-98fe-eff23e8ad8a5] Instance network_info: |[{"id": "f7a43b4b-f49a-4b79-b488-55fd4852195c", "address": "fa:16:3e:ac:e7:06", "network": {"id": "47c482bc-2ff1-431d-8910-0bf36def79a2", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.116", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "96661ac8d4f04d6e97eea4809b444133", "mtu": 8950, 
"physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56136ef6-99d7-4562-9a9f-d66fec951c5c", "external-id": "nsx-vlan-transportzone-32", "segmentation_id": 32, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7a43b4b-f4", "ovs_interfaceid": "f7a43b4b-f49a-4b79-b488-55fd4852195c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 587.642030] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-5557dd6d-8871-4b04-902a-5fb89e3ecdba tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] [instance: 67440140-a619-41f2-98fe-eff23e8ad8a5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ac:e7:06', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '56136ef6-99d7-4562-9a9f-d66fec951c5c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f7a43b4b-f49a-4b79-b488-55fd4852195c', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 587.657275] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-5557dd6d-8871-4b04-902a-5fb89e3ecdba tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Creating folder: Project (1650f0c2f7654117abc43478061bbc00). Parent ref: group-v550570. {{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 587.657609] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cb6a2ca8-8e02-4a77-9fce-a0beda3ad67e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.671857] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-5557dd6d-8871-4b04-902a-5fb89e3ecdba tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Created folder: Project (1650f0c2f7654117abc43478061bbc00) in parent group-v550570. [ 587.672079] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-5557dd6d-8871-4b04-902a-5fb89e3ecdba tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Creating folder: Instances. Parent ref: group-v550603. {{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 587.672342] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-80e7103b-7213-4871-b0f4-7de865e8b908 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.684259] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-5557dd6d-8871-4b04-902a-5fb89e3ecdba tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Created folder: Instances in parent group-v550603. [ 587.684259] env[63028]: DEBUG oslo.service.loopingcall [None req-5557dd6d-8871-4b04-902a-5fb89e3ecdba tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 587.684259] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 67440140-a619-41f2-98fe-eff23e8ad8a5] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 587.684502] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c6afa15e-610e-4701-85bc-2db3b3b8deaf {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.718421] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 587.718421] env[63028]: value = "task-2734918" [ 587.718421] env[63028]: _type = "Task" [ 587.718421] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 587.718715] env[63028]: DEBUG oslo_vmware.api [None req-7796d56a-98c6-481b-9d55-c9c5a993a07a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Task: {'id': task-2734912, 'name': PowerOnVM_Task, 'duration_secs': 0.718727} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 587.719419] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-7796d56a-98c6-481b-9d55-c9c5a993a07a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: e20ed04f-205b-4aa9-b8b6-e352cd237412] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 587.719744] env[63028]: INFO nova.compute.manager [None req-7796d56a-98c6-481b-9d55-c9c5a993a07a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: e20ed04f-205b-4aa9-b8b6-e352cd237412] Took 11.51 seconds to spawn the instance on the hypervisor. [ 587.719939] env[63028]: DEBUG nova.compute.manager [None req-7796d56a-98c6-481b-9d55-c9c5a993a07a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: e20ed04f-205b-4aa9-b8b6-e352cd237412] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 587.724041] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc3034bc-cfee-4690-a93a-b150c02dab1a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.737023] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2734918, 'name': CreateVM_Task} progress is 6%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 587.764701] env[63028]: DEBUG nova.compute.manager [None req-78a1acb4-c1f1-436a-833a-acfe219f578d tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] [instance: 44fca05f-51db-4252-bcf8-6bcad37a6147] Starting instance... 
{{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 587.778653] env[63028]: DEBUG nova.network.neutron [-] [instance: 679fca11-7390-4596-ab74-2f82a6cf8858] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 587.829043] env[63028]: DEBUG oslo_vmware.api [None req-c538ab0a-f047-4352-93f7-b849465565d9 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Task: {'id': task-2734915, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 587.974376] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f2243ffd-3e93-4ac3-8cfa-6c07411b0613 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Lock "2dcdb36f-6c2d-4f70-8aed-27f6511ef3e8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.598s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 588.030524] env[63028]: DEBUG nova.scheduler.client.report [None req-19bb96ef-6a90-46d6-b14e-187dceb20ef8 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 588.247943] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2734918, 'name': CreateVM_Task} progress is 99%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 588.254027] env[63028]: INFO nova.compute.manager [None req-7796d56a-98c6-481b-9d55-c9c5a993a07a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: e20ed04f-205b-4aa9-b8b6-e352cd237412] Took 17.93 seconds to build instance. [ 588.281219] env[63028]: INFO nova.compute.manager [-] [instance: 679fca11-7390-4596-ab74-2f82a6cf8858] Took 1.93 seconds to deallocate network for instance. [ 588.299620] env[63028]: DEBUG oslo_concurrency.lockutils [None req-78a1acb4-c1f1-436a-833a-acfe219f578d tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 588.336164] env[63028]: DEBUG oslo_vmware.api [None req-c538ab0a-f047-4352-93f7-b849465565d9 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Task: {'id': task-2734915, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.524178} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 588.336882] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-c538ab0a-f047-4352-93f7-b849465565d9 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] 94b1bf30-0f9b-4197-99ff-6631a13ab2d1/94b1bf30-0f9b-4197-99ff-6631a13ab2d1.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 588.337241] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c538ab0a-f047-4352-93f7-b849465565d9 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] [instance: 94b1bf30-0f9b-4197-99ff-6631a13ab2d1] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 588.337436] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6ef49628-d9cd-4853-8204-2b25fe6ea2c4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.353180] env[63028]: DEBUG oslo_vmware.api [None req-c538ab0a-f047-4352-93f7-b849465565d9 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Waiting for the task: (returnval){ [ 588.353180] env[63028]: value = "task-2734919" [ 588.353180] env[63028]: _type = "Task" [ 588.353180] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 588.372681] env[63028]: DEBUG oslo_vmware.api [None req-c538ab0a-f047-4352-93f7-b849465565d9 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Task: {'id': task-2734919, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 588.394474] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c7c3e8ac-1fec-453a-8f4f-76e9aae0d91a tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Acquiring lock "f3277886-4498-45c6-be68-e71d8293dc00" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 588.394735] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c7c3e8ac-1fec-453a-8f4f-76e9aae0d91a tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Lock "f3277886-4498-45c6-be68-e71d8293dc00" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 588.537297] env[63028]: DEBUG oslo_concurrency.lockutils [None req-19bb96ef-6a90-46d6-b14e-187dceb20ef8 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.501s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 588.537968] env[63028]: DEBUG nova.compute.manager [None req-19bb96ef-6a90-46d6-b14e-187dceb20ef8 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: 5a330ed9-c106-49f2-b524-a424e717b5ce] Start building networks asynchronously for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 588.541412] env[63028]: DEBUG oslo_concurrency.lockutils [None req-70c27873-d5f4-48d7-97d8-490423e085b3 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.828s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 588.543564] env[63028]: INFO nova.compute.claims [None req-70c27873-d5f4-48d7-97d8-490423e085b3 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] [instance: 0dbafad1-ab21-439d-bc8e-e447ac33304e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 588.568483] env[63028]: DEBUG nova.network.neutron [-] [instance: a167df01-05e4-453d-8800-9c104d912474] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 588.719492] env[63028]: DEBUG nova.network.neutron [None req-c0eb0318-b4ad-46b7-97d9-49268588965f tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] [instance: d663c2df-ae54-4c50-a70f-e2180700c700] Successfully updated port: fea60f3c-e539-418e-abfc-a7a41c223938 {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 588.737845] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2734918, 'name': CreateVM_Task, 'duration_secs': 0.561134} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 588.737960] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 67440140-a619-41f2-98fe-eff23e8ad8a5] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 588.738688] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5557dd6d-8871-4b04-902a-5fb89e3ecdba tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 588.738862] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5557dd6d-8871-4b04-902a-5fb89e3ecdba tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 588.739229] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5557dd6d-8871-4b04-902a-5fb89e3ecdba tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 588.739783] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fd79773a-a508-4784-a3af-bcf0f5deb520 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.749219] env[63028]: DEBUG oslo_vmware.api [None req-5557dd6d-8871-4b04-902a-5fb89e3ecdba tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Waiting for the task: (returnval){ [ 588.749219] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5229f434-f400-54ed-f258-6331f5e5e0e0" [ 588.749219] env[63028]: _type = "Task" [ 588.749219] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 588.755292] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7796d56a-98c6-481b-9d55-c9c5a993a07a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Lock "e20ed04f-205b-4aa9-b8b6-e352cd237412" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.439s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 588.759329] env[63028]: DEBUG oslo_vmware.api [None req-5557dd6d-8871-4b04-902a-5fb89e3ecdba tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5229f434-f400-54ed-f258-6331f5e5e0e0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 588.794441] env[63028]: DEBUG oslo_concurrency.lockutils [None req-0ae73018-2ad8-453d-b0a6-c1359a822bd9 tempest-DeleteServersAdminTestJSON-1646188334 tempest-DeleteServersAdminTestJSON-1646188334-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 588.868869] env[63028]: DEBUG oslo_vmware.api [None req-c538ab0a-f047-4352-93f7-b849465565d9 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Task: {'id': task-2734919, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.095098} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 588.869266] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c538ab0a-f047-4352-93f7-b849465565d9 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] [instance: 94b1bf30-0f9b-4197-99ff-6631a13ab2d1] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 588.870362] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d86a9daa-a543-479f-bf51-1bc1093043ad {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.892972] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-c538ab0a-f047-4352-93f7-b849465565d9 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] [instance: 94b1bf30-0f9b-4197-99ff-6631a13ab2d1] Reconfiguring VM instance instance-00000005 to attach disk [datastore1] 94b1bf30-0f9b-4197-99ff-6631a13ab2d1/94b1bf30-0f9b-4197-99ff-6631a13ab2d1.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 588.894517] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7e4ba19f-ac70-4105-ac08-9bbf3c3d6e76 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.909106] env[63028]: DEBUG nova.compute.manager [None req-c7c3e8ac-1fec-453a-8f4f-76e9aae0d91a tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: f3277886-4498-45c6-be68-e71d8293dc00] Starting instance... {{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 588.916795] env[63028]: DEBUG oslo_vmware.api [None req-c538ab0a-f047-4352-93f7-b849465565d9 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Waiting for the task: (returnval){ [ 588.916795] env[63028]: value = "task-2734920" [ 588.916795] env[63028]: _type = "Task" [ 588.916795] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 588.925509] env[63028]: DEBUG oslo_vmware.api [None req-c538ab0a-f047-4352-93f7-b849465565d9 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Task: {'id': task-2734920, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 589.051699] env[63028]: DEBUG nova.compute.utils [None req-19bb96ef-6a90-46d6-b14e-187dceb20ef8 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 589.056600] env[63028]: DEBUG nova.compute.manager [None req-19bb96ef-6a90-46d6-b14e-187dceb20ef8 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: 5a330ed9-c106-49f2-b524-a424e717b5ce] Allocating IP information in the background. {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 589.056600] env[63028]: DEBUG nova.network.neutron [None req-19bb96ef-6a90-46d6-b14e-187dceb20ef8 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: 5a330ed9-c106-49f2-b524-a424e717b5ce] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 589.075137] env[63028]: INFO nova.compute.manager [-] [instance: a167df01-05e4-453d-8800-9c104d912474] Took 1.69 seconds to deallocate network for instance. [ 589.152080] env[63028]: DEBUG nova.policy [None req-19bb96ef-6a90-46d6-b14e-187dceb20ef8 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f67c0caec73f4a8bb333737416142677', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b3d1798e23e64325a3b6f699cd27d98f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 589.231141] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c0eb0318-b4ad-46b7-97d9-49268588965f tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Acquiring lock "refresh_cache-d663c2df-ae54-4c50-a70f-e2180700c700" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 589.231141] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c0eb0318-b4ad-46b7-97d9-49268588965f tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Acquired lock "refresh_cache-d663c2df-ae54-4c50-a70f-e2180700c700" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 589.231141] env[63028]: DEBUG nova.network.neutron [None req-c0eb0318-b4ad-46b7-97d9-49268588965f tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] [instance: d663c2df-ae54-4c50-a70f-e2180700c700] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 589.265720] env[63028]: DEBUG oslo_vmware.api [None req-5557dd6d-8871-4b04-902a-5fb89e3ecdba tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5229f434-f400-54ed-f258-6331f5e5e0e0, 'name': SearchDatastore_Task, 'duration_secs': 0.016094} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 589.266045] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5557dd6d-8871-4b04-902a-5fb89e3ecdba tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 589.266471] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-5557dd6d-8871-4b04-902a-5fb89e3ecdba tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] [instance: 67440140-a619-41f2-98fe-eff23e8ad8a5] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 589.266704] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5557dd6d-8871-4b04-902a-5fb89e3ecdba tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 589.266908] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5557dd6d-8871-4b04-902a-5fb89e3ecdba tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 589.267469] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-5557dd6d-8871-4b04-902a-5fb89e3ecdba tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 589.267769] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-79257bf1-e5c2-423e-85fb-2861461800d1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.282626] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-5557dd6d-8871-4b04-902a-5fb89e3ecdba tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 589.282626] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-5557dd6d-8871-4b04-902a-5fb89e3ecdba tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 589.283780] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-100e8565-048b-4385-bd6f-ade0efa1ef31 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.293569] env[63028]: DEBUG oslo_vmware.api [None req-5557dd6d-8871-4b04-902a-5fb89e3ecdba tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Waiting for the task: (returnval){ [ 589.293569] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5278d07b-a631-5570-f72a-625023de52a4" [ 589.293569] env[63028]: _type = "Task" [ 589.293569] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 589.304567] env[63028]: DEBUG oslo_vmware.api [None req-5557dd6d-8871-4b04-902a-5fb89e3ecdba tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5278d07b-a631-5570-f72a-625023de52a4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 589.434480] env[63028]: DEBUG oslo_vmware.api [None req-c538ab0a-f047-4352-93f7-b849465565d9 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Task: {'id': task-2734920, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 589.445551] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c7c3e8ac-1fec-453a-8f4f-76e9aae0d91a tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 589.556905] env[63028]: DEBUG nova.compute.manager [None req-19bb96ef-6a90-46d6-b14e-187dceb20ef8 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: 5a330ed9-c106-49f2-b524-a424e717b5ce] Start building block device mappings for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 589.587647] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a5c7e971-5f6a-4ff0-9cde-4fb9929da353 tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 589.792050] env[63028]: DEBUG nova.network.neutron [None req-c0eb0318-b4ad-46b7-97d9-49268588965f tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] [instance: d663c2df-ae54-4c50-a70f-e2180700c700] Instance cache missing network info. 
{{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 589.807973] env[63028]: DEBUG oslo_vmware.api [None req-5557dd6d-8871-4b04-902a-5fb89e3ecdba tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5278d07b-a631-5570-f72a-625023de52a4, 'name': SearchDatastore_Task, 'duration_secs': 0.011283} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 589.808744] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cdbc7dcc-5e18-4220-a416-33075331fd3e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.815569] env[63028]: DEBUG oslo_vmware.api [None req-5557dd6d-8871-4b04-902a-5fb89e3ecdba tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Waiting for the task: (returnval){ [ 589.815569] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]524497a6-9d03-847f-38cc-954599957a6a" [ 589.815569] env[63028]: _type = "Task" [ 589.815569] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 589.829688] env[63028]: DEBUG oslo_vmware.api [None req-5557dd6d-8871-4b04-902a-5fb89e3ecdba tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]524497a6-9d03-847f-38cc-954599957a6a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 589.889775] env[63028]: DEBUG nova.network.neutron [None req-19bb96ef-6a90-46d6-b14e-187dceb20ef8 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: 5a330ed9-c106-49f2-b524-a424e717b5ce] Successfully created port: 6dc62708-050a-40f3-b99a-f51b25937806 {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 589.899386] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dd5b0b3-d343-4ca4-b1a5-6b03eef5f0b3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.911238] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e9c022e-bd30-4492-ae13-9e22a5c48cc2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.952944] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73894c43-729f-4f5c-8b7d-2a0859602e22 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.959042] env[63028]: DEBUG oslo_vmware.api [None req-c538ab0a-f047-4352-93f7-b849465565d9 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Task: {'id': task-2734920, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 589.965254] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c3defc2-94b0-4402-8a5d-3b4f5b0c744e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.981317] env[63028]: DEBUG nova.compute.provider_tree [None req-70c27873-d5f4-48d7-97d8-490423e085b3 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 590.166415] env[63028]: DEBUG nova.compute.manager [req-169e7a38-ed74-4b4a-85c4-9d5208060cc7 req-952898ba-0fe2-4e87-a8eb-cd55ebe247e5 service nova] [instance: 67440140-a619-41f2-98fe-eff23e8ad8a5] Received event network-changed-f7a43b4b-f49a-4b79-b488-55fd4852195c {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 590.166640] env[63028]: DEBUG nova.compute.manager [req-169e7a38-ed74-4b4a-85c4-9d5208060cc7 req-952898ba-0fe2-4e87-a8eb-cd55ebe247e5 service nova] [instance: 67440140-a619-41f2-98fe-eff23e8ad8a5] Refreshing instance network info cache due to event network-changed-f7a43b4b-f49a-4b79-b488-55fd4852195c. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 590.166709] env[63028]: DEBUG oslo_concurrency.lockutils [req-169e7a38-ed74-4b4a-85c4-9d5208060cc7 req-952898ba-0fe2-4e87-a8eb-cd55ebe247e5 service nova] Acquiring lock "refresh_cache-67440140-a619-41f2-98fe-eff23e8ad8a5" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 590.167029] env[63028]: DEBUG oslo_concurrency.lockutils [req-169e7a38-ed74-4b4a-85c4-9d5208060cc7 req-952898ba-0fe2-4e87-a8eb-cd55ebe247e5 service nova] Acquired lock "refresh_cache-67440140-a619-41f2-98fe-eff23e8ad8a5" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 590.167176] env[63028]: DEBUG nova.network.neutron [req-169e7a38-ed74-4b4a-85c4-9d5208060cc7 req-952898ba-0fe2-4e87-a8eb-cd55ebe247e5 service nova] [instance: 67440140-a619-41f2-98fe-eff23e8ad8a5] Refreshing network info cache for port f7a43b4b-f49a-4b79-b488-55fd4852195c {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 590.191475] env[63028]: DEBUG nova.compute.manager [req-af1438aa-babc-4f79-847b-e8c335c271f2 req-67412eac-7faf-42e1-a8ea-c0c7d7c3b29f service nova] [instance: d663c2df-ae54-4c50-a70f-e2180700c700] Received event network-vif-plugged-fea60f3c-e539-418e-abfc-a7a41c223938 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 590.192051] env[63028]: DEBUG oslo_concurrency.lockutils [req-af1438aa-babc-4f79-847b-e8c335c271f2 req-67412eac-7faf-42e1-a8ea-c0c7d7c3b29f service nova] Acquiring lock "d663c2df-ae54-4c50-a70f-e2180700c700-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 590.192051] env[63028]: DEBUG oslo_concurrency.lockutils [req-af1438aa-babc-4f79-847b-e8c335c271f2 req-67412eac-7faf-42e1-a8ea-c0c7d7c3b29f service nova] Lock "d663c2df-ae54-4c50-a70f-e2180700c700-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 
0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 590.193497] env[63028]: DEBUG oslo_concurrency.lockutils [req-af1438aa-babc-4f79-847b-e8c335c271f2 req-67412eac-7faf-42e1-a8ea-c0c7d7c3b29f service nova] Lock "d663c2df-ae54-4c50-a70f-e2180700c700-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 590.193497] env[63028]: DEBUG nova.compute.manager [req-af1438aa-babc-4f79-847b-e8c335c271f2 req-67412eac-7faf-42e1-a8ea-c0c7d7c3b29f service nova] [instance: d663c2df-ae54-4c50-a70f-e2180700c700] No waiting events found dispatching network-vif-plugged-fea60f3c-e539-418e-abfc-a7a41c223938 {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 590.193497] env[63028]: WARNING nova.compute.manager [req-af1438aa-babc-4f79-847b-e8c335c271f2 req-67412eac-7faf-42e1-a8ea-c0c7d7c3b29f service nova] [instance: d663c2df-ae54-4c50-a70f-e2180700c700] Received unexpected event network-vif-plugged-fea60f3c-e539-418e-abfc-a7a41c223938 for instance with vm_state building and task_state spawning. [ 590.246473] env[63028]: DEBUG nova.network.neutron [None req-c0eb0318-b4ad-46b7-97d9-49268588965f tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] [instance: d663c2df-ae54-4c50-a70f-e2180700c700] Updating instance_info_cache with network_info: [{"id": "fea60f3c-e539-418e-abfc-a7a41c223938", "address": "fa:16:3e:64:6a:31", "network": {"id": "95ccbf6d-2bd9-42ff-93f9-5f9f541e5ba1", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-863839356-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e9b6ca6cccb940f0a516e265a721fd03", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "06eaa4c9-dbc2-4d38-a844-7bf76e7b5a64", "external-id": "nsx-vlan-transportzone-804", "segmentation_id": 804, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfea60f3c-e5", "ovs_interfaceid": "fea60f3c-e539-418e-abfc-a7a41c223938", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 590.327044] env[63028]: DEBUG oslo_vmware.api [None req-5557dd6d-8871-4b04-902a-5fb89e3ecdba tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]524497a6-9d03-847f-38cc-954599957a6a, 'name': SearchDatastore_Task, 'duration_secs': 0.010495} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 590.327364] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5557dd6d-8871-4b04-902a-5fb89e3ecdba tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 590.327480] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-5557dd6d-8871-4b04-902a-5fb89e3ecdba tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] 67440140-a619-41f2-98fe-eff23e8ad8a5/67440140-a619-41f2-98fe-eff23e8ad8a5.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 590.327816] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-39bc8b14-cd8d-4b02-b6a7-4141d28dfe30 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.336664] env[63028]: DEBUG oslo_vmware.api [None req-5557dd6d-8871-4b04-902a-5fb89e3ecdba tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Waiting for the task: (returnval){ [ 590.336664] env[63028]: value = "task-2734921" [ 590.336664] env[63028]: _type = "Task" [ 590.336664] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 590.349957] env[63028]: DEBUG oslo_vmware.api [None req-5557dd6d-8871-4b04-902a-5fb89e3ecdba tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Task: {'id': task-2734921, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 590.357354] env[63028]: DEBUG nova.compute.manager [None req-a428f7da-b8aa-40cb-9fdc-1b56e9c461c4 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: f311a533-5c48-410b-ba3b-58f0032c8816] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 590.358498] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cd5a974-d901-4d52-a60d-631211a81252 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.431656] env[63028]: DEBUG oslo_vmware.api [None req-c538ab0a-f047-4352-93f7-b849465565d9 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Task: {'id': task-2734920, 'name': ReconfigVM_Task, 'duration_secs': 1.408699} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 590.432035] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-c538ab0a-f047-4352-93f7-b849465565d9 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] [instance: 94b1bf30-0f9b-4197-99ff-6631a13ab2d1] Reconfigured VM instance instance-00000005 to attach disk [datastore1] 94b1bf30-0f9b-4197-99ff-6631a13ab2d1/94b1bf30-0f9b-4197-99ff-6631a13ab2d1.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 590.432781] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-71987b68-c58b-4279-8c91-05df5edbf594 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.441629] env[63028]: DEBUG oslo_vmware.api [None req-c538ab0a-f047-4352-93f7-b849465565d9 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Waiting for the task: (returnval){ [ 590.441629] env[63028]: value = "task-2734922" [ 590.441629] env[63028]: _type = "Task" [ 590.441629] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 590.453036] env[63028]: DEBUG oslo_vmware.api [None req-c538ab0a-f047-4352-93f7-b849465565d9 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Task: {'id': task-2734922, 'name': Rename_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 590.485684] env[63028]: DEBUG nova.scheduler.client.report [None req-70c27873-d5f4-48d7-97d8-490423e085b3 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 590.568215] env[63028]: DEBUG nova.compute.manager [None req-19bb96ef-6a90-46d6-b14e-187dceb20ef8 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: 5a330ed9-c106-49f2-b524-a424e717b5ce] Start spawning the instance on the hypervisor. 
{{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 590.606352] env[63028]: DEBUG nova.virt.hardware [None req-19bb96ef-6a90-46d6-b14e-187dceb20ef8 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 590.606352] env[63028]: DEBUG nova.virt.hardware [None req-19bb96ef-6a90-46d6-b14e-187dceb20ef8 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 590.606565] env[63028]: DEBUG nova.virt.hardware [None req-19bb96ef-6a90-46d6-b14e-187dceb20ef8 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 590.606973] env[63028]: DEBUG nova.virt.hardware [None req-19bb96ef-6a90-46d6-b14e-187dceb20ef8 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 590.607110] env[63028]: DEBUG nova.virt.hardware [None req-19bb96ef-6a90-46d6-b14e-187dceb20ef8 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 590.607318] env[63028]: DEBUG nova.virt.hardware [None req-19bb96ef-6a90-46d6-b14e-187dceb20ef8 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 590.607990] env[63028]: DEBUG nova.virt.hardware [None req-19bb96ef-6a90-46d6-b14e-187dceb20ef8 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 590.608297] env[63028]: DEBUG nova.virt.hardware [None req-19bb96ef-6a90-46d6-b14e-187dceb20ef8 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 590.608488] env[63028]: DEBUG nova.virt.hardware [None 
req-19bb96ef-6a90-46d6-b14e-187dceb20ef8 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 590.608848] env[63028]: DEBUG nova.virt.hardware [None req-19bb96ef-6a90-46d6-b14e-187dceb20ef8 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 590.609666] env[63028]: DEBUG nova.virt.hardware [None req-19bb96ef-6a90-46d6-b14e-187dceb20ef8 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 590.610431] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b0cacc5-48e4-40d8-9fbb-4b35f9b3e176 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.622169] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25364961-291a-40c1-a1d7-e77f0e130534 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.752498] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c0eb0318-b4ad-46b7-97d9-49268588965f tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Releasing lock "refresh_cache-d663c2df-ae54-4c50-a70f-e2180700c700" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 590.752498] env[63028]: DEBUG nova.compute.manager [None req-c0eb0318-b4ad-46b7-97d9-49268588965f tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] [instance: d663c2df-ae54-4c50-a70f-e2180700c700] Instance network_info: |[{"id": "fea60f3c-e539-418e-abfc-a7a41c223938", "address": "fa:16:3e:64:6a:31", "network": {"id": "95ccbf6d-2bd9-42ff-93f9-5f9f541e5ba1", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-863839356-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e9b6ca6cccb940f0a516e265a721fd03", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "06eaa4c9-dbc2-4d38-a844-7bf76e7b5a64", "external-id": "nsx-vlan-transportzone-804", "segmentation_id": 804, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfea60f3c-e5", "ovs_interfaceid": "fea60f3c-e539-418e-abfc-a7a41c223938", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 590.752759] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c0eb0318-b4ad-46b7-97d9-49268588965f 
tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] [instance: d663c2df-ae54-4c50-a70f-e2180700c700] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:64:6a:31', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '06eaa4c9-dbc2-4d38-a844-7bf76e7b5a64', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fea60f3c-e539-418e-abfc-a7a41c223938', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 590.761552] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0eb0318-b4ad-46b7-97d9-49268588965f tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Creating folder: Project (e9b6ca6cccb940f0a516e265a721fd03). Parent ref: group-v550570. {{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 590.762046] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2101a7c8-cc02-404b-9e1c-4dd5e77a7b50 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.778668] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-c0eb0318-b4ad-46b7-97d9-49268588965f tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Created folder: Project (e9b6ca6cccb940f0a516e265a721fd03) in parent group-v550570. [ 590.778804] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0eb0318-b4ad-46b7-97d9-49268588965f tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Creating folder: Instances. Parent ref: group-v550606. {{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 590.779055] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1e62627f-0a5e-49c2-a8ad-0f63961fa82d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.792904] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-c0eb0318-b4ad-46b7-97d9-49268588965f tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Created folder: Instances in parent group-v550606. [ 590.792904] env[63028]: DEBUG oslo.service.loopingcall [None req-c0eb0318-b4ad-46b7-97d9-49268588965f tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 590.793153] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d663c2df-ae54-4c50-a70f-e2180700c700] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 590.793852] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-faf85b46-1d6e-4eda-ba82-64b3dcbeca59 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.817427] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 590.817427] env[63028]: value = "task-2734925" [ 590.817427] env[63028]: _type = "Task" [ 590.817427] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 590.829643] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2734925, 'name': CreateVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 590.850476] env[63028]: DEBUG oslo_vmware.api [None req-5557dd6d-8871-4b04-902a-5fb89e3ecdba tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Task: {'id': task-2734921, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.50443} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 590.851164] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-5557dd6d-8871-4b04-902a-5fb89e3ecdba tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] 67440140-a619-41f2-98fe-eff23e8ad8a5/67440140-a619-41f2-98fe-eff23e8ad8a5.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 590.851502] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-5557dd6d-8871-4b04-902a-5fb89e3ecdba tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] [instance: 67440140-a619-41f2-98fe-eff23e8ad8a5] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 590.851863] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-647b6a7a-a430-4082-bdea-ef4aee754374 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.863244] env[63028]: DEBUG oslo_vmware.api [None req-5557dd6d-8871-4b04-902a-5fb89e3ecdba tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Waiting for the task: (returnval){ [ 590.863244] env[63028]: value = "task-2734926" [ 590.863244] env[63028]: _type = "Task" [ 590.863244] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 590.873328] env[63028]: INFO nova.compute.manager [None req-a428f7da-b8aa-40cb-9fdc-1b56e9c461c4 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: f311a533-5c48-410b-ba3b-58f0032c8816] instance snapshotting [ 590.873543] env[63028]: WARNING nova.compute.manager [None req-a428f7da-b8aa-40cb-9fdc-1b56e9c461c4 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: f311a533-5c48-410b-ba3b-58f0032c8816] trying to snapshot a non-running instance: (state: 4 expected: 1) [ 590.875192] env[63028]: DEBUG oslo_vmware.api [None req-5557dd6d-8871-4b04-902a-5fb89e3ecdba tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Task: {'id': task-2734926, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 590.878333] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4429ac09-7f4e-4f0a-8c9b-d890d884a082 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.900169] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa99d848-d248-48ac-b642-11fb26427b10 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.953277] env[63028]: DEBUG oslo_vmware.api [None req-c538ab0a-f047-4352-93f7-b849465565d9 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Task: {'id': task-2734922, 'name': Rename_Task, 'duration_secs': 0.451777} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 590.955896] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-c538ab0a-f047-4352-93f7-b849465565d9 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] [instance: 94b1bf30-0f9b-4197-99ff-6631a13ab2d1] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 590.956176] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1b617627-270d-4917-9dfd-cccd250d331e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.964735] env[63028]: DEBUG oslo_vmware.api [None req-c538ab0a-f047-4352-93f7-b849465565d9 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Waiting for the task: (returnval){ [ 590.964735] env[63028]: value = "task-2734927" [ 590.964735] env[63028]: _type = "Task" [ 590.964735] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 590.976872] env[63028]: DEBUG oslo_vmware.api [None req-c538ab0a-f047-4352-93f7-b849465565d9 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Task: {'id': task-2734927, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 590.991358] env[63028]: DEBUG oslo_concurrency.lockutils [None req-70c27873-d5f4-48d7-97d8-490423e085b3 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.450s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 590.991883] env[63028]: DEBUG nova.compute.manager [None req-70c27873-d5f4-48d7-97d8-490423e085b3 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] [instance: 0dbafad1-ab21-439d-bc8e-e447ac33304e] Start building networks asynchronously for instance. 
{{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 590.994528] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d380ac33-3a3f-419f-b4d1-ad7edb67b04d tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.640s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 590.995898] env[63028]: INFO nova.compute.claims [None req-d380ac33-3a3f-419f-b4d1-ad7edb67b04d tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] [instance: c9cc1ac7-06c6-415b-86ce-daf4849bfc05] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 591.328763] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2734925, 'name': CreateVM_Task} progress is 99%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 591.352283] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f56ccc99-23c5-4ed6-8376-04ed81d169c1 tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] Acquiring lock "f80df630-327b-4923-a785-5d2e48fe1f19" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 591.352547] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f56ccc99-23c5-4ed6-8376-04ed81d169c1 tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] Lock "f80df630-327b-4923-a785-5d2e48fe1f19" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 591.352763] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f56ccc99-23c5-4ed6-8376-04ed81d169c1 tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] Acquiring lock "f80df630-327b-4923-a785-5d2e48fe1f19-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 591.353061] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f56ccc99-23c5-4ed6-8376-04ed81d169c1 tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] Lock "f80df630-327b-4923-a785-5d2e48fe1f19-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 591.357160] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f56ccc99-23c5-4ed6-8376-04ed81d169c1 tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] Lock "f80df630-327b-4923-a785-5d2e48fe1f19-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 591.359543] env[63028]: INFO nova.compute.manager [None req-f56ccc99-23c5-4ed6-8376-04ed81d169c1 tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] [instance: 
f80df630-327b-4923-a785-5d2e48fe1f19] Terminating instance [ 591.375818] env[63028]: DEBUG oslo_vmware.api [None req-5557dd6d-8871-4b04-902a-5fb89e3ecdba tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Task: {'id': task-2734926, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073626} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 591.376104] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-5557dd6d-8871-4b04-902a-5fb89e3ecdba tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] [instance: 67440140-a619-41f2-98fe-eff23e8ad8a5] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 591.377151] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee81c868-a3cb-4747-be0e-959273b47b1d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.404746] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-5557dd6d-8871-4b04-902a-5fb89e3ecdba tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] [instance: 67440140-a619-41f2-98fe-eff23e8ad8a5] Reconfiguring VM instance instance-0000000b to attach disk [datastore1] 67440140-a619-41f2-98fe-eff23e8ad8a5/67440140-a619-41f2-98fe-eff23e8ad8a5.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 591.406131] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d23393ab-a050-4ae2-9e10-403d141a7f5e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.425405] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-a428f7da-b8aa-40cb-9fdc-1b56e9c461c4 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: f311a533-5c48-410b-ba3b-58f0032c8816] Creating Snapshot of the VM instance {{(pid=63028) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 591.426606] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-31643249-9b1b-4052-8782-5ce65e128959 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.434130] env[63028]: DEBUG oslo_vmware.api [None req-5557dd6d-8871-4b04-902a-5fb89e3ecdba tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Waiting for the task: (returnval){ [ 591.434130] env[63028]: value = "task-2734929" [ 591.434130] env[63028]: _type = "Task" [ 591.434130] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 591.435670] env[63028]: DEBUG oslo_vmware.api [None req-a428f7da-b8aa-40cb-9fdc-1b56e9c461c4 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Waiting for the task: (returnval){ [ 591.435670] env[63028]: value = "task-2734928" [ 591.435670] env[63028]: _type = "Task" [ 591.435670] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 591.448844] env[63028]: DEBUG oslo_vmware.api [None req-5557dd6d-8871-4b04-902a-5fb89e3ecdba tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Task: {'id': task-2734929, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 591.451822] env[63028]: DEBUG oslo_vmware.api [None req-a428f7da-b8aa-40cb-9fdc-1b56e9c461c4 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2734928, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 591.476385] env[63028]: DEBUG oslo_vmware.api [None req-c538ab0a-f047-4352-93f7-b849465565d9 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Task: {'id': task-2734927, 'name': PowerOnVM_Task, 'duration_secs': 0.474035} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 591.477019] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-c538ab0a-f047-4352-93f7-b849465565d9 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] [instance: 94b1bf30-0f9b-4197-99ff-6631a13ab2d1] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 591.477271] env[63028]: DEBUG nova.compute.manager [None req-c538ab0a-f047-4352-93f7-b849465565d9 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] [instance: 94b1bf30-0f9b-4197-99ff-6631a13ab2d1] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 591.478221] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5812b87b-7e2c-4123-b044-9a09a92f2bdc {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.482046] env[63028]: DEBUG nova.network.neutron [req-169e7a38-ed74-4b4a-85c4-9d5208060cc7 req-952898ba-0fe2-4e87-a8eb-cd55ebe247e5 service nova] [instance: 67440140-a619-41f2-98fe-eff23e8ad8a5] Updated VIF entry in instance network info cache for port f7a43b4b-f49a-4b79-b488-55fd4852195c. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 591.482400] env[63028]: DEBUG nova.network.neutron [req-169e7a38-ed74-4b4a-85c4-9d5208060cc7 req-952898ba-0fe2-4e87-a8eb-cd55ebe247e5 service nova] [instance: 67440140-a619-41f2-98fe-eff23e8ad8a5] Updating instance_info_cache with network_info: [{"id": "f7a43b4b-f49a-4b79-b488-55fd4852195c", "address": "fa:16:3e:ac:e7:06", "network": {"id": "47c482bc-2ff1-431d-8910-0bf36def79a2", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.116", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "96661ac8d4f04d6e97eea4809b444133", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56136ef6-99d7-4562-9a9f-d66fec951c5c", "external-id": "nsx-vlan-transportzone-32", "segmentation_id": 32, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7a43b4b-f4", "ovs_interfaceid": "f7a43b4b-f49a-4b79-b488-55fd4852195c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 591.500248] env[63028]: DEBUG nova.compute.utils [None req-70c27873-d5f4-48d7-97d8-490423e085b3 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 591.505703] env[63028]: DEBUG nova.compute.manager [None req-70c27873-d5f4-48d7-97d8-490423e085b3 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] [instance: 0dbafad1-ab21-439d-bc8e-e447ac33304e] Not allocating networking since 'none' was specified. {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 591.829887] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2734925, 'name': CreateVM_Task, 'duration_secs': 0.517338} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 591.830091] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d663c2df-ae54-4c50-a70f-e2180700c700] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 591.830799] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c0eb0318-b4ad-46b7-97d9-49268588965f tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 591.831012] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c0eb0318-b4ad-46b7-97d9-49268588965f tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 591.831698] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c0eb0318-b4ad-46b7-97d9-49268588965f tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 591.831955] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b22a3496-1e39-483f-a960-31b125851ba0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.841304] env[63028]: DEBUG oslo_vmware.api [None req-c0eb0318-b4ad-46b7-97d9-49268588965f tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Waiting for the task: (returnval){ [ 591.841304] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]520fb710-2443-e6f6-6b89-1d344fbe27e6" [ 591.841304] env[63028]: _type = "Task" [ 591.841304] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 591.850285] env[63028]: DEBUG oslo_vmware.api [None req-c0eb0318-b4ad-46b7-97d9-49268588965f tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]520fb710-2443-e6f6-6b89-1d344fbe27e6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 591.863289] env[63028]: DEBUG nova.compute.manager [None req-f56ccc99-23c5-4ed6-8376-04ed81d169c1 tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] [instance: f80df630-327b-4923-a785-5d2e48fe1f19] Start destroying the instance on the hypervisor. 
{{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 591.863549] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-f56ccc99-23c5-4ed6-8376-04ed81d169c1 tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] [instance: f80df630-327b-4923-a785-5d2e48fe1f19] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 591.864477] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d11f3b48-79f2-4477-8f78-b4f8f4af1c7d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.873957] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-f56ccc99-23c5-4ed6-8376-04ed81d169c1 tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] [instance: f80df630-327b-4923-a785-5d2e48fe1f19] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 591.874560] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-73dc03e5-7812-49d3-bf75-9fd06baec00b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.883615] env[63028]: DEBUG oslo_vmware.api [None req-f56ccc99-23c5-4ed6-8376-04ed81d169c1 tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] Waiting for the task: (returnval){ [ 591.883615] env[63028]: value = "task-2734930" [ 591.883615] env[63028]: _type = "Task" [ 591.883615] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 591.894822] env[63028]: DEBUG oslo_vmware.api [None req-f56ccc99-23c5-4ed6-8376-04ed81d169c1 tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] Task: {'id': task-2734930, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 591.949705] env[63028]: DEBUG oslo_vmware.api [None req-5557dd6d-8871-4b04-902a-5fb89e3ecdba tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Task: {'id': task-2734929, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 591.952729] env[63028]: DEBUG oslo_vmware.api [None req-a428f7da-b8aa-40cb-9fdc-1b56e9c461c4 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2734928, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 591.985539] env[63028]: DEBUG oslo_concurrency.lockutils [req-169e7a38-ed74-4b4a-85c4-9d5208060cc7 req-952898ba-0fe2-4e87-a8eb-cd55ebe247e5 service nova] Releasing lock "refresh_cache-67440140-a619-41f2-98fe-eff23e8ad8a5" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 591.985809] env[63028]: DEBUG nova.compute.manager [req-169e7a38-ed74-4b4a-85c4-9d5208060cc7 req-952898ba-0fe2-4e87-a8eb-cd55ebe247e5 service nova] [instance: 679fca11-7390-4596-ab74-2f82a6cf8858] Received event network-vif-deleted-4d374544-7b1e-450d-a5ad-0ee73b15715d {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 591.986050] env[63028]: DEBUG nova.compute.manager [req-169e7a38-ed74-4b4a-85c4-9d5208060cc7 req-952898ba-0fe2-4e87-a8eb-cd55ebe247e5 service nova] [instance: a167df01-05e4-453d-8800-9c104d912474] Received event network-vif-deleted-10ff9499-257b-4aba-99fa-2ca0aaedc466 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 592.001866] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c538ab0a-f047-4352-93f7-b849465565d9 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 592.004660] env[63028]: DEBUG nova.compute.manager [None req-70c27873-d5f4-48d7-97d8-490423e085b3 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] [instance: 0dbafad1-ab21-439d-bc8e-e447ac33304e] Start building block device mappings for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 592.338463] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35bdd29c-e46d-46ed-99dc-09c5a8d48c6f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.354119] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67c52cd2-7d66-4221-b6e4-462e7c765582 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.357484] env[63028]: DEBUG oslo_vmware.api [None req-c0eb0318-b4ad-46b7-97d9-49268588965f tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]520fb710-2443-e6f6-6b89-1d344fbe27e6, 'name': SearchDatastore_Task, 'duration_secs': 0.04467} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 592.357775] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c0eb0318-b4ad-46b7-97d9-49268588965f tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 592.357998] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c0eb0318-b4ad-46b7-97d9-49268588965f tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] [instance: d663c2df-ae54-4c50-a70f-e2180700c700] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 592.358240] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c0eb0318-b4ad-46b7-97d9-49268588965f tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 592.358378] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c0eb0318-b4ad-46b7-97d9-49268588965f tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 592.358550] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-c0eb0318-b4ad-46b7-97d9-49268588965f tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 592.359679] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0ee1a8bd-ef2c-499e-bbad-d7580edaccf1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.391671] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-202986f9-3a99-47d3-ad79-d50421be45c9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.394853] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-c0eb0318-b4ad-46b7-97d9-49268588965f tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 592.395709] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c0eb0318-b4ad-46b7-97d9-49268588965f tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 592.396039] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cb9c67ec-5d04-454f-89f0-e0c7ec79014f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.408588] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f96e6b9d-98c5-469d-b545-dfc3c2fdd438 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.412813] env[63028]: DEBUG oslo_vmware.api [None req-c0eb0318-b4ad-46b7-97d9-49268588965f tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Waiting for the task: (returnval){ [ 592.412813] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52589b24-0a83-106d-b49d-d43ab2247627" [ 592.412813] env[63028]: _type = "Task" [ 592.412813] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 592.413067] env[63028]: DEBUG oslo_vmware.api [None req-f56ccc99-23c5-4ed6-8376-04ed81d169c1 tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] Task: {'id': task-2734930, 'name': PowerOffVM_Task, 'duration_secs': 0.421005} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 592.413395] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-f56ccc99-23c5-4ed6-8376-04ed81d169c1 tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] [instance: f80df630-327b-4923-a785-5d2e48fe1f19] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 592.413669] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-f56ccc99-23c5-4ed6-8376-04ed81d169c1 tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] [instance: f80df630-327b-4923-a785-5d2e48fe1f19] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 592.419081] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d7b975e2-cff4-433e-9d2e-2466d56653dc {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.428577] env[63028]: DEBUG nova.compute.provider_tree [None req-d380ac33-3a3f-419f-b4d1-ad7edb67b04d tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 592.438091] env[63028]: DEBUG oslo_vmware.api [None req-c0eb0318-b4ad-46b7-97d9-49268588965f tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52589b24-0a83-106d-b49d-d43ab2247627, 'name': SearchDatastore_Task, 'duration_secs': 0.014172} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 592.442472] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-df1fb0e0-180a-4325-98af-08f40b4c54ee {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.457744] env[63028]: DEBUG oslo_vmware.api [None req-5557dd6d-8871-4b04-902a-5fb89e3ecdba tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Task: {'id': task-2734929, 'name': ReconfigVM_Task, 'duration_secs': 0.909969} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 592.464511] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-5557dd6d-8871-4b04-902a-5fb89e3ecdba tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] [instance: 67440140-a619-41f2-98fe-eff23e8ad8a5] Reconfigured VM instance instance-0000000b to attach disk [datastore1] 67440140-a619-41f2-98fe-eff23e8ad8a5/67440140-a619-41f2-98fe-eff23e8ad8a5.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 592.464511] env[63028]: DEBUG oslo_vmware.api [None req-c0eb0318-b4ad-46b7-97d9-49268588965f tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Waiting for the task: (returnval){ [ 592.464511] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52637066-aecf-1936-8b4d-9cba29e2c3b4" [ 592.464511] env[63028]: _type = "Task" [ 592.464511] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 592.464511] env[63028]: DEBUG oslo_vmware.api [None req-a428f7da-b8aa-40cb-9fdc-1b56e9c461c4 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2734928, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 592.464511] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8d1e3832-fca4-48af-a8fb-384fccdbcbdd {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.474289] env[63028]: DEBUG oslo_vmware.api [None req-c0eb0318-b4ad-46b7-97d9-49268588965f tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52637066-aecf-1936-8b4d-9cba29e2c3b4, 'name': SearchDatastore_Task, 'duration_secs': 0.017201} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 592.475648] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c0eb0318-b4ad-46b7-97d9-49268588965f tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 592.475875] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0eb0318-b4ad-46b7-97d9-49268588965f tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] d663c2df-ae54-4c50-a70f-e2180700c700/d663c2df-ae54-4c50-a70f-e2180700c700.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 592.476219] env[63028]: DEBUG oslo_vmware.api [None req-5557dd6d-8871-4b04-902a-5fb89e3ecdba tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Waiting for the task: (returnval){ [ 592.476219] env[63028]: value = "task-2734932" [ 592.476219] env[63028]: _type = "Task" [ 592.476219] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 592.476442] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1c74dcf2-70a9-4ea1-bbe0-e71172c74089 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.488271] env[63028]: DEBUG oslo_vmware.api [None req-5557dd6d-8871-4b04-902a-5fb89e3ecdba tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Task: {'id': task-2734932, 'name': Rename_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 592.490617] env[63028]: DEBUG oslo_vmware.api [None req-c0eb0318-b4ad-46b7-97d9-49268588965f tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Waiting for the task: (returnval){ [ 592.490617] env[63028]: value = "task-2734933" [ 592.490617] env[63028]: _type = "Task" [ 592.490617] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 592.504982] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-f56ccc99-23c5-4ed6-8376-04ed81d169c1 tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] [instance: f80df630-327b-4923-a785-5d2e48fe1f19] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 592.504982] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-f56ccc99-23c5-4ed6-8376-04ed81d169c1 tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] [instance: f80df630-327b-4923-a785-5d2e48fe1f19] Deleting contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 592.504982] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-f56ccc99-23c5-4ed6-8376-04ed81d169c1 tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] Deleting the datastore file [datastore2] f80df630-327b-4923-a785-5d2e48fe1f19 {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 592.504982] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-822d3c1e-7c03-4e9c-889f-f07cd71add55 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.521101] env[63028]: DEBUG oslo_vmware.api [None req-f56ccc99-23c5-4ed6-8376-04ed81d169c1 tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] Waiting for the task: (returnval){ [ 592.521101] env[63028]: value = "task-2734934" [ 592.521101] env[63028]: _type = "Task" [ 592.521101] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 592.528391] env[63028]: DEBUG oslo_vmware.api [None req-f56ccc99-23c5-4ed6-8376-04ed81d169c1 tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] Task: {'id': task-2734934, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 592.933527] env[63028]: DEBUG nova.scheduler.client.report [None req-d380ac33-3a3f-419f-b4d1-ad7edb67b04d tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 592.956835] env[63028]: DEBUG oslo_vmware.api [None req-a428f7da-b8aa-40cb-9fdc-1b56e9c461c4 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2734928, 'name': CreateSnapshot_Task, 'duration_secs': 1.189986} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 592.956835] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-a428f7da-b8aa-40cb-9fdc-1b56e9c461c4 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: f311a533-5c48-410b-ba3b-58f0032c8816] Created Snapshot of the VM instance {{(pid=63028) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 592.957838] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68462a9a-2e1f-469d-9b9d-1f5ca383a849 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.995136] env[63028]: DEBUG oslo_vmware.api [None req-5557dd6d-8871-4b04-902a-5fb89e3ecdba tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Task: {'id': task-2734932, 'name': Rename_Task, 'duration_secs': 0.19146} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 592.999278] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-5557dd6d-8871-4b04-902a-5fb89e3ecdba tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] [instance: 67440140-a619-41f2-98fe-eff23e8ad8a5] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 593.000635] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-dbd8e7c6-6df3-4ae4-8ca4-66987fd8d677 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.010233] env[63028]: DEBUG oslo_vmware.api [None req-c0eb0318-b4ad-46b7-97d9-49268588965f tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Task: {'id': task-2734933, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 593.012191] env[63028]: DEBUG oslo_vmware.api [None req-5557dd6d-8871-4b04-902a-5fb89e3ecdba tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Waiting for the task: (returnval){ [ 593.012191] env[63028]: value = "task-2734935" [ 593.012191] env[63028]: _type = "Task" [ 593.012191] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 593.025470] env[63028]: DEBUG nova.compute.manager [None req-70c27873-d5f4-48d7-97d8-490423e085b3 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] [instance: 0dbafad1-ab21-439d-bc8e-e447ac33304e] Start spawning the instance on the hypervisor. {{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 593.027927] env[63028]: DEBUG oslo_vmware.api [None req-5557dd6d-8871-4b04-902a-5fb89e3ecdba tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Task: {'id': task-2734935, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 593.038900] env[63028]: DEBUG oslo_vmware.api [None req-f56ccc99-23c5-4ed6-8376-04ed81d169c1 tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] Task: {'id': task-2734934, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.181859} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 593.038900] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-f56ccc99-23c5-4ed6-8376-04ed81d169c1 tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 593.039455] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-f56ccc99-23c5-4ed6-8376-04ed81d169c1 tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] [instance: f80df630-327b-4923-a785-5d2e48fe1f19] Deleted contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 593.039455] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-f56ccc99-23c5-4ed6-8376-04ed81d169c1 tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] [instance: f80df630-327b-4923-a785-5d2e48fe1f19] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 593.039455] env[63028]: INFO nova.compute.manager [None req-f56ccc99-23c5-4ed6-8376-04ed81d169c1 tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] [instance: f80df630-327b-4923-a785-5d2e48fe1f19] Took 1.18 seconds to destroy the instance on the hypervisor. [ 593.039706] env[63028]: DEBUG oslo.service.loopingcall [None req-f56ccc99-23c5-4ed6-8376-04ed81d169c1 tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 593.039924] env[63028]: DEBUG nova.compute.manager [-] [instance: f80df630-327b-4923-a785-5d2e48fe1f19] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 593.040024] env[63028]: DEBUG nova.network.neutron [-] [instance: f80df630-327b-4923-a785-5d2e48fe1f19] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 593.065856] env[63028]: DEBUG nova.virt.hardware [None req-70c27873-d5f4-48d7-97d8-490423e085b3 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 593.066249] env[63028]: DEBUG nova.virt.hardware [None req-70c27873-d5f4-48d7-97d8-490423e085b3 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 593.066428] env[63028]: DEBUG nova.virt.hardware [None req-70c27873-d5f4-48d7-97d8-490423e085b3 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 593.066619] env[63028]: DEBUG nova.virt.hardware [None req-70c27873-d5f4-48d7-97d8-490423e085b3 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 593.066928] env[63028]: DEBUG nova.virt.hardware [None req-70c27873-d5f4-48d7-97d8-490423e085b3 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 593.067121] env[63028]: DEBUG nova.virt.hardware [None req-70c27873-d5f4-48d7-97d8-490423e085b3 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 593.067457] env[63028]: DEBUG nova.virt.hardware [None req-70c27873-d5f4-48d7-97d8-490423e085b3 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:573}} [ 593.067538] env[63028]: DEBUG nova.virt.hardware [None req-70c27873-d5f4-48d7-97d8-490423e085b3 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 593.067660] env[63028]: DEBUG nova.virt.hardware [None req-70c27873-d5f4-48d7-97d8-490423e085b3 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 593.067908] env[63028]: DEBUG nova.virt.hardware [None req-70c27873-d5f4-48d7-97d8-490423e085b3 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 593.068095] env[63028]: DEBUG nova.virt.hardware [None req-70c27873-d5f4-48d7-97d8-490423e085b3 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 593.069608] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57e57acd-6de8-4d06-9351-8799e34338b8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.080030] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-624eeb9e-5e22-40eb-a24d-64a1a7b69666 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.100679] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-70c27873-d5f4-48d7-97d8-490423e085b3 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] [instance: 0dbafad1-ab21-439d-bc8e-e447ac33304e] Instance VIF info [] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 593.106712] env[63028]: DEBUG oslo.service.loopingcall [None req-70c27873-d5f4-48d7-97d8-490423e085b3 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 593.107539] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0dbafad1-ab21-439d-bc8e-e447ac33304e] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 593.107803] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b955ba54-24be-4a82-9041-c679f30ebe28 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.128680] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 593.128680] env[63028]: value = "task-2734936" [ 593.128680] env[63028]: _type = "Task" [ 593.128680] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 593.141062] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2734936, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 593.163191] env[63028]: DEBUG nova.network.neutron [None req-19bb96ef-6a90-46d6-b14e-187dceb20ef8 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: 5a330ed9-c106-49f2-b524-a424e717b5ce] Successfully updated port: 6dc62708-050a-40f3-b99a-f51b25937806 {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 593.443157] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d380ac33-3a3f-419f-b4d1-ad7edb67b04d tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.448s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 593.443754] env[63028]: DEBUG nova.compute.manager [None req-d380ac33-3a3f-419f-b4d1-ad7edb67b04d tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] [instance: c9cc1ac7-06c6-415b-86ce-daf4849bfc05] Start building networks asynchronously for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 593.446490] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4b7d0a7d-24c8-4fd0-b2fc-d929be4c8481 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.513s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 593.448174] env[63028]: INFO nova.compute.claims [None req-4b7d0a7d-24c8-4fd0-b2fc-d929be4c8481 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] [instance: c7a3f2c6-8368-49cc-9737-ea1d836f1783] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 593.486294] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-a428f7da-b8aa-40cb-9fdc-1b56e9c461c4 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: f311a533-5c48-410b-ba3b-58f0032c8816] Creating linked-clone VM from snapshot {{(pid=63028) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 593.487372] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-4bc1c743-6d46-45c4-8cdf-47324633007c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.501472] env[63028]: DEBUG oslo_vmware.api [None req-a428f7da-b8aa-40cb-9fdc-1b56e9c461c4 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Waiting for the task: (returnval){ [ 593.501472] env[63028]: value = "task-2734937" [ 593.501472] env[63028]: _type = "Task" [ 593.501472] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 593.510741] env[63028]: DEBUG oslo_vmware.api [None req-c0eb0318-b4ad-46b7-97d9-49268588965f tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Task: {'id': task-2734933, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.652287} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 593.511462] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0eb0318-b4ad-46b7-97d9-49268588965f tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] d663c2df-ae54-4c50-a70f-e2180700c700/d663c2df-ae54-4c50-a70f-e2180700c700.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 593.511678] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c0eb0318-b4ad-46b7-97d9-49268588965f tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] [instance: d663c2df-ae54-4c50-a70f-e2180700c700] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 593.512250] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a188da07-6d1f-4aba-aaa0-428fd901da56 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.521085] env[63028]: DEBUG oslo_vmware.api [None req-a428f7da-b8aa-40cb-9fdc-1b56e9c461c4 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2734937, 'name': CloneVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 593.542221] env[63028]: DEBUG oslo_vmware.api [None req-5557dd6d-8871-4b04-902a-5fb89e3ecdba tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Task: {'id': task-2734935, 'name': PowerOnVM_Task} progress is 1%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 593.542553] env[63028]: DEBUG oslo_vmware.api [None req-c0eb0318-b4ad-46b7-97d9-49268588965f tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Waiting for the task: (returnval){ [ 593.542553] env[63028]: value = "task-2734938" [ 593.542553] env[63028]: _type = "Task" [ 593.542553] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 593.556574] env[63028]: DEBUG oslo_vmware.api [None req-c0eb0318-b4ad-46b7-97d9-49268588965f tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Task: {'id': task-2734938, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 593.647388] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2734936, 'name': CreateVM_Task, 'duration_secs': 0.415232} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 593.647490] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0dbafad1-ab21-439d-bc8e-e447ac33304e] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 593.647984] env[63028]: DEBUG oslo_concurrency.lockutils [None req-70c27873-d5f4-48d7-97d8-490423e085b3 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 593.648282] env[63028]: DEBUG oslo_concurrency.lockutils [None req-70c27873-d5f4-48d7-97d8-490423e085b3 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 593.648623] env[63028]: DEBUG oslo_concurrency.lockutils [None req-70c27873-d5f4-48d7-97d8-490423e085b3 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 593.649910] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c283c5c7-6c52-4fe2-8ed7-7f76d8a21a14 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.657330] env[63028]: DEBUG oslo_vmware.api [None req-70c27873-d5f4-48d7-97d8-490423e085b3 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Waiting for the task: (returnval){ [ 593.657330] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52742e17-ef04-01eb-4cc1-11bef43e54f0" [ 593.657330] env[63028]: _type = "Task" [ 593.657330] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 593.667641] env[63028]: DEBUG oslo_concurrency.lockutils [None req-19bb96ef-6a90-46d6-b14e-187dceb20ef8 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Acquiring lock "refresh_cache-5a330ed9-c106-49f2-b524-a424e717b5ce" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 593.667641] env[63028]: DEBUG oslo_concurrency.lockutils [None req-19bb96ef-6a90-46d6-b14e-187dceb20ef8 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Acquired lock "refresh_cache-5a330ed9-c106-49f2-b524-a424e717b5ce" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 593.667641] env[63028]: DEBUG nova.network.neutron [None req-19bb96ef-6a90-46d6-b14e-187dceb20ef8 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: 5a330ed9-c106-49f2-b524-a424e717b5ce] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 593.675608] env[63028]: DEBUG oslo_vmware.api [None req-70c27873-d5f4-48d7-97d8-490423e085b3 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52742e17-ef04-01eb-4cc1-11bef43e54f0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 593.920396] env[63028]: DEBUG nova.compute.manager [req-d3ca952a-3b02-4663-9385-f58c7bdd7b04 req-c60d6fec-1ea9-4616-ad4a-08fb1da4ee53 service nova] [instance: d663c2df-ae54-4c50-a70f-e2180700c700] Received event network-changed-fea60f3c-e539-418e-abfc-a7a41c223938 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 593.920762] env[63028]: DEBUG nova.compute.manager [req-d3ca952a-3b02-4663-9385-f58c7bdd7b04 req-c60d6fec-1ea9-4616-ad4a-08fb1da4ee53 service nova] [instance: d663c2df-ae54-4c50-a70f-e2180700c700] Refreshing instance network info cache due to event network-changed-fea60f3c-e539-418e-abfc-a7a41c223938. 
{{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 593.922035] env[63028]: DEBUG oslo_concurrency.lockutils [req-d3ca952a-3b02-4663-9385-f58c7bdd7b04 req-c60d6fec-1ea9-4616-ad4a-08fb1da4ee53 service nova] Acquiring lock "refresh_cache-d663c2df-ae54-4c50-a70f-e2180700c700" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 593.923024] env[63028]: DEBUG oslo_concurrency.lockutils [req-d3ca952a-3b02-4663-9385-f58c7bdd7b04 req-c60d6fec-1ea9-4616-ad4a-08fb1da4ee53 service nova] Acquired lock "refresh_cache-d663c2df-ae54-4c50-a70f-e2180700c700" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 593.923024] env[63028]: DEBUG nova.network.neutron [req-d3ca952a-3b02-4663-9385-f58c7bdd7b04 req-c60d6fec-1ea9-4616-ad4a-08fb1da4ee53 service nova] [instance: d663c2df-ae54-4c50-a70f-e2180700c700] Refreshing network info cache for port fea60f3c-e539-418e-abfc-a7a41c223938 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 593.952833] env[63028]: DEBUG nova.compute.utils [None req-d380ac33-3a3f-419f-b4d1-ad7edb67b04d tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 593.959304] env[63028]: DEBUG nova.compute.manager [None req-d380ac33-3a3f-419f-b4d1-ad7edb67b04d tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] [instance: c9cc1ac7-06c6-415b-86ce-daf4849bfc05] Allocating IP information in the background. {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 593.959304] env[63028]: DEBUG nova.network.neutron [None req-d380ac33-3a3f-419f-b4d1-ad7edb67b04d tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] [instance: c9cc1ac7-06c6-415b-86ce-daf4849bfc05] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 594.015481] env[63028]: DEBUG oslo_vmware.api [None req-a428f7da-b8aa-40cb-9fdc-1b56e9c461c4 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2734937, 'name': CloneVM_Task} progress is 94%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 594.031729] env[63028]: DEBUG oslo_vmware.api [None req-5557dd6d-8871-4b04-902a-5fb89e3ecdba tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Task: {'id': task-2734935, 'name': PowerOnVM_Task} progress is 91%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 594.054203] env[63028]: DEBUG oslo_vmware.api [None req-c0eb0318-b4ad-46b7-97d9-49268588965f tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Task: {'id': task-2734938, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.142734} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 594.054203] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c0eb0318-b4ad-46b7-97d9-49268588965f tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] [instance: d663c2df-ae54-4c50-a70f-e2180700c700] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 594.055132] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c781f7f-a0d2-4bec-bc34-ff2500ef9c5b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.082360] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-c0eb0318-b4ad-46b7-97d9-49268588965f tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] [instance: d663c2df-ae54-4c50-a70f-e2180700c700] Reconfiguring VM instance instance-0000000c to attach disk [datastore1] d663c2df-ae54-4c50-a70f-e2180700c700/d663c2df-ae54-4c50-a70f-e2180700c700.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 594.082942] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4e2fefe8-24af-430c-b751-4db610420df7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.107354] env[63028]: DEBUG oslo_vmware.api [None req-c0eb0318-b4ad-46b7-97d9-49268588965f tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Waiting for the task: (returnval){ [ 594.107354] env[63028]: value = "task-2734939" [ 594.107354] env[63028]: _type = "Task" [ 594.107354] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 594.116613] env[63028]: DEBUG oslo_vmware.api [None req-c0eb0318-b4ad-46b7-97d9-49268588965f tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Task: {'id': task-2734939, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 594.169575] env[63028]: DEBUG oslo_vmware.api [None req-70c27873-d5f4-48d7-97d8-490423e085b3 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52742e17-ef04-01eb-4cc1-11bef43e54f0, 'name': SearchDatastore_Task, 'duration_secs': 0.015079} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 594.170217] env[63028]: DEBUG oslo_concurrency.lockutils [None req-70c27873-d5f4-48d7-97d8-490423e085b3 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 594.170513] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-70c27873-d5f4-48d7-97d8-490423e085b3 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] [instance: 0dbafad1-ab21-439d-bc8e-e447ac33304e] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 594.170809] env[63028]: DEBUG oslo_concurrency.lockutils [None req-70c27873-d5f4-48d7-97d8-490423e085b3 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 594.171056] env[63028]: DEBUG oslo_concurrency.lockutils [None req-70c27873-d5f4-48d7-97d8-490423e085b3 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 594.171257] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-70c27873-d5f4-48d7-97d8-490423e085b3 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 594.171551] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a5f38f2e-ce82-4914-bd34-457655184d93 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.188972] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-70c27873-d5f4-48d7-97d8-490423e085b3 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 594.189158] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-70c27873-d5f4-48d7-97d8-490423e085b3 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 594.190443] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9266398a-89f3-4c26-8b2e-4f8fab83df09 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.199812] env[63028]: DEBUG oslo_vmware.api [None req-70c27873-d5f4-48d7-97d8-490423e085b3 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Waiting for the task: (returnval){ [ 594.199812] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52613163-8245-f013-9b37-918b25aca10c" [ 594.199812] env[63028]: _type = "Task" [ 594.199812] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 594.210846] env[63028]: DEBUG oslo_vmware.api [None req-70c27873-d5f4-48d7-97d8-490423e085b3 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52613163-8245-f013-9b37-918b25aca10c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 594.226495] env[63028]: DEBUG nova.policy [None req-d380ac33-3a3f-419f-b4d1-ad7edb67b04d tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e5eeb90d16904636b511c5ea6f4bf8af', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f4bc908c43a648f79f88812f86dddb6c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 594.360504] env[63028]: DEBUG nova.network.neutron [None req-19bb96ef-6a90-46d6-b14e-187dceb20ef8 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: 5a330ed9-c106-49f2-b524-a424e717b5ce] Instance cache missing network info. {{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 594.459334] env[63028]: DEBUG nova.compute.manager [None req-d380ac33-3a3f-419f-b4d1-ad7edb67b04d tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] [instance: c9cc1ac7-06c6-415b-86ce-daf4849bfc05] Start building block device mappings for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 594.519136] env[63028]: DEBUG oslo_vmware.api [None req-a428f7da-b8aa-40cb-9fdc-1b56e9c461c4 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2734937, 'name': CloneVM_Task} progress is 94%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 594.536847] env[63028]: DEBUG oslo_vmware.api [None req-5557dd6d-8871-4b04-902a-5fb89e3ecdba tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Task: {'id': task-2734935, 'name': PowerOnVM_Task, 'duration_secs': 1.038882} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 594.539285] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-5557dd6d-8871-4b04-902a-5fb89e3ecdba tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] [instance: 67440140-a619-41f2-98fe-eff23e8ad8a5] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 594.539521] env[63028]: INFO nova.compute.manager [None req-5557dd6d-8871-4b04-902a-5fb89e3ecdba tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] [instance: 67440140-a619-41f2-98fe-eff23e8ad8a5] Took 11.05 seconds to spawn the instance on the hypervisor. [ 594.539705] env[63028]: DEBUG nova.compute.manager [None req-5557dd6d-8871-4b04-902a-5fb89e3ecdba tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] [instance: 67440140-a619-41f2-98fe-eff23e8ad8a5] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 594.544802] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f22a54b-6a7c-4e9e-a96e-9b76c8b7f3a4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.632232] env[63028]: DEBUG oslo_vmware.api [None req-c0eb0318-b4ad-46b7-97d9-49268588965f tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Task: {'id': task-2734939, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 594.717624] env[63028]: DEBUG oslo_vmware.api [None req-70c27873-d5f4-48d7-97d8-490423e085b3 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52613163-8245-f013-9b37-918b25aca10c, 'name': SearchDatastore_Task, 'duration_secs': 0.024958} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 594.722486] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3d7b23d5-2c6e-44a5-bb0a-397cfcd1d7bd {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.729954] env[63028]: DEBUG oslo_vmware.api [None req-70c27873-d5f4-48d7-97d8-490423e085b3 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Waiting for the task: (returnval){ [ 594.729954] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52ed9218-4761-295f-51be-4f284c49a6d2" [ 594.729954] env[63028]: _type = "Task" [ 594.729954] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 594.740555] env[63028]: DEBUG oslo_vmware.api [None req-70c27873-d5f4-48d7-97d8-490423e085b3 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52ed9218-4761-295f-51be-4f284c49a6d2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 594.823589] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60a417a4-e8a4-4485-b970-062c62e7c24e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.832863] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ed05a31-ca2e-4ee2-a4d6-226aad3e98f7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.872022] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e5f28fe-562c-4ba9-8533-01751fb540a0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.876265] env[63028]: DEBUG nova.network.neutron [-] [instance: f80df630-327b-4923-a785-5d2e48fe1f19] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 594.884331] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11dca690-b7ba-4d44-9453-0c3576fc38f9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.905444] env[63028]: DEBUG nova.compute.provider_tree [None req-4b7d0a7d-24c8-4fd0-b2fc-d929be4c8481 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 595.018192] env[63028]: DEBUG oslo_vmware.api [None req-a428f7da-b8aa-40cb-9fdc-1b56e9c461c4 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2734937, 'name': CloneVM_Task} progress is 95%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 595.035458] env[63028]: DEBUG nova.network.neutron [None req-d380ac33-3a3f-419f-b4d1-ad7edb67b04d tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] [instance: c9cc1ac7-06c6-415b-86ce-daf4849bfc05] Successfully created port: b8893183-7887-4add-9ad9-389b74e74c81 {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 595.072421] env[63028]: INFO nova.compute.manager [None req-5557dd6d-8871-4b04-902a-5fb89e3ecdba tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] [instance: 67440140-a619-41f2-98fe-eff23e8ad8a5] Took 21.42 seconds to build instance. [ 595.122806] env[63028]: DEBUG oslo_vmware.api [None req-c0eb0318-b4ad-46b7-97d9-49268588965f tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Task: {'id': task-2734939, 'name': ReconfigVM_Task, 'duration_secs': 0.954792} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 595.125019] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-c0eb0318-b4ad-46b7-97d9-49268588965f tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] [instance: d663c2df-ae54-4c50-a70f-e2180700c700] Reconfigured VM instance instance-0000000c to attach disk [datastore1] d663c2df-ae54-4c50-a70f-e2180700c700/d663c2df-ae54-4c50-a70f-e2180700c700.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 595.125019] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-09b1140b-542b-4e22-8c82-516f8e07a5a7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.134072] env[63028]: DEBUG oslo_vmware.api [None req-c0eb0318-b4ad-46b7-97d9-49268588965f tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Waiting for the task: (returnval){ [ 595.134072] env[63028]: value = "task-2734940" [ 595.134072] env[63028]: _type = "Task" [ 595.134072] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 595.147472] env[63028]: DEBUG oslo_vmware.api [None req-c0eb0318-b4ad-46b7-97d9-49268588965f tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Task: {'id': task-2734940, 'name': Rename_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 595.188021] env[63028]: DEBUG nova.network.neutron [None req-19bb96ef-6a90-46d6-b14e-187dceb20ef8 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: 5a330ed9-c106-49f2-b524-a424e717b5ce] Updating instance_info_cache with network_info: [{"id": "6dc62708-050a-40f3-b99a-f51b25937806", "address": "fa:16:3e:1a:f4:86", "network": {"id": "350f4b14-d211-48c8-b1dd-06a0dd5805d1", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-987689362-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "b3d1798e23e64325a3b6f699cd27d98f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "60567ee6-01d0-4b16-9c7a-4a896827d6eb", "external-id": "nsx-vlan-transportzone-28", "segmentation_id": 28, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6dc62708-05", "ovs_interfaceid": "6dc62708-050a-40f3-b99a-f51b25937806", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 595.247726] env[63028]: DEBUG oslo_vmware.api [None req-70c27873-d5f4-48d7-97d8-490423e085b3 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52ed9218-4761-295f-51be-4f284c49a6d2, 'name': SearchDatastore_Task, 
'duration_secs': 0.013933} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 595.248080] env[63028]: DEBUG oslo_concurrency.lockutils [None req-70c27873-d5f4-48d7-97d8-490423e085b3 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 595.248606] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-70c27873-d5f4-48d7-97d8-490423e085b3 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] 0dbafad1-ab21-439d-bc8e-e447ac33304e/0dbafad1-ab21-439d-bc8e-e447ac33304e.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 595.248901] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a3d0bc36-574d-4c20-af80-825bc868f0db {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.257715] env[63028]: DEBUG oslo_vmware.api [None req-70c27873-d5f4-48d7-97d8-490423e085b3 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Waiting for the task: (returnval){ [ 595.257715] env[63028]: value = "task-2734941" [ 595.257715] env[63028]: _type = "Task" [ 595.257715] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 595.267350] env[63028]: DEBUG oslo_vmware.api [None req-70c27873-d5f4-48d7-97d8-490423e085b3 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Task: {'id': task-2734941, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 595.380304] env[63028]: INFO nova.compute.manager [-] [instance: f80df630-327b-4923-a785-5d2e48fe1f19] Took 2.34 seconds to deallocate network for instance. [ 595.408543] env[63028]: DEBUG nova.scheduler.client.report [None req-4b7d0a7d-24c8-4fd0-b2fc-d929be4c8481 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 595.473776] env[63028]: DEBUG nova.compute.manager [None req-d380ac33-3a3f-419f-b4d1-ad7edb67b04d tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] [instance: c9cc1ac7-06c6-415b-86ce-daf4849bfc05] Start spawning the instance on the hypervisor. 
{{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 595.527534] env[63028]: DEBUG nova.virt.hardware [None req-d380ac33-3a3f-419f-b4d1-ad7edb67b04d tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 595.527816] env[63028]: DEBUG nova.virt.hardware [None req-d380ac33-3a3f-419f-b4d1-ad7edb67b04d tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 595.528053] env[63028]: DEBUG nova.virt.hardware [None req-d380ac33-3a3f-419f-b4d1-ad7edb67b04d tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 595.528297] env[63028]: DEBUG nova.virt.hardware [None req-d380ac33-3a3f-419f-b4d1-ad7edb67b04d tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 595.528387] env[63028]: DEBUG nova.virt.hardware [None req-d380ac33-3a3f-419f-b4d1-ad7edb67b04d tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 595.528530] env[63028]: DEBUG nova.virt.hardware [None req-d380ac33-3a3f-419f-b4d1-ad7edb67b04d tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 595.528733] env[63028]: DEBUG nova.virt.hardware [None req-d380ac33-3a3f-419f-b4d1-ad7edb67b04d tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 595.528883] env[63028]: DEBUG nova.virt.hardware [None req-d380ac33-3a3f-419f-b4d1-ad7edb67b04d tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 595.529862] env[63028]: DEBUG nova.virt.hardware [None 
req-d380ac33-3a3f-419f-b4d1-ad7edb67b04d tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 595.530841] env[63028]: DEBUG nova.virt.hardware [None req-d380ac33-3a3f-419f-b4d1-ad7edb67b04d tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 595.530841] env[63028]: DEBUG nova.virt.hardware [None req-d380ac33-3a3f-419f-b4d1-ad7edb67b04d tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 595.533314] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df21b4e4-c792-4af3-8c14-1c4e7313ac2d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.554746] env[63028]: DEBUG oslo_vmware.api [None req-a428f7da-b8aa-40cb-9fdc-1b56e9c461c4 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2734937, 'name': CloneVM_Task, 'duration_secs': 1.738906} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 595.556663] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-179150b0-5216-43f3-8135-5e2bbb8044a2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.563938] env[63028]: INFO nova.virt.vmwareapi.vmops [None req-a428f7da-b8aa-40cb-9fdc-1b56e9c461c4 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: f311a533-5c48-410b-ba3b-58f0032c8816] Created linked-clone VM from snapshot [ 595.565181] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-783c09e0-1f0d-4c91-bd6e-113dbe49093f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.590826] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5557dd6d-8871-4b04-902a-5fb89e3ecdba tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Lock "67440140-a619-41f2-98fe-eff23e8ad8a5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.956s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 595.591574] env[63028]: DEBUG nova.virt.vmwareapi.images [None req-a428f7da-b8aa-40cb-9fdc-1b56e9c461c4 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: f311a533-5c48-410b-ba3b-58f0032c8816] Uploading image b601ac28-7c34-45b7-b134-cb9218471bed {{(pid=63028) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 595.630711] env[63028]: DEBUG oslo_vmware.rw_handles [None req-a428f7da-b8aa-40cb-9fdc-1b56e9c461c4 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 
595.630711] env[63028]: value = "vm-550611" [ 595.630711] env[63028]: _type = "VirtualMachine" [ 595.630711] env[63028]: }. {{(pid=63028) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 595.632028] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-d9575d4f-1ea7-4f77-b9b8-698826571a21 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.650231] env[63028]: DEBUG oslo_vmware.api [None req-c0eb0318-b4ad-46b7-97d9-49268588965f tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Task: {'id': task-2734940, 'name': Rename_Task, 'duration_secs': 0.306049} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 595.652647] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0eb0318-b4ad-46b7-97d9-49268588965f tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] [instance: d663c2df-ae54-4c50-a70f-e2180700c700] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 595.653087] env[63028]: DEBUG oslo_vmware.rw_handles [None req-a428f7da-b8aa-40cb-9fdc-1b56e9c461c4 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Lease: (returnval){ [ 595.653087] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52d35714-fdd9-5197-b8fd-73a856f575c1" [ 595.653087] env[63028]: _type = "HttpNfcLease" [ 595.653087] env[63028]: } obtained for exporting VM: (result){ [ 595.653087] env[63028]: value = "vm-550611" [ 595.653087] env[63028]: _type = "VirtualMachine" [ 595.653087] env[63028]: }. {{(pid=63028) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 595.653334] env[63028]: DEBUG oslo_vmware.api [None req-a428f7da-b8aa-40cb-9fdc-1b56e9c461c4 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Waiting for the lease: (returnval){ [ 595.653334] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52d35714-fdd9-5197-b8fd-73a856f575c1" [ 595.653334] env[63028]: _type = "HttpNfcLease" [ 595.653334] env[63028]: } to be ready. {{(pid=63028) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 595.655831] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5f0522e8-3a51-44ba-8c78-141368afa2b6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.668671] env[63028]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 595.668671] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52d35714-fdd9-5197-b8fd-73a856f575c1" [ 595.668671] env[63028]: _type = "HttpNfcLease" [ 595.668671] env[63028]: } is ready. {{(pid=63028) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 595.670052] env[63028]: DEBUG oslo_vmware.rw_handles [None req-a428f7da-b8aa-40cb-9fdc-1b56e9c461c4 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 595.670052] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52d35714-fdd9-5197-b8fd-73a856f575c1" [ 595.670052] env[63028]: _type = "HttpNfcLease" [ 595.670052] env[63028]: }. 
{{(pid=63028) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 595.671554] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f59a28b1-5f52-4903-b6f5-90ad524cd830 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.679110] env[63028]: DEBUG oslo_vmware.api [None req-c0eb0318-b4ad-46b7-97d9-49268588965f tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Waiting for the task: (returnval){ [ 595.679110] env[63028]: value = "task-2734943" [ 595.679110] env[63028]: _type = "Task" [ 595.679110] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 595.690444] env[63028]: DEBUG oslo_concurrency.lockutils [None req-19bb96ef-6a90-46d6-b14e-187dceb20ef8 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Releasing lock "refresh_cache-5a330ed9-c106-49f2-b524-a424e717b5ce" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 595.690848] env[63028]: DEBUG nova.compute.manager [None req-19bb96ef-6a90-46d6-b14e-187dceb20ef8 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: 5a330ed9-c106-49f2-b524-a424e717b5ce] Instance network_info: |[{"id": "6dc62708-050a-40f3-b99a-f51b25937806", "address": "fa:16:3e:1a:f4:86", "network": {"id": "350f4b14-d211-48c8-b1dd-06a0dd5805d1", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-987689362-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "b3d1798e23e64325a3b6f699cd27d98f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "60567ee6-01d0-4b16-9c7a-4a896827d6eb", "external-id": "nsx-vlan-transportzone-28", "segmentation_id": 28, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6dc62708-05", "ovs_interfaceid": "6dc62708-050a-40f3-b99a-f51b25937806", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 595.691229] env[63028]: DEBUG oslo_vmware.rw_handles [None req-a428f7da-b8aa-40cb-9fdc-1b56e9c461c4 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52d9c42c-12f3-6631-7239-fac17995e4a1/disk-0.vmdk from lease info. {{(pid=63028) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 595.692381] env[63028]: DEBUG oslo_vmware.rw_handles [None req-a428f7da-b8aa-40cb-9fdc-1b56e9c461c4 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52d9c42c-12f3-6631-7239-fac17995e4a1/disk-0.vmdk for reading. 
{{(pid=63028) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 595.694877] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-19bb96ef-6a90-46d6-b14e-187dceb20ef8 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: 5a330ed9-c106-49f2-b524-a424e717b5ce] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1a:f4:86', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '60567ee6-01d0-4b16-9c7a-4a896827d6eb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6dc62708-050a-40f3-b99a-f51b25937806', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 595.707882] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-19bb96ef-6a90-46d6-b14e-187dceb20ef8 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Creating folder: Project (b3d1798e23e64325a3b6f699cd27d98f). Parent ref: group-v550570. {{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 595.714538] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-801e44f6-480a-4b33-9e1b-aafa6ae75cc1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.720030] env[63028]: DEBUG oslo_vmware.api [None req-c0eb0318-b4ad-46b7-97d9-49268588965f tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Task: {'id': task-2734943, 'name': PowerOnVM_Task} progress is 33%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 595.800018] env[63028]: DEBUG oslo_vmware.api [None req-70c27873-d5f4-48d7-97d8-490423e085b3 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Task: {'id': task-2734941, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 595.800194] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-19bb96ef-6a90-46d6-b14e-187dceb20ef8 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Created folder: Project (b3d1798e23e64325a3b6f699cd27d98f) in parent group-v550570. [ 595.800449] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-19bb96ef-6a90-46d6-b14e-187dceb20ef8 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Creating folder: Instances. Parent ref: group-v550612. {{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 595.800717] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0c358cea-87c6-4d3e-afe7-edd8ff086ab8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.818109] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-19bb96ef-6a90-46d6-b14e-187dceb20ef8 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Created folder: Instances in parent group-v550612. [ 595.818428] env[63028]: DEBUG oslo.service.loopingcall [None req-19bb96ef-6a90-46d6-b14e-187dceb20ef8 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 595.818639] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5a330ed9-c106-49f2-b524-a424e717b5ce] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 595.820157] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ce259c11-d63e-4985-b63a-c0dc95dc8858 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.845872] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 595.845872] env[63028]: value = "task-2734946" [ 595.845872] env[63028]: _type = "Task" [ 595.845872] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 595.855880] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9e63bf4e-0a01-438b-b252-bf0645d18ec0 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Acquiring lock "9b1cd3c1-5e9a-43b6-9efb-1baf879ae0c0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 595.856169] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9e63bf4e-0a01-438b-b252-bf0645d18ec0 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Lock "9b1cd3c1-5e9a-43b6-9efb-1baf879ae0c0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 595.865146] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2734946, 'name': CreateVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 595.893217] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f56ccc99-23c5-4ed6-8376-04ed81d169c1 tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 595.916642] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4b7d0a7d-24c8-4fd0-b2fc-d929be4c8481 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.470s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 595.917611] env[63028]: DEBUG nova.compute.manager [None req-4b7d0a7d-24c8-4fd0-b2fc-d929be4c8481 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] [instance: c7a3f2c6-8368-49cc-9737-ea1d836f1783] Start building networks asynchronously for instance. 
{{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 595.921743] env[63028]: DEBUG oslo_concurrency.lockutils [None req-fa9cf96e-1fa7-41d8-8fc5-4ef011c3de54 tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.758s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 595.924696] env[63028]: INFO nova.compute.claims [None req-fa9cf96e-1fa7-41d8-8fc5-4ef011c3de54 tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] [instance: 4a782483-c24e-44db-b697-856c69cc4a13] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 596.016927] env[63028]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-a151dacb-b020-4517-8355-1ad005d45ea6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.114501] env[63028]: DEBUG nova.network.neutron [req-d3ca952a-3b02-4663-9385-f58c7bdd7b04 req-c60d6fec-1ea9-4616-ad4a-08fb1da4ee53 service nova] [instance: d663c2df-ae54-4c50-a70f-e2180700c700] Updated VIF entry in instance network info cache for port fea60f3c-e539-418e-abfc-a7a41c223938. {{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 596.114868] env[63028]: DEBUG nova.network.neutron [req-d3ca952a-3b02-4663-9385-f58c7bdd7b04 req-c60d6fec-1ea9-4616-ad4a-08fb1da4ee53 service nova] [instance: d663c2df-ae54-4c50-a70f-e2180700c700] Updating instance_info_cache with network_info: [{"id": "fea60f3c-e539-418e-abfc-a7a41c223938", "address": "fa:16:3e:64:6a:31", "network": {"id": "95ccbf6d-2bd9-42ff-93f9-5f9f541e5ba1", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-863839356-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e9b6ca6cccb940f0a516e265a721fd03", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "06eaa4c9-dbc2-4d38-a844-7bf76e7b5a64", "external-id": "nsx-vlan-transportzone-804", "segmentation_id": 804, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfea60f3c-e5", "ovs_interfaceid": "fea60f3c-e539-418e-abfc-a7a41c223938", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 596.190576] env[63028]: DEBUG oslo_vmware.api [None req-c0eb0318-b4ad-46b7-97d9-49268588965f tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Task: {'id': task-2734943, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 596.302063] env[63028]: DEBUG oslo_vmware.api [None req-70c27873-d5f4-48d7-97d8-490423e085b3 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Task: {'id': task-2734941, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.774265} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 596.306412] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-70c27873-d5f4-48d7-97d8-490423e085b3 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] 0dbafad1-ab21-439d-bc8e-e447ac33304e/0dbafad1-ab21-439d-bc8e-e447ac33304e.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 596.306412] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-70c27873-d5f4-48d7-97d8-490423e085b3 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] [instance: 0dbafad1-ab21-439d-bc8e-e447ac33304e] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 596.306412] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3725d99c-05e7-463e-a36b-197dc702a0d7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.316606] env[63028]: DEBUG oslo_vmware.api [None req-70c27873-d5f4-48d7-97d8-490423e085b3 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Waiting for the task: (returnval){ [ 596.316606] env[63028]: value = "task-2734950" [ 596.316606] env[63028]: _type = "Task" [ 596.316606] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 596.331708] env[63028]: DEBUG oslo_vmware.api [None req-70c27873-d5f4-48d7-97d8-490423e085b3 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Task: {'id': task-2734950, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 596.366118] env[63028]: DEBUG nova.compute.manager [None req-9e63bf4e-0a01-438b-b252-bf0645d18ec0 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] [instance: 9b1cd3c1-5e9a-43b6-9efb-1baf879ae0c0] Starting instance... {{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 596.377021] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2734946, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 596.433135] env[63028]: DEBUG nova.compute.utils [None req-4b7d0a7d-24c8-4fd0-b2fc-d929be4c8481 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 596.445089] env[63028]: DEBUG nova.compute.manager [None req-4b7d0a7d-24c8-4fd0-b2fc-d929be4c8481 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] [instance: c7a3f2c6-8368-49cc-9737-ea1d836f1783] Allocating IP information in the background. {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 596.445347] env[63028]: DEBUG nova.network.neutron [None req-4b7d0a7d-24c8-4fd0-b2fc-d929be4c8481 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] [instance: c7a3f2c6-8368-49cc-9737-ea1d836f1783] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 596.598672] env[63028]: DEBUG nova.policy [None req-4b7d0a7d-24c8-4fd0-b2fc-d929be4c8481 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd706aacdd72a4762b1af0b20ff69e30e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '381de553d9da4c94b923d790c12a28a3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 596.620279] env[63028]: DEBUG oslo_concurrency.lockutils [req-d3ca952a-3b02-4663-9385-f58c7bdd7b04 req-c60d6fec-1ea9-4616-ad4a-08fb1da4ee53 service nova] Releasing lock "refresh_cache-d663c2df-ae54-4c50-a70f-e2180700c700" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 596.695046] env[63028]: DEBUG oslo_vmware.api [None req-c0eb0318-b4ad-46b7-97d9-49268588965f tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Task: {'id': task-2734943, 'name': PowerOnVM_Task, 'duration_secs': 0.603131} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 596.697027] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0eb0318-b4ad-46b7-97d9-49268588965f tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] [instance: d663c2df-ae54-4c50-a70f-e2180700c700] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 596.697027] env[63028]: INFO nova.compute.manager [None req-c0eb0318-b4ad-46b7-97d9-49268588965f tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] [instance: d663c2df-ae54-4c50-a70f-e2180700c700] Took 10.67 seconds to spawn the instance on the hypervisor. 
[ 596.697327] env[63028]: DEBUG nova.compute.manager [None req-c0eb0318-b4ad-46b7-97d9-49268588965f tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] [instance: d663c2df-ae54-4c50-a70f-e2180700c700] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 596.700213] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa933d4d-d157-484a-88f7-fdb5ff5ff5de {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.831681] env[63028]: DEBUG oslo_vmware.api [None req-70c27873-d5f4-48d7-97d8-490423e085b3 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Task: {'id': task-2734950, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 596.866195] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2734946, 'name': CreateVM_Task, 'duration_secs': 0.619009} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 596.868015] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5a330ed9-c106-49f2-b524-a424e717b5ce] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 596.869268] env[63028]: DEBUG oslo_concurrency.lockutils [None req-19bb96ef-6a90-46d6-b14e-187dceb20ef8 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 596.869268] env[63028]: DEBUG oslo_concurrency.lockutils [None req-19bb96ef-6a90-46d6-b14e-187dceb20ef8 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 596.869268] env[63028]: DEBUG oslo_concurrency.lockutils [None req-19bb96ef-6a90-46d6-b14e-187dceb20ef8 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 596.869680] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-74bb8876-d94e-420b-9701-ce647a82da0b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.886802] env[63028]: DEBUG oslo_vmware.api [None req-19bb96ef-6a90-46d6-b14e-187dceb20ef8 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Waiting for the task: (returnval){ [ 596.886802] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52a96bbc-afab-9b60-33f8-949f167c88e8" [ 596.886802] env[63028]: _type = "Task" [ 596.886802] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 596.900732] env[63028]: DEBUG oslo_vmware.api [None req-19bb96ef-6a90-46d6-b14e-187dceb20ef8 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52a96bbc-afab-9b60-33f8-949f167c88e8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 596.923110] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9e63bf4e-0a01-438b-b252-bf0645d18ec0 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 596.945128] env[63028]: DEBUG nova.compute.manager [None req-4b7d0a7d-24c8-4fd0-b2fc-d929be4c8481 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] [instance: c7a3f2c6-8368-49cc-9737-ea1d836f1783] Start building block device mappings for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 597.228707] env[63028]: INFO nova.compute.manager [None req-c0eb0318-b4ad-46b7-97d9-49268588965f tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] [instance: d663c2df-ae54-4c50-a70f-e2180700c700] Took 21.93 seconds to build instance. [ 597.343069] env[63028]: DEBUG oslo_vmware.api [None req-70c27873-d5f4-48d7-97d8-490423e085b3 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Task: {'id': task-2734950, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.965361} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 597.343069] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-70c27873-d5f4-48d7-97d8-490423e085b3 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] [instance: 0dbafad1-ab21-439d-bc8e-e447ac33304e] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 597.343802] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47ba1cbc-8c40-44e1-9f8a-d5cc9ac46b92 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.374506] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-70c27873-d5f4-48d7-97d8-490423e085b3 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] [instance: 0dbafad1-ab21-439d-bc8e-e447ac33304e] Reconfiguring VM instance instance-0000000e to attach disk [datastore2] 0dbafad1-ab21-439d-bc8e-e447ac33304e/0dbafad1-ab21-439d-bc8e-e447ac33304e.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 597.376514] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-efbd91e8-43ba-46f4-84a1-6f9ec8536706 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.397569] env[63028]: DEBUG nova.compute.manager [req-abd20c3d-da1b-4f0d-ae4d-5a3f562e4e83 req-d2410c18-23a5-49cc-bfed-08921d524e6a service nova] [instance: f80df630-327b-4923-a785-5d2e48fe1f19] Received event network-vif-deleted-0756fdb5-7e3d-45e1-b815-668c72c5e804 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 597.413944] env[63028]: DEBUG oslo_vmware.api [None req-19bb96ef-6a90-46d6-b14e-187dceb20ef8 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52a96bbc-afab-9b60-33f8-949f167c88e8, 'name': SearchDatastore_Task, 'duration_secs': 0.026347} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 597.416279] env[63028]: DEBUG oslo_concurrency.lockutils [None req-19bb96ef-6a90-46d6-b14e-187dceb20ef8 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 597.416534] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-19bb96ef-6a90-46d6-b14e-187dceb20ef8 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: 5a330ed9-c106-49f2-b524-a424e717b5ce] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 597.416772] env[63028]: DEBUG oslo_concurrency.lockutils [None req-19bb96ef-6a90-46d6-b14e-187dceb20ef8 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 597.416909] env[63028]: DEBUG oslo_concurrency.lockutils [None req-19bb96ef-6a90-46d6-b14e-187dceb20ef8 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 597.417092] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-19bb96ef-6a90-46d6-b14e-187dceb20ef8 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 597.418209] env[63028]: DEBUG oslo_vmware.api [None req-70c27873-d5f4-48d7-97d8-490423e085b3 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Waiting for the task: (returnval){ [ 597.418209] env[63028]: value = "task-2734952" [ 597.418209] env[63028]: _type = "Task" [ 597.418209] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 597.420851] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8bb3ab05-3da1-401b-bf8c-d530f47fa327 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.434059] env[63028]: DEBUG oslo_vmware.api [None req-70c27873-d5f4-48d7-97d8-490423e085b3 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Task: {'id': task-2734952, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 597.440683] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-19bb96ef-6a90-46d6-b14e-187dceb20ef8 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 597.440888] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-19bb96ef-6a90-46d6-b14e-187dceb20ef8 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 597.444880] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ea521da5-7952-4bcc-94c3-084cb16b608b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.456682] env[63028]: DEBUG oslo_vmware.api [None req-19bb96ef-6a90-46d6-b14e-187dceb20ef8 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Waiting for the task: (returnval){ [ 597.456682] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52b38f04-bb6d-5168-ce86-b21b86952a54" [ 597.456682] env[63028]: _type = "Task" [ 597.456682] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 597.466103] env[63028]: DEBUG oslo_vmware.api [None req-19bb96ef-6a90-46d6-b14e-187dceb20ef8 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52b38f04-bb6d-5168-ce86-b21b86952a54, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 597.472972] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bff071af-86d0-4d8b-893c-d7bee5a5e30f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.483176] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fce52bf-b7aa-46c6-a064-1d12ace3ebcd {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.519958] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e771ff5c-d20a-44fd-a209-1472ed570c9f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.531468] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75842470-98b5-4972-b6bd-64faf1e8e01f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.553119] env[63028]: DEBUG nova.compute.provider_tree [None req-fa9cf96e-1fa7-41d8-8fc5-4ef011c3de54 tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 597.731991] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c0eb0318-b4ad-46b7-97d9-49268588965f tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Lock "d663c2df-ae54-4c50-a70f-e2180700c700" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.446s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 597.843343] env[63028]: INFO nova.compute.manager [None req-e8cfdc3e-60ac-4489-8889-136c30d59549 tempest-ServersAdmin275Test-1735254146 tempest-ServersAdmin275Test-1735254146-project-admin] [instance: 94b1bf30-0f9b-4197-99ff-6631a13ab2d1] Rebuilding instance [ 597.914267] env[63028]: DEBUG nova.compute.manager [None req-e8cfdc3e-60ac-4489-8889-136c30d59549 tempest-ServersAdmin275Test-1735254146 tempest-ServersAdmin275Test-1735254146-project-admin] [instance: 94b1bf30-0f9b-4197-99ff-6631a13ab2d1] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 597.915367] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41902366-8c29-451e-ba7b-172a00148846 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.945674] env[63028]: DEBUG oslo_vmware.api [None req-70c27873-d5f4-48d7-97d8-490423e085b3 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Task: {'id': task-2734952, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 597.947543] env[63028]: DEBUG nova.network.neutron [None req-d380ac33-3a3f-419f-b4d1-ad7edb67b04d tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] [instance: c9cc1ac7-06c6-415b-86ce-daf4849bfc05] Successfully updated port: b8893183-7887-4add-9ad9-389b74e74c81 {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 597.959324] env[63028]: DEBUG nova.compute.manager [None req-4b7d0a7d-24c8-4fd0-b2fc-d929be4c8481 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] [instance: c7a3f2c6-8368-49cc-9737-ea1d836f1783] Start spawning the instance on the hypervisor. {{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 597.989170] env[63028]: DEBUG oslo_vmware.api [None req-19bb96ef-6a90-46d6-b14e-187dceb20ef8 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52b38f04-bb6d-5168-ce86-b21b86952a54, 'name': SearchDatastore_Task, 'duration_secs': 0.031094} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 597.992973] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-129ecf12-01eb-4e7a-be97-02557c45c54f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.005592] env[63028]: DEBUG oslo_vmware.api [None req-19bb96ef-6a90-46d6-b14e-187dceb20ef8 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Waiting for the task: (returnval){ [ 598.005592] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]522f9484-bbe0-f498-b42c-5fd671008033" [ 598.005592] env[63028]: _type = "Task" [ 598.005592] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 598.009816] env[63028]: DEBUG nova.virt.hardware [None req-4b7d0a7d-24c8-4fd0-b2fc-d929be4c8481 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 598.009816] env[63028]: DEBUG nova.virt.hardware [None req-4b7d0a7d-24c8-4fd0-b2fc-d929be4c8481 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 598.009816] env[63028]: DEBUG nova.virt.hardware [None req-4b7d0a7d-24c8-4fd0-b2fc-d929be4c8481 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 598.010122] env[63028]: DEBUG nova.virt.hardware [None req-4b7d0a7d-24c8-4fd0-b2fc-d929be4c8481 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 598.010122] env[63028]: DEBUG nova.virt.hardware [None req-4b7d0a7d-24c8-4fd0-b2fc-d929be4c8481 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 598.010122] env[63028]: DEBUG nova.virt.hardware [None req-4b7d0a7d-24c8-4fd0-b2fc-d929be4c8481 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 598.010122] env[63028]: DEBUG nova.virt.hardware [None req-4b7d0a7d-24c8-4fd0-b2fc-d929be4c8481 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 598.010122] env[63028]: DEBUG nova.virt.hardware [None req-4b7d0a7d-24c8-4fd0-b2fc-d929be4c8481 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Build topologies for 1 vcpu(s) 
1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 598.010295] env[63028]: DEBUG nova.virt.hardware [None req-4b7d0a7d-24c8-4fd0-b2fc-d929be4c8481 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 598.010295] env[63028]: DEBUG nova.virt.hardware [None req-4b7d0a7d-24c8-4fd0-b2fc-d929be4c8481 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 598.010295] env[63028]: DEBUG nova.virt.hardware [None req-4b7d0a7d-24c8-4fd0-b2fc-d929be4c8481 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 598.011916] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-782dbfb4-e483-4db7-a44d-b2734c520598 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.032205] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5e75ed4-48d7-44e4-a143-61b65e365906 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.037271] env[63028]: DEBUG oslo_vmware.api [None req-19bb96ef-6a90-46d6-b14e-187dceb20ef8 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]522f9484-bbe0-f498-b42c-5fd671008033, 'name': SearchDatastore_Task, 'duration_secs': 0.018999} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 598.043024] env[63028]: DEBUG oslo_concurrency.lockutils [None req-19bb96ef-6a90-46d6-b14e-187dceb20ef8 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 598.043024] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-19bb96ef-6a90-46d6-b14e-187dceb20ef8 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] 5a330ed9-c106-49f2-b524-a424e717b5ce/5a330ed9-c106-49f2-b524-a424e717b5ce.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 598.044874] env[63028]: DEBUG nova.compute.manager [req-c571cde5-9545-4e52-8fd2-ce89261a6c6e req-5beae841-073a-49eb-b184-a7c5766a23a4 service nova] [instance: 5a330ed9-c106-49f2-b524-a424e717b5ce] Received event network-vif-plugged-6dc62708-050a-40f3-b99a-f51b25937806 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 598.045366] env[63028]: DEBUG oslo_concurrency.lockutils [req-c571cde5-9545-4e52-8fd2-ce89261a6c6e req-5beae841-073a-49eb-b184-a7c5766a23a4 service nova] Acquiring lock "5a330ed9-c106-49f2-b524-a424e717b5ce-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 598.045732] env[63028]: DEBUG oslo_concurrency.lockutils [req-c571cde5-9545-4e52-8fd2-ce89261a6c6e req-5beae841-073a-49eb-b184-a7c5766a23a4 service nova] Lock "5a330ed9-c106-49f2-b524-a424e717b5ce-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 598.046072] env[63028]: DEBUG oslo_concurrency.lockutils [req-c571cde5-9545-4e52-8fd2-ce89261a6c6e req-5beae841-073a-49eb-b184-a7c5766a23a4 service nova] Lock "5a330ed9-c106-49f2-b524-a424e717b5ce-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 598.046996] env[63028]: DEBUG nova.compute.manager [req-c571cde5-9545-4e52-8fd2-ce89261a6c6e req-5beae841-073a-49eb-b184-a7c5766a23a4 service nova] [instance: 5a330ed9-c106-49f2-b524-a424e717b5ce] No waiting events found dispatching network-vif-plugged-6dc62708-050a-40f3-b99a-f51b25937806 {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 598.048898] env[63028]: WARNING nova.compute.manager [req-c571cde5-9545-4e52-8fd2-ce89261a6c6e req-5beae841-073a-49eb-b184-a7c5766a23a4 service nova] [instance: 5a330ed9-c106-49f2-b524-a424e717b5ce] Received unexpected event network-vif-plugged-6dc62708-050a-40f3-b99a-f51b25937806 for instance with vm_state building and task_state spawning. 
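The "Getting desirable topologies ... Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]" entries above show Nova enumerating every sockets/cores/threads split whose product equals the flavor's vCPU count, then picking from that set. The following is a minimal, self-contained sketch of that enumeration only (an illustration of the idea, not Nova's actual hardware.py code; the class and function names here are placeholders):

    from dataclasses import dataclass

    @dataclass(frozen=True)
    class VirtCPUTopology:
        sockets: int
        cores: int
        threads: int

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        # Enumerate every (sockets, cores, threads) combination whose product
        # equals the requested vCPU count, within the given upper limits.
        found = []
        for s in range(1, min(vcpus, max_sockets) + 1):
            for c in range(1, min(vcpus, max_cores) + 1):
                for t in range(1, min(vcpus, max_threads) + 1):
                    if s * c * t == vcpus:
                        found.append(VirtCPUTopology(s, c, t))
        return found

    # For the 1-vCPU m1.nano flavor seen in the log, exactly one topology survives:
    print(possible_topologies(1))  # [VirtCPUTopology(sockets=1, cores=1, threads=1)]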
[ 598.048898] env[63028]: DEBUG nova.compute.manager [req-c571cde5-9545-4e52-8fd2-ce89261a6c6e req-5beae841-073a-49eb-b184-a7c5766a23a4 service nova] [instance: 5a330ed9-c106-49f2-b524-a424e717b5ce] Received event network-changed-6dc62708-050a-40f3-b99a-f51b25937806 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 598.048898] env[63028]: DEBUG nova.compute.manager [req-c571cde5-9545-4e52-8fd2-ce89261a6c6e req-5beae841-073a-49eb-b184-a7c5766a23a4 service nova] [instance: 5a330ed9-c106-49f2-b524-a424e717b5ce] Refreshing instance network info cache due to event network-changed-6dc62708-050a-40f3-b99a-f51b25937806. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 598.048898] env[63028]: DEBUG oslo_concurrency.lockutils [req-c571cde5-9545-4e52-8fd2-ce89261a6c6e req-5beae841-073a-49eb-b184-a7c5766a23a4 service nova] Acquiring lock "refresh_cache-5a330ed9-c106-49f2-b524-a424e717b5ce" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 598.048898] env[63028]: DEBUG oslo_concurrency.lockutils [req-c571cde5-9545-4e52-8fd2-ce89261a6c6e req-5beae841-073a-49eb-b184-a7c5766a23a4 service nova] Acquired lock "refresh_cache-5a330ed9-c106-49f2-b524-a424e717b5ce" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 598.049581] env[63028]: DEBUG nova.network.neutron [req-c571cde5-9545-4e52-8fd2-ce89261a6c6e req-5beae841-073a-49eb-b184-a7c5766a23a4 service nova] [instance: 5a330ed9-c106-49f2-b524-a424e717b5ce] Refreshing network info cache for port 6dc62708-050a-40f3-b99a-f51b25937806 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 598.052763] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8f5187e6-396b-46ef-8547-5288ba9fa603 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.064898] env[63028]: DEBUG nova.scheduler.client.report [None req-fa9cf96e-1fa7-41d8-8fc5-4ef011c3de54 tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 598.069213] env[63028]: DEBUG nova.network.neutron [None req-4b7d0a7d-24c8-4fd0-b2fc-d929be4c8481 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] [instance: c7a3f2c6-8368-49cc-9737-ea1d836f1783] Successfully created port: dbc0e58f-b646-4c47-becf-ba9c242ca9aa {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 598.077984] env[63028]: DEBUG oslo_vmware.api [None req-19bb96ef-6a90-46d6-b14e-187dceb20ef8 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Waiting for the task: (returnval){ [ 598.077984] env[63028]: value = "task-2734953" [ 598.077984] env[63028]: _type = "Task" [ 598.077984] env[63028]: } to 
complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 598.093175] env[63028]: DEBUG oslo_vmware.api [None req-19bb96ef-6a90-46d6-b14e-187dceb20ef8 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Task: {'id': task-2734953, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 598.438031] env[63028]: DEBUG oslo_vmware.api [None req-70c27873-d5f4-48d7-97d8-490423e085b3 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Task: {'id': task-2734952, 'name': ReconfigVM_Task, 'duration_secs': 0.728125} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 598.438031] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-70c27873-d5f4-48d7-97d8-490423e085b3 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] [instance: 0dbafad1-ab21-439d-bc8e-e447ac33304e] Reconfigured VM instance instance-0000000e to attach disk [datastore2] 0dbafad1-ab21-439d-bc8e-e447ac33304e/0dbafad1-ab21-439d-bc8e-e447ac33304e.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 598.438873] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-86520ee5-0aed-4f3c-95d6-14a97d418a3e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.450066] env[63028]: DEBUG oslo_vmware.api [None req-70c27873-d5f4-48d7-97d8-490423e085b3 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Waiting for the task: (returnval){ [ 598.450066] env[63028]: value = "task-2734954" [ 598.450066] env[63028]: _type = "Task" [ 598.450066] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 598.454619] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d380ac33-3a3f-419f-b4d1-ad7edb67b04d tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Acquiring lock "refresh_cache-c9cc1ac7-06c6-415b-86ce-daf4849bfc05" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 598.454970] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d380ac33-3a3f-419f-b4d1-ad7edb67b04d tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Acquired lock "refresh_cache-c9cc1ac7-06c6-415b-86ce-daf4849bfc05" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 598.455728] env[63028]: DEBUG nova.network.neutron [None req-d380ac33-3a3f-419f-b4d1-ad7edb67b04d tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] [instance: c9cc1ac7-06c6-415b-86ce-daf4849bfc05] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 598.470130] env[63028]: DEBUG oslo_vmware.api [None req-70c27873-d5f4-48d7-97d8-490423e085b3 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Task: {'id': task-2734954, 'name': Rename_Task} progress is 5%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 598.575115] env[63028]: DEBUG oslo_concurrency.lockutils [None req-fa9cf96e-1fa7-41d8-8fc5-4ef011c3de54 tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.653s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 598.575115] env[63028]: DEBUG nova.compute.manager [None req-fa9cf96e-1fa7-41d8-8fc5-4ef011c3de54 tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] [instance: 4a782483-c24e-44db-b697-856c69cc4a13] Start building networks asynchronously for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 598.582020] env[63028]: DEBUG oslo_concurrency.lockutils [None req-78a1acb4-c1f1-436a-833a-acfe219f578d tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.281s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 598.584122] env[63028]: INFO nova.compute.claims [None req-78a1acb4-c1f1-436a-833a-acfe219f578d tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] [instance: 44fca05f-51db-4252-bcf8-6bcad37a6147] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 598.605995] env[63028]: DEBUG oslo_vmware.api [None req-19bb96ef-6a90-46d6-b14e-187dceb20ef8 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Task: {'id': task-2734953, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 598.943024] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8cfdc3e-60ac-4489-8889-136c30d59549 tempest-ServersAdmin275Test-1735254146 tempest-ServersAdmin275Test-1735254146-project-admin] [instance: 94b1bf30-0f9b-4197-99ff-6631a13ab2d1] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 598.943966] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c61280a6-c61a-456d-9326-f39030f6099f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.956753] env[63028]: DEBUG oslo_vmware.api [None req-e8cfdc3e-60ac-4489-8889-136c30d59549 tempest-ServersAdmin275Test-1735254146 tempest-ServersAdmin275Test-1735254146-project-admin] Waiting for the task: (returnval){ [ 598.956753] env[63028]: value = "task-2734955" [ 598.956753] env[63028]: _type = "Task" [ 598.956753] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 598.968826] env[63028]: DEBUG oslo_vmware.api [None req-70c27873-d5f4-48d7-97d8-490423e085b3 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Task: {'id': task-2734954, 'name': Rename_Task, 'duration_secs': 0.405663} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 598.969816] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-70c27873-d5f4-48d7-97d8-490423e085b3 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] [instance: 0dbafad1-ab21-439d-bc8e-e447ac33304e] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 598.970710] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-de3f64e6-8151-4b47-8ed3-66c47a790a79 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.982056] env[63028]: DEBUG oslo_vmware.api [None req-e8cfdc3e-60ac-4489-8889-136c30d59549 tempest-ServersAdmin275Test-1735254146 tempest-ServersAdmin275Test-1735254146-project-admin] Task: {'id': task-2734955, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 598.990472] env[63028]: DEBUG oslo_vmware.api [None req-70c27873-d5f4-48d7-97d8-490423e085b3 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Waiting for the task: (returnval){ [ 598.990472] env[63028]: value = "task-2734956" [ 598.990472] env[63028]: _type = "Task" [ 598.990472] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 599.001592] env[63028]: DEBUG oslo_vmware.api [None req-70c27873-d5f4-48d7-97d8-490423e085b3 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Task: {'id': task-2734956, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.051774] env[63028]: DEBUG nova.network.neutron [None req-d380ac33-3a3f-419f-b4d1-ad7edb67b04d tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] [instance: c9cc1ac7-06c6-415b-86ce-daf4849bfc05] Instance cache missing network info. {{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 599.101643] env[63028]: DEBUG nova.compute.utils [None req-fa9cf96e-1fa7-41d8-8fc5-4ef011c3de54 tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 599.102041] env[63028]: DEBUG nova.compute.manager [None req-fa9cf96e-1fa7-41d8-8fc5-4ef011c3de54 tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] [instance: 4a782483-c24e-44db-b697-856c69cc4a13] Allocating IP information in the background. 
{{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 599.102041] env[63028]: DEBUG nova.network.neutron [None req-fa9cf96e-1fa7-41d8-8fc5-4ef011c3de54 tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] [instance: 4a782483-c24e-44db-b697-856c69cc4a13] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 599.112788] env[63028]: DEBUG oslo_vmware.api [None req-19bb96ef-6a90-46d6-b14e-187dceb20ef8 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Task: {'id': task-2734953, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.84836} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 599.116300] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-19bb96ef-6a90-46d6-b14e-187dceb20ef8 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] 5a330ed9-c106-49f2-b524-a424e717b5ce/5a330ed9-c106-49f2-b524-a424e717b5ce.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 599.119021] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-19bb96ef-6a90-46d6-b14e-187dceb20ef8 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: 5a330ed9-c106-49f2-b524-a424e717b5ce] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 599.119021] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4ff165e4-d661-4dfa-82e2-cf01c8ae9c9c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.133409] env[63028]: DEBUG oslo_vmware.api [None req-19bb96ef-6a90-46d6-b14e-187dceb20ef8 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Waiting for the task: (returnval){ [ 599.133409] env[63028]: value = "task-2734957" [ 599.133409] env[63028]: _type = "Task" [ 599.133409] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 599.146418] env[63028]: DEBUG oslo_vmware.api [None req-19bb96ef-6a90-46d6-b14e-187dceb20ef8 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Task: {'id': task-2734957, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.276042] env[63028]: DEBUG nova.policy [None req-fa9cf96e-1fa7-41d8-8fc5-4ef011c3de54 tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5636d7b7a3d345a58a9ee343fecf2f97', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3b056498f618493295359c1784b6660a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 599.468862] env[63028]: DEBUG oslo_vmware.api [None req-e8cfdc3e-60ac-4489-8889-136c30d59549 tempest-ServersAdmin275Test-1735254146 tempest-ServersAdmin275Test-1735254146-project-admin] Task: {'id': task-2734955, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.489465] env[63028]: DEBUG nova.network.neutron [req-c571cde5-9545-4e52-8fd2-ce89261a6c6e req-5beae841-073a-49eb-b184-a7c5766a23a4 service nova] [instance: 5a330ed9-c106-49f2-b524-a424e717b5ce] Updated VIF entry in instance network info cache for port 6dc62708-050a-40f3-b99a-f51b25937806. {{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 599.489817] env[63028]: DEBUG nova.network.neutron [req-c571cde5-9545-4e52-8fd2-ce89261a6c6e req-5beae841-073a-49eb-b184-a7c5766a23a4 service nova] [instance: 5a330ed9-c106-49f2-b524-a424e717b5ce] Updating instance_info_cache with network_info: [{"id": "6dc62708-050a-40f3-b99a-f51b25937806", "address": "fa:16:3e:1a:f4:86", "network": {"id": "350f4b14-d211-48c8-b1dd-06a0dd5805d1", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-987689362-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "b3d1798e23e64325a3b6f699cd27d98f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "60567ee6-01d0-4b16-9c7a-4a896827d6eb", "external-id": "nsx-vlan-transportzone-28", "segmentation_id": 28, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6dc62708-05", "ovs_interfaceid": "6dc62708-050a-40f3-b99a-f51b25937806", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 599.506216] env[63028]: DEBUG oslo_vmware.api [None req-70c27873-d5f4-48d7-97d8-490423e085b3 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Task: {'id': task-2734956, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.613622] env[63028]: DEBUG nova.compute.manager [None req-fa9cf96e-1fa7-41d8-8fc5-4ef011c3de54 tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] [instance: 4a782483-c24e-44db-b697-856c69cc4a13] Start building block device mappings for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 599.647465] env[63028]: DEBUG oslo_vmware.api [None req-19bb96ef-6a90-46d6-b14e-187dceb20ef8 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Task: {'id': task-2734957, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.114757} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 599.649409] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-19bb96ef-6a90-46d6-b14e-187dceb20ef8 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: 5a330ed9-c106-49f2-b524-a424e717b5ce] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 599.650285] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f42c96a-2571-4fd2-8dc0-ae11b71e425d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.679689] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-19bb96ef-6a90-46d6-b14e-187dceb20ef8 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: 5a330ed9-c106-49f2-b524-a424e717b5ce] Reconfiguring VM instance instance-0000000d to attach disk [datastore2] 5a330ed9-c106-49f2-b524-a424e717b5ce/5a330ed9-c106-49f2-b524-a424e717b5ce.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 599.680104] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e5df59af-3821-4a58-82b4-74b805899197 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.715026] env[63028]: DEBUG oslo_vmware.api [None req-19bb96ef-6a90-46d6-b14e-187dceb20ef8 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Waiting for the task: (returnval){ [ 599.715026] env[63028]: value = "task-2734959" [ 599.715026] env[63028]: _type = "Task" [ 599.715026] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 599.726191] env[63028]: DEBUG oslo_vmware.api [None req-19bb96ef-6a90-46d6-b14e-187dceb20ef8 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Task: {'id': task-2734959, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.847786] env[63028]: DEBUG nova.network.neutron [None req-d380ac33-3a3f-419f-b4d1-ad7edb67b04d tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] [instance: c9cc1ac7-06c6-415b-86ce-daf4849bfc05] Updating instance_info_cache with network_info: [{"id": "b8893183-7887-4add-9ad9-389b74e74c81", "address": "fa:16:3e:03:0c:7d", "network": {"id": "5370dc93-87d0-455c-a888-c3313b4b6f9c", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-1189794082-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f4bc908c43a648f79f88812f86dddb6c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9fa933df-d66f-485e-8cf9-eda7f1a7f283", "external-id": "nsx-vlan-transportzone-87", "segmentation_id": 87, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb8893183-78", "ovs_interfaceid": "b8893183-7887-4add-9ad9-389b74e74c81", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 599.973958] env[63028]: DEBUG oslo_vmware.api [None req-e8cfdc3e-60ac-4489-8889-136c30d59549 tempest-ServersAdmin275Test-1735254146 tempest-ServersAdmin275Test-1735254146-project-admin] Task: {'id': task-2734955, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.997332] env[63028]: DEBUG oslo_concurrency.lockutils [req-c571cde5-9545-4e52-8fd2-ce89261a6c6e req-5beae841-073a-49eb-b184-a7c5766a23a4 service nova] Releasing lock "refresh_cache-5a330ed9-c106-49f2-b524-a424e717b5ce" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 600.004671] env[63028]: DEBUG oslo_vmware.api [None req-70c27873-d5f4-48d7-97d8-490423e085b3 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Task: {'id': task-2734956, 'name': PowerOnVM_Task, 'duration_secs': 0.818956} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 600.006633] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-70c27873-d5f4-48d7-97d8-490423e085b3 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] [instance: 0dbafad1-ab21-439d-bc8e-e447ac33304e] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 600.006920] env[63028]: INFO nova.compute.manager [None req-70c27873-d5f4-48d7-97d8-490423e085b3 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] [instance: 0dbafad1-ab21-439d-bc8e-e447ac33304e] Took 6.98 seconds to spawn the instance on the hypervisor. 
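The recurring "Task: {'id': task-..., 'name': ...} progress is N%" and "completed successfully" entries above come from repeatedly polling a vCenter task until it reaches a terminal state (the PowerOnVM_Task here finished after 0.818956 s). A generic polling loop in the same spirit, purely illustrative and not the oslo.vmware API itself, could look like:

    import time

    def wait_for_task(poll, interval=0.5, timeout=300.0):
        # `poll` is any callable returning (state, progress); this stands in for
        # asking vCenter for the task's current state, and is an assumption of
        # this sketch rather than a real oslo.vmware signature.
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            state, progress = poll()
            print(f"progress is {progress}%")   # cf. the 'progress is 66%.' lines
            if state == "success":
                return
            if state == "error":
                raise RuntimeError("task failed")
            time.sleep(interval)
        raise TimeoutError("task did not complete in time")

    # Usage with a fake task that finishes after a few polls:
    steps = iter([("running", 0), ("running", 66), ("success", 100)])
    wait_for_task(lambda: next(steps), interval=0.0)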
[ 600.007122] env[63028]: DEBUG nova.compute.manager [None req-70c27873-d5f4-48d7-97d8-490423e085b3 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] [instance: 0dbafad1-ab21-439d-bc8e-e447ac33304e] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 600.008039] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14920bfa-22e1-458d-b7b6-b2b106b8570e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.016212] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a143988-67e6-48b5-bb33-4dd873f93ec2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.027594] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f72fd8c-c4e9-41c2-b8eb-a413041333d8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.074637] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6818eed-1187-4f05-bb87-295f140082e3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.086378] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ce7bbe1-2514-4ce0-a1d9-5a9dfa52f863 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.111037] env[63028]: DEBUG nova.compute.provider_tree [None req-78a1acb4-c1f1-436a-833a-acfe219f578d tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 600.225633] env[63028]: DEBUG oslo_vmware.api [None req-19bb96ef-6a90-46d6-b14e-187dceb20ef8 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Task: {'id': task-2734959, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 600.351738] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d380ac33-3a3f-419f-b4d1-ad7edb67b04d tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Releasing lock "refresh_cache-c9cc1ac7-06c6-415b-86ce-daf4849bfc05" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 600.352995] env[63028]: DEBUG nova.compute.manager [None req-d380ac33-3a3f-419f-b4d1-ad7edb67b04d tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] [instance: c9cc1ac7-06c6-415b-86ce-daf4849bfc05] Instance network_info: |[{"id": "b8893183-7887-4add-9ad9-389b74e74c81", "address": "fa:16:3e:03:0c:7d", "network": {"id": "5370dc93-87d0-455c-a888-c3313b4b6f9c", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-1189794082-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f4bc908c43a648f79f88812f86dddb6c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9fa933df-d66f-485e-8cf9-eda7f1a7f283", "external-id": "nsx-vlan-transportzone-87", "segmentation_id": 87, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb8893183-78", "ovs_interfaceid": "b8893183-7887-4add-9ad9-389b74e74c81", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 600.353162] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-d380ac33-3a3f-419f-b4d1-ad7edb67b04d tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] [instance: c9cc1ac7-06c6-415b-86ce-daf4849bfc05] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:03:0c:7d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9fa933df-d66f-485e-8cf9-eda7f1a7f283', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b8893183-7887-4add-9ad9-389b74e74c81', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 600.363286] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-d380ac33-3a3f-419f-b4d1-ad7edb67b04d tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Creating folder: Project (f4bc908c43a648f79f88812f86dddb6c). Parent ref: group-v550570. 
{{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 600.363286] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-55239cd3-dd44-4870-99f6-03840d3ad31f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.378572] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-d380ac33-3a3f-419f-b4d1-ad7edb67b04d tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Created folder: Project (f4bc908c43a648f79f88812f86dddb6c) in parent group-v550570. [ 600.378572] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-d380ac33-3a3f-419f-b4d1-ad7edb67b04d tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Creating folder: Instances. Parent ref: group-v550618. {{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 600.378572] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-dd7d68dc-8241-474d-9b05-c2cd35aa528a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.394120] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-d380ac33-3a3f-419f-b4d1-ad7edb67b04d tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Created folder: Instances in parent group-v550618. [ 600.396644] env[63028]: DEBUG oslo.service.loopingcall [None req-d380ac33-3a3f-419f-b4d1-ad7edb67b04d tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 600.396644] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c9cc1ac7-06c6-415b-86ce-daf4849bfc05] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 600.396644] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-40ae3deb-b9ee-484a-9234-2d07e07a40fb {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.419171] env[63028]: DEBUG nova.network.neutron [None req-fa9cf96e-1fa7-41d8-8fc5-4ef011c3de54 tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] [instance: 4a782483-c24e-44db-b697-856c69cc4a13] Successfully created port: a290475c-c96a-4037-9a1f-e4340a86da15 {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 600.430608] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 600.430608] env[63028]: value = "task-2734962" [ 600.430608] env[63028]: _type = "Task" [ 600.430608] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 600.440921] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2734962, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 600.476130] env[63028]: DEBUG oslo_vmware.api [None req-e8cfdc3e-60ac-4489-8889-136c30d59549 tempest-ServersAdmin275Test-1735254146 tempest-ServersAdmin275Test-1735254146-project-admin] Task: {'id': task-2734955, 'name': PowerOffVM_Task, 'duration_secs': 1.158677} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 600.476130] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8cfdc3e-60ac-4489-8889-136c30d59549 tempest-ServersAdmin275Test-1735254146 tempest-ServersAdmin275Test-1735254146-project-admin] [instance: 94b1bf30-0f9b-4197-99ff-6631a13ab2d1] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 600.476130] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-e8cfdc3e-60ac-4489-8889-136c30d59549 tempest-ServersAdmin275Test-1735254146 tempest-ServersAdmin275Test-1735254146-project-admin] [instance: 94b1bf30-0f9b-4197-99ff-6631a13ab2d1] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 600.476768] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-428819b7-e791-4a2d-9984-d982aa008c99 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.488875] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-e8cfdc3e-60ac-4489-8889-136c30d59549 tempest-ServersAdmin275Test-1735254146 tempest-ServersAdmin275Test-1735254146-project-admin] [instance: 94b1bf30-0f9b-4197-99ff-6631a13ab2d1] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 600.489264] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5e783485-b324-4a70-bfc3-e19ca9183512 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.524056] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-e8cfdc3e-60ac-4489-8889-136c30d59549 tempest-ServersAdmin275Test-1735254146 tempest-ServersAdmin275Test-1735254146-project-admin] [instance: 94b1bf30-0f9b-4197-99ff-6631a13ab2d1] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 600.524280] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-e8cfdc3e-60ac-4489-8889-136c30d59549 tempest-ServersAdmin275Test-1735254146 tempest-ServersAdmin275Test-1735254146-project-admin] [instance: 94b1bf30-0f9b-4197-99ff-6631a13ab2d1] Deleting contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 600.524519] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-e8cfdc3e-60ac-4489-8889-136c30d59549 tempest-ServersAdmin275Test-1735254146 tempest-ServersAdmin275Test-1735254146-project-admin] Deleting the datastore file [datastore1] 94b1bf30-0f9b-4197-99ff-6631a13ab2d1 {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 600.524720] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4aa48485-ec30-4ef8-b506-aafc61e69d24 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.540125] env[63028]: DEBUG oslo_vmware.api [None req-e8cfdc3e-60ac-4489-8889-136c30d59549 
tempest-ServersAdmin275Test-1735254146 tempest-ServersAdmin275Test-1735254146-project-admin] Waiting for the task: (returnval){ [ 600.540125] env[63028]: value = "task-2734964" [ 600.540125] env[63028]: _type = "Task" [ 600.540125] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 600.550020] env[63028]: INFO nova.compute.manager [None req-70c27873-d5f4-48d7-97d8-490423e085b3 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] [instance: 0dbafad1-ab21-439d-bc8e-e447ac33304e] Took 18.86 seconds to build instance. [ 600.556995] env[63028]: DEBUG oslo_vmware.api [None req-e8cfdc3e-60ac-4489-8889-136c30d59549 tempest-ServersAdmin275Test-1735254146 tempest-ServersAdmin275Test-1735254146-project-admin] Task: {'id': task-2734964, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 600.589885] env[63028]: DEBUG oslo_concurrency.lockutils [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Acquiring lock "ddf20137-4d63-4c7a-b519-445719265e1d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 600.594445] env[63028]: DEBUG oslo_concurrency.lockutils [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Lock "ddf20137-4d63-4c7a-b519-445719265e1d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.005s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 600.614358] env[63028]: DEBUG nova.scheduler.client.report [None req-78a1acb4-c1f1-436a-833a-acfe219f578d tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 600.625460] env[63028]: DEBUG nova.compute.manager [None req-fa9cf96e-1fa7-41d8-8fc5-4ef011c3de54 tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] [instance: 4a782483-c24e-44db-b697-856c69cc4a13] Start spawning the instance on the hypervisor. 
{{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 600.662580] env[63028]: DEBUG nova.virt.hardware [None req-fa9cf96e-1fa7-41d8-8fc5-4ef011c3de54 tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 600.662849] env[63028]: DEBUG nova.virt.hardware [None req-fa9cf96e-1fa7-41d8-8fc5-4ef011c3de54 tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 600.663064] env[63028]: DEBUG nova.virt.hardware [None req-fa9cf96e-1fa7-41d8-8fc5-4ef011c3de54 tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 600.663240] env[63028]: DEBUG nova.virt.hardware [None req-fa9cf96e-1fa7-41d8-8fc5-4ef011c3de54 tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 600.663394] env[63028]: DEBUG nova.virt.hardware [None req-fa9cf96e-1fa7-41d8-8fc5-4ef011c3de54 tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 600.663545] env[63028]: DEBUG nova.virt.hardware [None req-fa9cf96e-1fa7-41d8-8fc5-4ef011c3de54 tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 600.663858] env[63028]: DEBUG nova.virt.hardware [None req-fa9cf96e-1fa7-41d8-8fc5-4ef011c3de54 tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 600.663921] env[63028]: DEBUG nova.virt.hardware [None req-fa9cf96e-1fa7-41d8-8fc5-4ef011c3de54 tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 600.664075] env[63028]: DEBUG nova.virt.hardware [None req-fa9cf96e-1fa7-41d8-8fc5-4ef011c3de54 tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 600.664251] env[63028]: DEBUG nova.virt.hardware [None req-fa9cf96e-1fa7-41d8-8fc5-4ef011c3de54 tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 600.664427] env[63028]: DEBUG nova.virt.hardware [None req-fa9cf96e-1fa7-41d8-8fc5-4ef011c3de54 tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 600.665846] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22c9a2df-d6e0-4dca-981d-18b033748cf9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.677290] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cca7b48e-101b-49f4-9e12-0ed2f9dc055e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.734025] env[63028]: DEBUG oslo_vmware.api [None req-19bb96ef-6a90-46d6-b14e-187dceb20ef8 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Task: {'id': task-2734959, 'name': ReconfigVM_Task, 'duration_secs': 0.617851} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 600.734025] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-19bb96ef-6a90-46d6-b14e-187dceb20ef8 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: 5a330ed9-c106-49f2-b524-a424e717b5ce] Reconfigured VM instance instance-0000000d to attach disk [datastore2] 5a330ed9-c106-49f2-b524-a424e717b5ce/5a330ed9-c106-49f2-b524-a424e717b5ce.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 600.734025] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-01c5940a-9eb5-4f41-9ab4-f19168bc9f20 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.746221] env[63028]: DEBUG oslo_vmware.api [None req-19bb96ef-6a90-46d6-b14e-187dceb20ef8 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Waiting for the task: (returnval){ [ 600.746221] env[63028]: value = "task-2734965" [ 600.746221] env[63028]: _type = "Task" [ 600.746221] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 600.763525] env[63028]: DEBUG oslo_vmware.api [None req-19bb96ef-6a90-46d6-b14e-187dceb20ef8 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Task: {'id': task-2734965, 'name': Rename_Task} progress is 5%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 600.780498] env[63028]: DEBUG nova.compute.manager [None req-ae330bb3-93c7-4294-8595-bc7fdc93b84a tempest-ServerDiagnosticsV248Test-1492762669 tempest-ServerDiagnosticsV248Test-1492762669-project-admin] [instance: 1eeb96d1-6e03-4192-a9db-955444519fd7] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 600.781988] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f14bef70-17dc-4e71-a245-9abcc4cac4cc {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.793113] env[63028]: INFO nova.compute.manager [None req-ae330bb3-93c7-4294-8595-bc7fdc93b84a tempest-ServerDiagnosticsV248Test-1492762669 tempest-ServerDiagnosticsV248Test-1492762669-project-admin] [instance: 1eeb96d1-6e03-4192-a9db-955444519fd7] Retrieving diagnostics [ 600.793975] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef221b59-bcc2-4611-967a-4a19cf76c231 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.948615] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2734962, 'name': CreateVM_Task, 'duration_secs': 0.487194} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 600.948615] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c9cc1ac7-06c6-415b-86ce-daf4849bfc05] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 600.948615] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d380ac33-3a3f-419f-b4d1-ad7edb67b04d tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 600.948615] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d380ac33-3a3f-419f-b4d1-ad7edb67b04d tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 600.948615] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d380ac33-3a3f-419f-b4d1-ad7edb67b04d tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 600.948615] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c8da9553-0c6f-4b87-8d8c-2421f4f62ad7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.954301] env[63028]: DEBUG oslo_vmware.api [None req-d380ac33-3a3f-419f-b4d1-ad7edb67b04d tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Waiting for the task: (returnval){ [ 600.954301] env[63028]: value = 
"session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]520b261b-2242-3534-7193-c7277222ddb7" [ 600.954301] env[63028]: _type = "Task" [ 600.954301] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 600.963398] env[63028]: DEBUG oslo_vmware.api [None req-d380ac33-3a3f-419f-b4d1-ad7edb67b04d tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]520b261b-2242-3534-7193-c7277222ddb7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 601.053649] env[63028]: DEBUG oslo_concurrency.lockutils [None req-70c27873-d5f4-48d7-97d8-490423e085b3 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Lock "0dbafad1-ab21-439d-bc8e-e447ac33304e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.981s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 601.054871] env[63028]: DEBUG oslo_vmware.api [None req-e8cfdc3e-60ac-4489-8889-136c30d59549 tempest-ServersAdmin275Test-1735254146 tempest-ServersAdmin275Test-1735254146-project-admin] Task: {'id': task-2734964, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.142539} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 601.054871] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-e8cfdc3e-60ac-4489-8889-136c30d59549 tempest-ServersAdmin275Test-1735254146 tempest-ServersAdmin275Test-1735254146-project-admin] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 601.054994] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-e8cfdc3e-60ac-4489-8889-136c30d59549 tempest-ServersAdmin275Test-1735254146 tempest-ServersAdmin275Test-1735254146-project-admin] [instance: 94b1bf30-0f9b-4197-99ff-6631a13ab2d1] Deleted contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 601.055218] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-e8cfdc3e-60ac-4489-8889-136c30d59549 tempest-ServersAdmin275Test-1735254146 tempest-ServersAdmin275Test-1735254146-project-admin] [instance: 94b1bf30-0f9b-4197-99ff-6631a13ab2d1] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 601.097721] env[63028]: DEBUG nova.compute.manager [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] [instance: ddf20137-4d63-4c7a-b519-445719265e1d] Starting instance... 
{{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 601.105968] env[63028]: DEBUG nova.network.neutron [None req-4b7d0a7d-24c8-4fd0-b2fc-d929be4c8481 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] [instance: c7a3f2c6-8368-49cc-9737-ea1d836f1783] Successfully updated port: dbc0e58f-b646-4c47-becf-ba9c242ca9aa {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 601.120148] env[63028]: DEBUG oslo_concurrency.lockutils [None req-78a1acb4-c1f1-436a-833a-acfe219f578d tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.539s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 601.122420] env[63028]: DEBUG nova.compute.manager [None req-78a1acb4-c1f1-436a-833a-acfe219f578d tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] [instance: 44fca05f-51db-4252-bcf8-6bcad37a6147] Start building networks asynchronously for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 601.124861] env[63028]: DEBUG oslo_concurrency.lockutils [None req-0ae73018-2ad8-453d-b0a6-c1359a822bd9 tempest-DeleteServersAdminTestJSON-1646188334 tempest-DeleteServersAdminTestJSON-1646188334-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.329s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 601.124861] env[63028]: DEBUG nova.objects.instance [None req-0ae73018-2ad8-453d-b0a6-c1359a822bd9 tempest-DeleteServersAdminTestJSON-1646188334 tempest-DeleteServersAdminTestJSON-1646188334-project-admin] Lazy-loading 'resources' on Instance uuid 679fca11-7390-4596-ab74-2f82a6cf8858 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 601.259092] env[63028]: DEBUG oslo_vmware.api [None req-19bb96ef-6a90-46d6-b14e-187dceb20ef8 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Task: {'id': task-2734965, 'name': Rename_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 601.467664] env[63028]: DEBUG oslo_vmware.api [None req-d380ac33-3a3f-419f-b4d1-ad7edb67b04d tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]520b261b-2242-3534-7193-c7277222ddb7, 'name': SearchDatastore_Task, 'duration_secs': 0.01613} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 601.468022] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d380ac33-3a3f-419f-b4d1-ad7edb67b04d tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 601.468227] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-d380ac33-3a3f-419f-b4d1-ad7edb67b04d tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] [instance: c9cc1ac7-06c6-415b-86ce-daf4849bfc05] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 601.468466] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d380ac33-3a3f-419f-b4d1-ad7edb67b04d tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 601.468659] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d380ac33-3a3f-419f-b4d1-ad7edb67b04d tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 601.468784] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-d380ac33-3a3f-419f-b4d1-ad7edb67b04d tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 601.469139] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9ebdabad-8bbe-46f4-bed5-38adbd9294a3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.480374] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-d380ac33-3a3f-419f-b4d1-ad7edb67b04d tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 601.480635] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-d380ac33-3a3f-419f-b4d1-ad7edb67b04d tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 601.481581] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9a5790d6-3221-4338-8763-462d686d0f98 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.493486] env[63028]: DEBUG oslo_vmware.api [None req-d380ac33-3a3f-419f-b4d1-ad7edb67b04d tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Waiting for the task: (returnval){ [ 601.493486] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52843bf1-01df-d5ed-e5df-848c2ff8b153" [ 601.493486] env[63028]: _type = "Task" [ 601.493486] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 601.501089] env[63028]: DEBUG oslo_vmware.api [None req-d380ac33-3a3f-419f-b4d1-ad7edb67b04d tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52843bf1-01df-d5ed-e5df-848c2ff8b153, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 601.614682] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4b7d0a7d-24c8-4fd0-b2fc-d929be4c8481 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Acquiring lock "refresh_cache-c7a3f2c6-8368-49cc-9737-ea1d836f1783" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 601.614682] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4b7d0a7d-24c8-4fd0-b2fc-d929be4c8481 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Acquired lock "refresh_cache-c7a3f2c6-8368-49cc-9737-ea1d836f1783" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 601.615668] env[63028]: DEBUG nova.network.neutron [None req-4b7d0a7d-24c8-4fd0-b2fc-d929be4c8481 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] [instance: c7a3f2c6-8368-49cc-9737-ea1d836f1783] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 601.630196] env[63028]: DEBUG nova.compute.utils [None req-78a1acb4-c1f1-436a-833a-acfe219f578d tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 601.638445] env[63028]: DEBUG oslo_concurrency.lockutils [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 601.638445] env[63028]: DEBUG nova.compute.manager [None req-78a1acb4-c1f1-436a-833a-acfe219f578d tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] [instance: 44fca05f-51db-4252-bcf8-6bcad37a6147] Allocating IP information in the background. 
{{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 601.638445] env[63028]: DEBUG nova.network.neutron [None req-78a1acb4-c1f1-436a-833a-acfe219f578d tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] [instance: 44fca05f-51db-4252-bcf8-6bcad37a6147] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 601.707048] env[63028]: DEBUG nova.policy [None req-78a1acb4-c1f1-436a-833a-acfe219f578d tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9f5a1028ba464f2b980046e24a9d980a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '824cdcbbc0e1478b9d900cde707bc67b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 601.771609] env[63028]: DEBUG oslo_vmware.api [None req-19bb96ef-6a90-46d6-b14e-187dceb20ef8 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Task: {'id': task-2734965, 'name': Rename_Task, 'duration_secs': 0.747009} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 601.771906] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-19bb96ef-6a90-46d6-b14e-187dceb20ef8 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: 5a330ed9-c106-49f2-b524-a424e717b5ce] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 601.772221] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7c250b46-5bde-4a46-811c-f032a898886e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.783184] env[63028]: DEBUG oslo_vmware.api [None req-19bb96ef-6a90-46d6-b14e-187dceb20ef8 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Waiting for the task: (returnval){ [ 601.783184] env[63028]: value = "task-2734967" [ 601.783184] env[63028]: _type = "Task" [ 601.783184] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 601.797021] env[63028]: DEBUG oslo_vmware.api [None req-19bb96ef-6a90-46d6-b14e-187dceb20ef8 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Task: {'id': task-2734967, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 601.990377] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-974607a7-0179-4ac4-8fe2-4be5c3551598 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.006074] env[63028]: DEBUG oslo_vmware.api [None req-d380ac33-3a3f-419f-b4d1-ad7edb67b04d tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52843bf1-01df-d5ed-e5df-848c2ff8b153, 'name': SearchDatastore_Task, 'duration_secs': 0.015429} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 602.009185] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-912f333f-f4d9-4d4f-bb7e-14ae8d85e11b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.013307] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-700ae6be-a56b-480e-9519-b33c394dae97 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.023087] env[63028]: DEBUG oslo_vmware.api [None req-d380ac33-3a3f-419f-b4d1-ad7edb67b04d tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Waiting for the task: (returnval){ [ 602.023087] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52edee79-b4fe-3abb-f051-57c89720910f" [ 602.023087] env[63028]: _type = "Task" [ 602.023087] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 602.058481] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b59a1787-cbb1-4105-ac85-a436b80aa31d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.065571] env[63028]: DEBUG nova.compute.manager [None req-79b6bfbd-a34c-4ac2-a0e7-30504d157986 tempest-ServerExternalEventsTest-1332495708 tempest-ServerExternalEventsTest-1332495708-project] [instance: 67440140-a619-41f2-98fe-eff23e8ad8a5] Received event network-changed {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 602.065782] env[63028]: DEBUG nova.compute.manager [None req-79b6bfbd-a34c-4ac2-a0e7-30504d157986 tempest-ServerExternalEventsTest-1332495708 tempest-ServerExternalEventsTest-1332495708-project] [instance: 67440140-a619-41f2-98fe-eff23e8ad8a5] Refreshing instance network info cache due to event network-changed. 
{{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 602.066137] env[63028]: DEBUG oslo_concurrency.lockutils [None req-79b6bfbd-a34c-4ac2-a0e7-30504d157986 tempest-ServerExternalEventsTest-1332495708 tempest-ServerExternalEventsTest-1332495708-project] Acquiring lock "refresh_cache-67440140-a619-41f2-98fe-eff23e8ad8a5" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 602.066298] env[63028]: DEBUG oslo_concurrency.lockutils [None req-79b6bfbd-a34c-4ac2-a0e7-30504d157986 tempest-ServerExternalEventsTest-1332495708 tempest-ServerExternalEventsTest-1332495708-project] Acquired lock "refresh_cache-67440140-a619-41f2-98fe-eff23e8ad8a5" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 602.066462] env[63028]: DEBUG nova.network.neutron [None req-79b6bfbd-a34c-4ac2-a0e7-30504d157986 tempest-ServerExternalEventsTest-1332495708 tempest-ServerExternalEventsTest-1332495708-project] [instance: 67440140-a619-41f2-98fe-eff23e8ad8a5] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 602.076284] env[63028]: DEBUG oslo_vmware.api [None req-d380ac33-3a3f-419f-b4d1-ad7edb67b04d tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52edee79-b4fe-3abb-f051-57c89720910f, 'name': SearchDatastore_Task, 'duration_secs': 0.014744} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 602.079818] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4adeee68-a81f-4a31-a153-b30bf5959d43 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.084366] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d380ac33-3a3f-419f-b4d1-ad7edb67b04d tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 602.085032] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-d380ac33-3a3f-419f-b4d1-ad7edb67b04d tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] c9cc1ac7-06c6-415b-86ce-daf4849bfc05/c9cc1ac7-06c6-415b-86ce-daf4849bfc05.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 602.085546] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c5443e5a-7102-462f-820f-be0e74cc3559 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.102110] env[63028]: DEBUG nova.compute.provider_tree [None req-0ae73018-2ad8-453d-b0a6-c1359a822bd9 tempest-DeleteServersAdminTestJSON-1646188334 tempest-DeleteServersAdminTestJSON-1646188334-project-admin] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 602.111044] env[63028]: DEBUG oslo_vmware.api [None req-d380ac33-3a3f-419f-b4d1-ad7edb67b04d tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Waiting for the task: (returnval){ [ 602.111044] env[63028]: value = "task-2734968" [ 602.111044] env[63028]: _type = "Task" [ 602.111044] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 602.126088] env[63028]: DEBUG nova.virt.hardware [None req-e8cfdc3e-60ac-4489-8889-136c30d59549 tempest-ServersAdmin275Test-1735254146 tempest-ServersAdmin275Test-1735254146-project-admin] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 602.126404] env[63028]: DEBUG nova.virt.hardware [None req-e8cfdc3e-60ac-4489-8889-136c30d59549 tempest-ServersAdmin275Test-1735254146 tempest-ServersAdmin275Test-1735254146-project-admin] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 602.126644] env[63028]: DEBUG nova.virt.hardware [None req-e8cfdc3e-60ac-4489-8889-136c30d59549 tempest-ServersAdmin275Test-1735254146 tempest-ServersAdmin275Test-1735254146-project-admin] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 602.126867] env[63028]: DEBUG nova.virt.hardware [None req-e8cfdc3e-60ac-4489-8889-136c30d59549 tempest-ServersAdmin275Test-1735254146 tempest-ServersAdmin275Test-1735254146-project-admin] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 602.127136] env[63028]: DEBUG nova.virt.hardware [None req-e8cfdc3e-60ac-4489-8889-136c30d59549 tempest-ServersAdmin275Test-1735254146 tempest-ServersAdmin275Test-1735254146-project-admin] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 602.127315] env[63028]: DEBUG nova.virt.hardware [None req-e8cfdc3e-60ac-4489-8889-136c30d59549 tempest-ServersAdmin275Test-1735254146 tempest-ServersAdmin275Test-1735254146-project-admin] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 602.127979] env[63028]: DEBUG nova.virt.hardware [None req-e8cfdc3e-60ac-4489-8889-136c30d59549 tempest-ServersAdmin275Test-1735254146 tempest-ServersAdmin275Test-1735254146-project-admin] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:573}} [ 602.127979] env[63028]: DEBUG nova.virt.hardware [None req-e8cfdc3e-60ac-4489-8889-136c30d59549 tempest-ServersAdmin275Test-1735254146 tempest-ServersAdmin275Test-1735254146-project-admin] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 602.127979] env[63028]: DEBUG nova.virt.hardware [None req-e8cfdc3e-60ac-4489-8889-136c30d59549 tempest-ServersAdmin275Test-1735254146 tempest-ServersAdmin275Test-1735254146-project-admin] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 602.128235] env[63028]: DEBUG nova.virt.hardware [None req-e8cfdc3e-60ac-4489-8889-136c30d59549 tempest-ServersAdmin275Test-1735254146 tempest-ServersAdmin275Test-1735254146-project-admin] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 602.129025] env[63028]: DEBUG nova.virt.hardware [None req-e8cfdc3e-60ac-4489-8889-136c30d59549 tempest-ServersAdmin275Test-1735254146 tempest-ServersAdmin275Test-1735254146-project-admin] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 602.135302] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30ab4f57-790a-4136-ae40-e90ea37210a5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.137248] env[63028]: DEBUG oslo_vmware.api [None req-d380ac33-3a3f-419f-b4d1-ad7edb67b04d tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Task: {'id': task-2734968, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 602.138564] env[63028]: DEBUG nova.compute.manager [None req-78a1acb4-c1f1-436a-833a-acfe219f578d tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] [instance: 44fca05f-51db-4252-bcf8-6bcad37a6147] Start building block device mappings for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 602.151253] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ae1b372-dc77-4cce-9523-07b1eb95e9f9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.169789] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-e8cfdc3e-60ac-4489-8889-136c30d59549 tempest-ServersAdmin275Test-1735254146 tempest-ServersAdmin275Test-1735254146-project-admin] [instance: 94b1bf30-0f9b-4197-99ff-6631a13ab2d1] Instance VIF info [] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 602.176351] env[63028]: DEBUG oslo.service.loopingcall [None req-e8cfdc3e-60ac-4489-8889-136c30d59549 tempest-ServersAdmin275Test-1735254146 tempest-ServersAdmin275Test-1735254146-project-admin] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 602.176351] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 94b1bf30-0f9b-4197-99ff-6631a13ab2d1] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 602.177154] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-743d9a38-3e20-44cb-a3db-726dc38f1b8c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.197303] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ce8dbf78-89da-47a1-a8ca-68ee45531f5f tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Acquiring lock "2ae111f7-4eaa-46c2-ab97-907daa913834" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 602.197519] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ce8dbf78-89da-47a1-a8ca-68ee45531f5f tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Lock "2ae111f7-4eaa-46c2-ab97-907daa913834" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 602.203725] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 602.203725] env[63028]: value = "task-2734969" [ 602.203725] env[63028]: _type = "Task" [ 602.203725] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 602.214126] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2734969, 'name': CreateVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 602.226216] env[63028]: DEBUG nova.network.neutron [None req-4b7d0a7d-24c8-4fd0-b2fc-d929be4c8481 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] [instance: c7a3f2c6-8368-49cc-9737-ea1d836f1783] Instance cache missing network info. {{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 602.298084] env[63028]: DEBUG oslo_vmware.api [None req-19bb96ef-6a90-46d6-b14e-187dceb20ef8 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Task: {'id': task-2734967, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 602.486106] env[63028]: DEBUG nova.network.neutron [None req-78a1acb4-c1f1-436a-833a-acfe219f578d tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] [instance: 44fca05f-51db-4252-bcf8-6bcad37a6147] Successfully created port: 4fb8a759-76bd-4b37-a810-2665ea4a32b3 {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 602.612852] env[63028]: DEBUG nova.scheduler.client.report [None req-0ae73018-2ad8-453d-b0a6-c1359a822bd9 tempest-DeleteServersAdminTestJSON-1646188334 tempest-DeleteServersAdminTestJSON-1646188334-project-admin] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 602.630025] env[63028]: DEBUG oslo_vmware.api [None req-d380ac33-3a3f-419f-b4d1-ad7edb67b04d tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Task: {'id': task-2734968, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 602.700302] env[63028]: DEBUG nova.compute.manager [None req-ce8dbf78-89da-47a1-a8ca-68ee45531f5f tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] [instance: 2ae111f7-4eaa-46c2-ab97-907daa913834] Starting instance... {{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 602.722790] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2734969, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 602.781241] env[63028]: DEBUG nova.network.neutron [None req-4b7d0a7d-24c8-4fd0-b2fc-d929be4c8481 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] [instance: c7a3f2c6-8368-49cc-9737-ea1d836f1783] Updating instance_info_cache with network_info: [{"id": "dbc0e58f-b646-4c47-becf-ba9c242ca9aa", "address": "fa:16:3e:f3:d3:54", "network": {"id": "5f8a710c-b049-40fe-8cc4-00c97e954fd9", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1979325486-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "381de553d9da4c94b923d790c12a28a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ea16b8c-ebf6-4bd4-ab67-ddb472e7b460", "external-id": "nsx-vlan-transportzone-156", "segmentation_id": 156, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdbc0e58f-b6", "ovs_interfaceid": "dbc0e58f-b646-4c47-becf-ba9c242ca9aa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 602.803051] env[63028]: DEBUG oslo_vmware.api [None req-19bb96ef-6a90-46d6-b14e-187dceb20ef8 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Task: {'id': task-2734967, 'name': PowerOnVM_Task, 'duration_secs': 0.652984} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 602.803051] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-19bb96ef-6a90-46d6-b14e-187dceb20ef8 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: 5a330ed9-c106-49f2-b524-a424e717b5ce] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 602.803051] env[63028]: INFO nova.compute.manager [None req-19bb96ef-6a90-46d6-b14e-187dceb20ef8 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: 5a330ed9-c106-49f2-b524-a424e717b5ce] Took 12.23 seconds to spawn the instance on the hypervisor. 
[ 602.803051] env[63028]: DEBUG nova.compute.manager [None req-19bb96ef-6a90-46d6-b14e-187dceb20ef8 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: 5a330ed9-c106-49f2-b524-a424e717b5ce] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 602.803051] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87210556-fed3-4aba-a843-6c991eefd337 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.119705] env[63028]: DEBUG oslo_concurrency.lockutils [None req-0ae73018-2ad8-453d-b0a6-c1359a822bd9 tempest-DeleteServersAdminTestJSON-1646188334 tempest-DeleteServersAdminTestJSON-1646188334-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.996s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 603.123227] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c7c3e8ac-1fec-453a-8f4f-76e9aae0d91a tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.678s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 603.125988] env[63028]: INFO nova.compute.claims [None req-c7c3e8ac-1fec-453a-8f4f-76e9aae0d91a tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: f3277886-4498-45c6-be68-e71d8293dc00] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 603.145149] env[63028]: DEBUG oslo_vmware.api [None req-d380ac33-3a3f-419f-b4d1-ad7edb67b04d tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Task: {'id': task-2734968, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.600223} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 603.149605] env[63028]: INFO nova.scheduler.client.report [None req-0ae73018-2ad8-453d-b0a6-c1359a822bd9 tempest-DeleteServersAdminTestJSON-1646188334 tempest-DeleteServersAdminTestJSON-1646188334-project-admin] Deleted allocations for instance 679fca11-7390-4596-ab74-2f82a6cf8858 [ 603.149942] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-d380ac33-3a3f-419f-b4d1-ad7edb67b04d tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] c9cc1ac7-06c6-415b-86ce-daf4849bfc05/c9cc1ac7-06c6-415b-86ce-daf4849bfc05.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 603.150633] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-d380ac33-3a3f-419f-b4d1-ad7edb67b04d tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] [instance: c9cc1ac7-06c6-415b-86ce-daf4849bfc05] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 603.152184] env[63028]: DEBUG nova.compute.manager [None req-78a1acb4-c1f1-436a-833a-acfe219f578d tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] [instance: 44fca05f-51db-4252-bcf8-6bcad37a6147] Start spawning the instance on the hypervisor. {{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 603.157089] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d4c4a676-169d-427e-b71d-65413991d813 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.169375] env[63028]: DEBUG oslo_vmware.api [None req-d380ac33-3a3f-419f-b4d1-ad7edb67b04d tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Waiting for the task: (returnval){ [ 603.169375] env[63028]: value = "task-2734970" [ 603.169375] env[63028]: _type = "Task" [ 603.169375] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 603.193450] env[63028]: DEBUG oslo_vmware.api [None req-d380ac33-3a3f-419f-b4d1-ad7edb67b04d tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Task: {'id': task-2734970, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 603.196952] env[63028]: DEBUG nova.virt.hardware [None req-78a1acb4-c1f1-436a-833a-acfe219f578d tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 603.197674] env[63028]: DEBUG nova.virt.hardware [None req-78a1acb4-c1f1-436a-833a-acfe219f578d tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 603.197674] env[63028]: DEBUG nova.virt.hardware [None req-78a1acb4-c1f1-436a-833a-acfe219f578d tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 603.197674] env[63028]: DEBUG nova.virt.hardware [None req-78a1acb4-c1f1-436a-833a-acfe219f578d tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 603.197877] env[63028]: DEBUG nova.virt.hardware [None req-78a1acb4-c1f1-436a-833a-acfe219f578d tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 603.197916] env[63028]: DEBUG nova.virt.hardware [None req-78a1acb4-c1f1-436a-833a-acfe219f578d tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 603.198148] env[63028]: DEBUG nova.virt.hardware [None req-78a1acb4-c1f1-436a-833a-acfe219f578d tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 603.198336] env[63028]: DEBUG nova.virt.hardware [None req-78a1acb4-c1f1-436a-833a-acfe219f578d tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 603.198550] env[63028]: 
DEBUG nova.virt.hardware [None req-78a1acb4-c1f1-436a-833a-acfe219f578d tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 603.198670] env[63028]: DEBUG nova.virt.hardware [None req-78a1acb4-c1f1-436a-833a-acfe219f578d tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 603.198866] env[63028]: DEBUG nova.virt.hardware [None req-78a1acb4-c1f1-436a-833a-acfe219f578d tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 603.200610] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7576729e-6716-4b9c-8abe-ee12b5700712 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.219648] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82dc8c40-455e-4722-9d14-14df76a23532 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.228248] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2734969, 'name': CreateVM_Task, 'duration_secs': 0.591477} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 603.228520] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 94b1bf30-0f9b-4197-99ff-6631a13ab2d1] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 603.228783] env[63028]: DEBUG oslo_concurrency.lockutils [None req-e8cfdc3e-60ac-4489-8889-136c30d59549 tempest-ServersAdmin275Test-1735254146 tempest-ServersAdmin275Test-1735254146-project-admin] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 603.228974] env[63028]: DEBUG oslo_concurrency.lockutils [None req-e8cfdc3e-60ac-4489-8889-136c30d59549 tempest-ServersAdmin275Test-1735254146 tempest-ServersAdmin275Test-1735254146-project-admin] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 603.229400] env[63028]: DEBUG oslo_concurrency.lockutils [None req-e8cfdc3e-60ac-4489-8889-136c30d59549 tempest-ServersAdmin275Test-1735254146 tempest-ServersAdmin275Test-1735254146-project-admin] Acquired external semaphore "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 603.230454] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-02231273-ca21-4e60-b64c-873fb5f59a91 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.242734] env[63028]: DEBUG oslo_concurrency.lockutils 
[None req-ce8dbf78-89da-47a1-a8ca-68ee45531f5f tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 603.247307] env[63028]: DEBUG oslo_vmware.api [None req-e8cfdc3e-60ac-4489-8889-136c30d59549 tempest-ServersAdmin275Test-1735254146 tempest-ServersAdmin275Test-1735254146-project-admin] Waiting for the task: (returnval){ [ 603.247307] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52254b79-6539-5a9f-0312-b768da889bbf" [ 603.247307] env[63028]: _type = "Task" [ 603.247307] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 603.261674] env[63028]: DEBUG oslo_vmware.api [None req-e8cfdc3e-60ac-4489-8889-136c30d59549 tempest-ServersAdmin275Test-1735254146 tempest-ServersAdmin275Test-1735254146-project-admin] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52254b79-6539-5a9f-0312-b768da889bbf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 603.289440] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4b7d0a7d-24c8-4fd0-b2fc-d929be4c8481 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Releasing lock "refresh_cache-c7a3f2c6-8368-49cc-9737-ea1d836f1783" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 603.290031] env[63028]: DEBUG nova.compute.manager [None req-4b7d0a7d-24c8-4fd0-b2fc-d929be4c8481 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] [instance: c7a3f2c6-8368-49cc-9737-ea1d836f1783] Instance network_info: |[{"id": "dbc0e58f-b646-4c47-becf-ba9c242ca9aa", "address": "fa:16:3e:f3:d3:54", "network": {"id": "5f8a710c-b049-40fe-8cc4-00c97e954fd9", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1979325486-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "381de553d9da4c94b923d790c12a28a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ea16b8c-ebf6-4bd4-ab67-ddb472e7b460", "external-id": "nsx-vlan-transportzone-156", "segmentation_id": 156, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdbc0e58f-b6", "ovs_interfaceid": "dbc0e58f-b646-4c47-becf-ba9c242ca9aa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 603.290232] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-4b7d0a7d-24c8-4fd0-b2fc-d929be4c8481 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] [instance: c7a3f2c6-8368-49cc-9737-ea1d836f1783] Instance VIF info 
[{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f3:d3:54', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8ea16b8c-ebf6-4bd4-ab67-ddb472e7b460', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'dbc0e58f-b646-4c47-becf-ba9c242ca9aa', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 603.298834] env[63028]: DEBUG oslo.service.loopingcall [None req-4b7d0a7d-24c8-4fd0-b2fc-d929be4c8481 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 603.299531] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c7a3f2c6-8368-49cc-9737-ea1d836f1783] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 603.299760] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-66e8fe06-d273-4d22-abb7-1421ed3717ba {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.329961] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 603.329961] env[63028]: value = "task-2734971" [ 603.329961] env[63028]: _type = "Task" [ 603.329961] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 603.332922] env[63028]: INFO nova.compute.manager [None req-19bb96ef-6a90-46d6-b14e-187dceb20ef8 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: 5a330ed9-c106-49f2-b524-a424e717b5ce] Took 22.10 seconds to build instance. [ 603.339654] env[63028]: DEBUG nova.network.neutron [None req-fa9cf96e-1fa7-41d8-8fc5-4ef011c3de54 tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] [instance: 4a782483-c24e-44db-b697-856c69cc4a13] Successfully updated port: a290475c-c96a-4037-9a1f-e4340a86da15 {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 603.344959] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2734971, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 603.543997] env[63028]: DEBUG nova.compute.manager [req-47fe5cc2-caa2-40e5-b226-8cf02bf77e78 req-8585ce06-db30-4c8e-84d7-9841d78d7e85 service nova] [instance: c9cc1ac7-06c6-415b-86ce-daf4849bfc05] Received event network-vif-plugged-b8893183-7887-4add-9ad9-389b74e74c81 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 603.546292] env[63028]: DEBUG oslo_concurrency.lockutils [req-47fe5cc2-caa2-40e5-b226-8cf02bf77e78 req-8585ce06-db30-4c8e-84d7-9841d78d7e85 service nova] Acquiring lock "c9cc1ac7-06c6-415b-86ce-daf4849bfc05-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 603.546607] env[63028]: DEBUG oslo_concurrency.lockutils [req-47fe5cc2-caa2-40e5-b226-8cf02bf77e78 req-8585ce06-db30-4c8e-84d7-9841d78d7e85 service nova] Lock "c9cc1ac7-06c6-415b-86ce-daf4849bfc05-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.002s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 603.546838] env[63028]: DEBUG oslo_concurrency.lockutils [req-47fe5cc2-caa2-40e5-b226-8cf02bf77e78 req-8585ce06-db30-4c8e-84d7-9841d78d7e85 service nova] Lock "c9cc1ac7-06c6-415b-86ce-daf4849bfc05-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 603.547028] env[63028]: DEBUG nova.compute.manager [req-47fe5cc2-caa2-40e5-b226-8cf02bf77e78 req-8585ce06-db30-4c8e-84d7-9841d78d7e85 service nova] [instance: c9cc1ac7-06c6-415b-86ce-daf4849bfc05] No waiting events found dispatching network-vif-plugged-b8893183-7887-4add-9ad9-389b74e74c81 {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 603.547205] env[63028]: WARNING nova.compute.manager [req-47fe5cc2-caa2-40e5-b226-8cf02bf77e78 req-8585ce06-db30-4c8e-84d7-9841d78d7e85 service nova] [instance: c9cc1ac7-06c6-415b-86ce-daf4849bfc05] Received unexpected event network-vif-plugged-b8893183-7887-4add-9ad9-389b74e74c81 for instance with vm_state building and task_state spawning. [ 603.547370] env[63028]: DEBUG nova.compute.manager [req-47fe5cc2-caa2-40e5-b226-8cf02bf77e78 req-8585ce06-db30-4c8e-84d7-9841d78d7e85 service nova] [instance: c9cc1ac7-06c6-415b-86ce-daf4849bfc05] Received event network-changed-b8893183-7887-4add-9ad9-389b74e74c81 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 603.547526] env[63028]: DEBUG nova.compute.manager [req-47fe5cc2-caa2-40e5-b226-8cf02bf77e78 req-8585ce06-db30-4c8e-84d7-9841d78d7e85 service nova] [instance: c9cc1ac7-06c6-415b-86ce-daf4849bfc05] Refreshing instance network info cache due to event network-changed-b8893183-7887-4add-9ad9-389b74e74c81. 
{{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 603.547712] env[63028]: DEBUG oslo_concurrency.lockutils [req-47fe5cc2-caa2-40e5-b226-8cf02bf77e78 req-8585ce06-db30-4c8e-84d7-9841d78d7e85 service nova] Acquiring lock "refresh_cache-c9cc1ac7-06c6-415b-86ce-daf4849bfc05" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 603.547869] env[63028]: DEBUG oslo_concurrency.lockutils [req-47fe5cc2-caa2-40e5-b226-8cf02bf77e78 req-8585ce06-db30-4c8e-84d7-9841d78d7e85 service nova] Acquired lock "refresh_cache-c9cc1ac7-06c6-415b-86ce-daf4849bfc05" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 603.548050] env[63028]: DEBUG nova.network.neutron [req-47fe5cc2-caa2-40e5-b226-8cf02bf77e78 req-8585ce06-db30-4c8e-84d7-9841d78d7e85 service nova] [instance: c9cc1ac7-06c6-415b-86ce-daf4849bfc05] Refreshing network info cache for port b8893183-7887-4add-9ad9-389b74e74c81 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 603.563133] env[63028]: DEBUG nova.network.neutron [None req-79b6bfbd-a34c-4ac2-a0e7-30504d157986 tempest-ServerExternalEventsTest-1332495708 tempest-ServerExternalEventsTest-1332495708-project] [instance: 67440140-a619-41f2-98fe-eff23e8ad8a5] Updating instance_info_cache with network_info: [{"id": "f7a43b4b-f49a-4b79-b488-55fd4852195c", "address": "fa:16:3e:ac:e7:06", "network": {"id": "47c482bc-2ff1-431d-8910-0bf36def79a2", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.116", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "96661ac8d4f04d6e97eea4809b444133", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56136ef6-99d7-4562-9a9f-d66fec951c5c", "external-id": "nsx-vlan-transportzone-32", "segmentation_id": 32, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7a43b4b-f4", "ovs_interfaceid": "f7a43b4b-f49a-4b79-b488-55fd4852195c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 603.665258] env[63028]: DEBUG oslo_concurrency.lockutils [None req-0ae73018-2ad8-453d-b0a6-c1359a822bd9 tempest-DeleteServersAdminTestJSON-1646188334 tempest-DeleteServersAdminTestJSON-1646188334-project-admin] Lock "679fca11-7390-4596-ab74-2f82a6cf8858" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.470s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 603.689057] env[63028]: DEBUG oslo_vmware.api [None req-d380ac33-3a3f-419f-b4d1-ad7edb67b04d tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Task: {'id': task-2734970, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.082209} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 603.691112] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-d380ac33-3a3f-419f-b4d1-ad7edb67b04d tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] [instance: c9cc1ac7-06c6-415b-86ce-daf4849bfc05] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 603.691874] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-279c3b47-289c-4260-abe0-6103196642df {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.721545] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-d380ac33-3a3f-419f-b4d1-ad7edb67b04d tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] [instance: c9cc1ac7-06c6-415b-86ce-daf4849bfc05] Reconfiguring VM instance instance-0000000f to attach disk [datastore2] c9cc1ac7-06c6-415b-86ce-daf4849bfc05/c9cc1ac7-06c6-415b-86ce-daf4849bfc05.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 603.722312] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c1f931b8-e151-4a6b-ba59-6967943949f8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.748413] env[63028]: DEBUG oslo_vmware.api [None req-d380ac33-3a3f-419f-b4d1-ad7edb67b04d tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Waiting for the task: (returnval){ [ 603.748413] env[63028]: value = "task-2734973" [ 603.748413] env[63028]: _type = "Task" [ 603.748413] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 603.761695] env[63028]: DEBUG nova.compute.manager [req-fe2aabae-5046-4b28-88ee-137af7c72d14 req-fa414a4b-fa9b-4ee3-977e-03142346475f service nova] [instance: c7a3f2c6-8368-49cc-9737-ea1d836f1783] Received event network-vif-plugged-dbc0e58f-b646-4c47-becf-ba9c242ca9aa {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 603.761695] env[63028]: DEBUG oslo_concurrency.lockutils [req-fe2aabae-5046-4b28-88ee-137af7c72d14 req-fa414a4b-fa9b-4ee3-977e-03142346475f service nova] Acquiring lock "c7a3f2c6-8368-49cc-9737-ea1d836f1783-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 603.761695] env[63028]: DEBUG oslo_concurrency.lockutils [req-fe2aabae-5046-4b28-88ee-137af7c72d14 req-fa414a4b-fa9b-4ee3-977e-03142346475f service nova] Lock "c7a3f2c6-8368-49cc-9737-ea1d836f1783-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 603.761695] env[63028]: DEBUG oslo_concurrency.lockutils [req-fe2aabae-5046-4b28-88ee-137af7c72d14 req-fa414a4b-fa9b-4ee3-977e-03142346475f service nova] Lock "c7a3f2c6-8368-49cc-9737-ea1d836f1783-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 603.761695] env[63028]: DEBUG nova.compute.manager [req-fe2aabae-5046-4b28-88ee-137af7c72d14 req-fa414a4b-fa9b-4ee3-977e-03142346475f service nova] [instance: c7a3f2c6-8368-49cc-9737-ea1d836f1783] No waiting events found dispatching network-vif-plugged-dbc0e58f-b646-4c47-becf-ba9c242ca9aa {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 603.762017] env[63028]: WARNING nova.compute.manager [req-fe2aabae-5046-4b28-88ee-137af7c72d14 req-fa414a4b-fa9b-4ee3-977e-03142346475f service nova] [instance: c7a3f2c6-8368-49cc-9737-ea1d836f1783] Received unexpected event network-vif-plugged-dbc0e58f-b646-4c47-becf-ba9c242ca9aa for instance with vm_state building and task_state spawning. [ 603.769225] env[63028]: DEBUG oslo_vmware.api [None req-e8cfdc3e-60ac-4489-8889-136c30d59549 tempest-ServersAdmin275Test-1735254146 tempest-ServersAdmin275Test-1735254146-project-admin] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52254b79-6539-5a9f-0312-b768da889bbf, 'name': SearchDatastore_Task, 'duration_secs': 0.021014} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 603.772978] env[63028]: DEBUG oslo_concurrency.lockutils [None req-e8cfdc3e-60ac-4489-8889-136c30d59549 tempest-ServersAdmin275Test-1735254146 tempest-ServersAdmin275Test-1735254146-project-admin] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 603.772978] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-e8cfdc3e-60ac-4489-8889-136c30d59549 tempest-ServersAdmin275Test-1735254146 tempest-ServersAdmin275Test-1735254146-project-admin] [instance: 94b1bf30-0f9b-4197-99ff-6631a13ab2d1] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 603.772978] env[63028]: DEBUG oslo_concurrency.lockutils [None req-e8cfdc3e-60ac-4489-8889-136c30d59549 tempest-ServersAdmin275Test-1735254146 tempest-ServersAdmin275Test-1735254146-project-admin] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 603.772978] env[63028]: DEBUG oslo_concurrency.lockutils [None req-e8cfdc3e-60ac-4489-8889-136c30d59549 tempest-ServersAdmin275Test-1735254146 tempest-ServersAdmin275Test-1735254146-project-admin] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 603.773293] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-e8cfdc3e-60ac-4489-8889-136c30d59549 tempest-ServersAdmin275Test-1735254146 tempest-ServersAdmin275Test-1735254146-project-admin] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 603.773653] env[63028]: DEBUG oslo_vmware.api [None req-d380ac33-3a3f-419f-b4d1-ad7edb67b04d tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Task: {'id': task-2734973, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 603.774191] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-34213012-4f0f-4e87-adcd-fea2f451c3c5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.786715] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-e8cfdc3e-60ac-4489-8889-136c30d59549 tempest-ServersAdmin275Test-1735254146 tempest-ServersAdmin275Test-1735254146-project-admin] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 603.786968] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-e8cfdc3e-60ac-4489-8889-136c30d59549 tempest-ServersAdmin275Test-1735254146 tempest-ServersAdmin275Test-1735254146-project-admin] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 603.787801] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8b1e5b79-3d81-47eb-bb9b-1660c838f030 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.794771] env[63028]: DEBUG oslo_vmware.api [None req-e8cfdc3e-60ac-4489-8889-136c30d59549 tempest-ServersAdmin275Test-1735254146 tempest-ServersAdmin275Test-1735254146-project-admin] Waiting for the task: (returnval){ [ 603.794771] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5202f3bb-8397-db0e-9c79-c93e08907a36" [ 603.794771] env[63028]: _type = "Task" [ 603.794771] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 603.804976] env[63028]: DEBUG oslo_vmware.api [None req-e8cfdc3e-60ac-4489-8889-136c30d59549 tempest-ServersAdmin275Test-1735254146 tempest-ServersAdmin275Test-1735254146-project-admin] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5202f3bb-8397-db0e-9c79-c93e08907a36, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 603.835751] env[63028]: DEBUG oslo_concurrency.lockutils [None req-19bb96ef-6a90-46d6-b14e-187dceb20ef8 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Lock "5a330ed9-c106-49f2-b524-a424e717b5ce" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.745s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 603.843970] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2734971, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 603.846781] env[63028]: DEBUG oslo_concurrency.lockutils [None req-fa9cf96e-1fa7-41d8-8fc5-4ef011c3de54 tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] Acquiring lock "refresh_cache-4a782483-c24e-44db-b697-856c69cc4a13" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 603.846968] env[63028]: DEBUG oslo_concurrency.lockutils [None req-fa9cf96e-1fa7-41d8-8fc5-4ef011c3de54 tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] Acquired lock "refresh_cache-4a782483-c24e-44db-b697-856c69cc4a13" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 603.847133] env[63028]: DEBUG nova.network.neutron [None req-fa9cf96e-1fa7-41d8-8fc5-4ef011c3de54 tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] [instance: 4a782483-c24e-44db-b697-856c69cc4a13] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 604.067723] env[63028]: DEBUG oslo_concurrency.lockutils [None req-79b6bfbd-a34c-4ac2-a0e7-30504d157986 tempest-ServerExternalEventsTest-1332495708 tempest-ServerExternalEventsTest-1332495708-project] Releasing lock "refresh_cache-67440140-a619-41f2-98fe-eff23e8ad8a5" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 604.170818] env[63028]: DEBUG oslo_concurrency.lockutils [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Acquiring lock "99886410-ec47-46ad-9d07-ee3593006997" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 604.171340] env[63028]: DEBUG oslo_concurrency.lockutils [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Lock "99886410-ec47-46ad-9d07-ee3593006997" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 604.267782] env[63028]: DEBUG oslo_vmware.api [None req-d380ac33-3a3f-419f-b4d1-ad7edb67b04d tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Task: {'id': task-2734973, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 604.307673] env[63028]: DEBUG oslo_vmware.api [None req-e8cfdc3e-60ac-4489-8889-136c30d59549 tempest-ServersAdmin275Test-1735254146 tempest-ServersAdmin275Test-1735254146-project-admin] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5202f3bb-8397-db0e-9c79-c93e08907a36, 'name': SearchDatastore_Task, 'duration_secs': 0.020685} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 604.309302] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5f3cfb9d-7df9-4385-af3d-6b11a64c8b48 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.320341] env[63028]: DEBUG oslo_vmware.api [None req-e8cfdc3e-60ac-4489-8889-136c30d59549 tempest-ServersAdmin275Test-1735254146 tempest-ServersAdmin275Test-1735254146-project-admin] Waiting for the task: (returnval){ [ 604.320341] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52771cf8-ab4b-dbf8-b3ef-22747af66f2d" [ 604.320341] env[63028]: _type = "Task" [ 604.320341] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 604.354534] env[63028]: DEBUG oslo_vmware.api [None req-e8cfdc3e-60ac-4489-8889-136c30d59549 tempest-ServersAdmin275Test-1735254146 tempest-ServersAdmin275Test-1735254146-project-admin] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52771cf8-ab4b-dbf8-b3ef-22747af66f2d, 'name': SearchDatastore_Task, 'duration_secs': 0.01492} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 604.360570] env[63028]: DEBUG oslo_concurrency.lockutils [None req-e8cfdc3e-60ac-4489-8889-136c30d59549 tempest-ServersAdmin275Test-1735254146 tempest-ServersAdmin275Test-1735254146-project-admin] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 604.360878] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8cfdc3e-60ac-4489-8889-136c30d59549 tempest-ServersAdmin275Test-1735254146 tempest-ServersAdmin275Test-1735254146-project-admin] Copying Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] 94b1bf30-0f9b-4197-99ff-6631a13ab2d1/94b1bf30-0f9b-4197-99ff-6631a13ab2d1.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 604.361394] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2734971, 'name': CreateVM_Task, 'duration_secs': 0.529791} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 604.361866] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b7610c57-2c0e-45ae-89cc-9fd314d90037 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.364562] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c7a3f2c6-8368-49cc-9737-ea1d836f1783] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 604.365280] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4b7d0a7d-24c8-4fd0-b2fc-d929be4c8481 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 604.365440] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4b7d0a7d-24c8-4fd0-b2fc-d929be4c8481 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 604.366196] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4b7d0a7d-24c8-4fd0-b2fc-d929be4c8481 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 604.366471] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8284e095-b88f-47ee-b6f9-e447b233bcf6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.373312] env[63028]: DEBUG oslo_vmware.api [None req-4b7d0a7d-24c8-4fd0-b2fc-d929be4c8481 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Waiting for the task: (returnval){ [ 604.373312] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]527d3088-49ce-75fb-46be-0ba687cd9fdc" [ 604.373312] env[63028]: _type = "Task" [ 604.373312] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 604.378194] env[63028]: DEBUG oslo_vmware.api [None req-e8cfdc3e-60ac-4489-8889-136c30d59549 tempest-ServersAdmin275Test-1735254146 tempest-ServersAdmin275Test-1735254146-project-admin] Waiting for the task: (returnval){ [ 604.378194] env[63028]: value = "task-2734974" [ 604.378194] env[63028]: _type = "Task" [ 604.378194] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 604.386841] env[63028]: DEBUG oslo_vmware.api [None req-4b7d0a7d-24c8-4fd0-b2fc-d929be4c8481 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]527d3088-49ce-75fb-46be-0ba687cd9fdc, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 604.395094] env[63028]: DEBUG nova.network.neutron [req-47fe5cc2-caa2-40e5-b226-8cf02bf77e78 req-8585ce06-db30-4c8e-84d7-9841d78d7e85 service nova] [instance: c9cc1ac7-06c6-415b-86ce-daf4849bfc05] Updated VIF entry in instance network info cache for port b8893183-7887-4add-9ad9-389b74e74c81. {{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 604.395457] env[63028]: DEBUG nova.network.neutron [req-47fe5cc2-caa2-40e5-b226-8cf02bf77e78 req-8585ce06-db30-4c8e-84d7-9841d78d7e85 service nova] [instance: c9cc1ac7-06c6-415b-86ce-daf4849bfc05] Updating instance_info_cache with network_info: [{"id": "b8893183-7887-4add-9ad9-389b74e74c81", "address": "fa:16:3e:03:0c:7d", "network": {"id": "5370dc93-87d0-455c-a888-c3313b4b6f9c", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-1189794082-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f4bc908c43a648f79f88812f86dddb6c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9fa933df-d66f-485e-8cf9-eda7f1a7f283", "external-id": "nsx-vlan-transportzone-87", "segmentation_id": 87, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb8893183-78", "ovs_interfaceid": "b8893183-7887-4add-9ad9-389b74e74c81", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 604.400968] env[63028]: DEBUG oslo_vmware.api [None req-e8cfdc3e-60ac-4489-8889-136c30d59549 tempest-ServersAdmin275Test-1735254146 tempest-ServersAdmin275Test-1735254146-project-admin] Task: {'id': task-2734974, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 604.424450] env[63028]: DEBUG nova.network.neutron [None req-fa9cf96e-1fa7-41d8-8fc5-4ef011c3de54 tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] [instance: 4a782483-c24e-44db-b697-856c69cc4a13] Instance cache missing network info. 
{{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 604.578982] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e7c821b-835b-4e6e-9350-7fef659059b5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.591316] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf288959-a352-4608-8c43-2ba49f129423 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.634252] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48374cd0-0fee-436f-93b2-6e75cad88593 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.647210] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b4a2736-4b66-4ba7-a0cb-401b77bc2e6e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.664390] env[63028]: DEBUG nova.compute.provider_tree [None req-c7c3e8ac-1fec-453a-8f4f-76e9aae0d91a tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Updating inventory in ProviderTree for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 604.674178] env[63028]: DEBUG nova.compute.manager [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] [instance: 99886410-ec47-46ad-9d07-ee3593006997] Starting instance... {{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 604.764972] env[63028]: DEBUG oslo_vmware.api [None req-d380ac33-3a3f-419f-b4d1-ad7edb67b04d tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Task: {'id': task-2734973, 'name': ReconfigVM_Task, 'duration_secs': 0.599763} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 604.765234] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-d380ac33-3a3f-419f-b4d1-ad7edb67b04d tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] [instance: c9cc1ac7-06c6-415b-86ce-daf4849bfc05] Reconfigured VM instance instance-0000000f to attach disk [datastore2] c9cc1ac7-06c6-415b-86ce-daf4849bfc05/c9cc1ac7-06c6-415b-86ce-daf4849bfc05.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 604.765877] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-710d1c4e-8266-4455-88d6-db1b217449ab {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.776865] env[63028]: DEBUG oslo_vmware.api [None req-d380ac33-3a3f-419f-b4d1-ad7edb67b04d tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Waiting for the task: (returnval){ [ 604.776865] env[63028]: value = "task-2734975" [ 604.776865] env[63028]: _type = "Task" [ 604.776865] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 604.795491] env[63028]: DEBUG oslo_vmware.api [None req-d380ac33-3a3f-419f-b4d1-ad7edb67b04d tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Task: {'id': task-2734975, 'name': Rename_Task} progress is 6%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 604.888252] env[63028]: DEBUG nova.network.neutron [None req-fa9cf96e-1fa7-41d8-8fc5-4ef011c3de54 tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] [instance: 4a782483-c24e-44db-b697-856c69cc4a13] Updating instance_info_cache with network_info: [{"id": "a290475c-c96a-4037-9a1f-e4340a86da15", "address": "fa:16:3e:0a:4f:a0", "network": {"id": "eea88aa4-8c75-4cce-b1a6-7b2e64245351", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-868430466-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3b056498f618493295359c1784b6660a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b7bf7d4-8e0c-4cee-84ba-244e73ef6379", "external-id": "nsx-vlan-transportzone-423", "segmentation_id": 423, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa290475c-c9", "ovs_interfaceid": "a290475c-c96a-4037-9a1f-e4340a86da15", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 604.894813] env[63028]: DEBUG oslo_vmware.api [None req-4b7d0a7d-24c8-4fd0-b2fc-d929be4c8481 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Task: {'id': 
session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]527d3088-49ce-75fb-46be-0ba687cd9fdc, 'name': SearchDatastore_Task, 'duration_secs': 0.015887} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 604.897258] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4b7d0a7d-24c8-4fd0-b2fc-d929be4c8481 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 604.897258] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-4b7d0a7d-24c8-4fd0-b2fc-d929be4c8481 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] [instance: c7a3f2c6-8368-49cc-9737-ea1d836f1783] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 604.897427] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4b7d0a7d-24c8-4fd0-b2fc-d929be4c8481 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 604.897825] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4b7d0a7d-24c8-4fd0-b2fc-d929be4c8481 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 604.898334] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-4b7d0a7d-24c8-4fd0-b2fc-d929be4c8481 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 604.902668] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b83bc0a9-46cc-4f4d-988a-ad85e114b4af {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.905996] env[63028]: DEBUG oslo_concurrency.lockutils [req-47fe5cc2-caa2-40e5-b226-8cf02bf77e78 req-8585ce06-db30-4c8e-84d7-9841d78d7e85 service nova] Releasing lock "refresh_cache-c9cc1ac7-06c6-415b-86ce-daf4849bfc05" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 604.906703] env[63028]: DEBUG oslo_vmware.api [None req-e8cfdc3e-60ac-4489-8889-136c30d59549 tempest-ServersAdmin275Test-1735254146 tempest-ServersAdmin275Test-1735254146-project-admin] Task: {'id': task-2734974, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 604.924039] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-4b7d0a7d-24c8-4fd0-b2fc-d929be4c8481 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 604.924328] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-4b7d0a7d-24c8-4fd0-b2fc-d929be4c8481 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 604.925211] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cedad030-a331-4348-b9f5-9988309928d9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.933780] env[63028]: DEBUG oslo_vmware.api [None req-4b7d0a7d-24c8-4fd0-b2fc-d929be4c8481 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Waiting for the task: (returnval){ [ 604.933780] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]524491f9-33f2-65d5-6411-6878f73c98ec" [ 604.933780] env[63028]: _type = "Task" [ 604.933780] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 604.944499] env[63028]: DEBUG oslo_vmware.api [None req-4b7d0a7d-24c8-4fd0-b2fc-d929be4c8481 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]524491f9-33f2-65d5-6411-6878f73c98ec, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 604.961892] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a93d865e-c2f1-4997-9b78-d5c953b30008 tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] Acquiring lock "8f6beda6-0fc6-4d85-9f27-f4248adda8f3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 604.962051] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a93d865e-c2f1-4997-9b78-d5c953b30008 tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] Lock "8f6beda6-0fc6-4d85-9f27-f4248adda8f3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 605.133719] env[63028]: DEBUG oslo_concurrency.lockutils [None req-fc2e2e77-fc06-435b-8552-1bfab521033e tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] Acquiring lock "1eeb96d1-6e03-4192-a9db-955444519fd7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 605.133864] env[63028]: DEBUG oslo_concurrency.lockutils [None req-fc2e2e77-fc06-435b-8552-1bfab521033e tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] Lock "1eeb96d1-6e03-4192-a9db-955444519fd7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 605.134337] env[63028]: DEBUG oslo_concurrency.lockutils [None req-fc2e2e77-fc06-435b-8552-1bfab521033e tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] Acquiring lock "1eeb96d1-6e03-4192-a9db-955444519fd7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 605.134337] env[63028]: DEBUG oslo_concurrency.lockutils [None req-fc2e2e77-fc06-435b-8552-1bfab521033e tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] Lock "1eeb96d1-6e03-4192-a9db-955444519fd7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 605.134941] env[63028]: DEBUG oslo_concurrency.lockutils [None req-fc2e2e77-fc06-435b-8552-1bfab521033e tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] Lock "1eeb96d1-6e03-4192-a9db-955444519fd7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 605.141089] env[63028]: INFO nova.compute.manager [None req-fc2e2e77-fc06-435b-8552-1bfab521033e tempest-ServerDiagnosticsV248Test-1913174578 
tempest-ServerDiagnosticsV248Test-1913174578-project-member] [instance: 1eeb96d1-6e03-4192-a9db-955444519fd7] Terminating instance [ 605.157049] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5282f999-0e6a-4d73-89f0-a01dea2945b9 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] Acquiring lock "0e07a6cd-8c99-408d-95ba-63f7839c327f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 605.157306] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5282f999-0e6a-4d73-89f0-a01dea2945b9 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] Lock "0e07a6cd-8c99-408d-95ba-63f7839c327f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 605.166805] env[63028]: DEBUG nova.network.neutron [None req-78a1acb4-c1f1-436a-833a-acfe219f578d tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] [instance: 44fca05f-51db-4252-bcf8-6bcad37a6147] Successfully updated port: 4fb8a759-76bd-4b37-a810-2665ea4a32b3 {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 605.195816] env[63028]: ERROR nova.scheduler.client.report [None req-c7c3e8ac-1fec-453a-8f4f-76e9aae0d91a tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [req-dc0f0d5d-f0ad-4731-a4ac-e1a0f350c6c7] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-dc0f0d5d-f0ad-4731-a4ac-e1a0f350c6c7"}]} [ 605.199342] env[63028]: DEBUG oslo_concurrency.lockutils [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 605.216205] env[63028]: DEBUG nova.scheduler.client.report [None req-c7c3e8ac-1fec-453a-8f4f-76e9aae0d91a tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Refreshing inventories for resource provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 605.236998] env[63028]: DEBUG nova.scheduler.client.report [None req-c7c3e8ac-1fec-453a-8f4f-76e9aae0d91a tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Updating ProviderTree inventory for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 605.237322] env[63028]: DEBUG nova.compute.provider_tree [None req-c7c3e8ac-1fec-453a-8f4f-76e9aae0d91a tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Updating inventory in ProviderTree for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 605.256022] env[63028]: DEBUG nova.scheduler.client.report [None req-c7c3e8ac-1fec-453a-8f4f-76e9aae0d91a tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Refreshing aggregate associations for resource provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2, aggregates: None {{(pid=63028) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 605.283798] env[63028]: DEBUG nova.scheduler.client.report [None req-c7c3e8ac-1fec-453a-8f4f-76e9aae0d91a tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Refreshing trait associations for resource provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=63028) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 605.290303] env[63028]: DEBUG oslo_vmware.api [None 
req-d380ac33-3a3f-419f-b4d1-ad7edb67b04d tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Task: {'id': task-2734975, 'name': Rename_Task, 'duration_secs': 0.308958} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 605.291733] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-d380ac33-3a3f-419f-b4d1-ad7edb67b04d tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] [instance: c9cc1ac7-06c6-415b-86ce-daf4849bfc05] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 605.291733] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-32398e2f-ff8b-4137-b1ab-fa0e2bfb52f6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.298680] env[63028]: DEBUG oslo_vmware.api [None req-d380ac33-3a3f-419f-b4d1-ad7edb67b04d tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Waiting for the task: (returnval){ [ 605.298680] env[63028]: value = "task-2734976" [ 605.298680] env[63028]: _type = "Task" [ 605.298680] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 605.310954] env[63028]: DEBUG oslo_vmware.api [None req-d380ac33-3a3f-419f-b4d1-ad7edb67b04d tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Task: {'id': task-2734976, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 605.400132] env[63028]: DEBUG oslo_concurrency.lockutils [None req-fa9cf96e-1fa7-41d8-8fc5-4ef011c3de54 tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] Releasing lock "refresh_cache-4a782483-c24e-44db-b697-856c69cc4a13" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 605.400942] env[63028]: DEBUG nova.compute.manager [None req-fa9cf96e-1fa7-41d8-8fc5-4ef011c3de54 tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] [instance: 4a782483-c24e-44db-b697-856c69cc4a13] Instance network_info: |[{"id": "a290475c-c96a-4037-9a1f-e4340a86da15", "address": "fa:16:3e:0a:4f:a0", "network": {"id": "eea88aa4-8c75-4cce-b1a6-7b2e64245351", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-868430466-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3b056498f618493295359c1784b6660a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b7bf7d4-8e0c-4cee-84ba-244e73ef6379", "external-id": "nsx-vlan-transportzone-423", "segmentation_id": 423, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa290475c-c9", "ovs_interfaceid": "a290475c-c96a-4037-9a1f-e4340a86da15", "qbh_params": null, "qbg_params": null, "active": true, 
"vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 605.401133] env[63028]: DEBUG oslo_vmware.api [None req-e8cfdc3e-60ac-4489-8889-136c30d59549 tempest-ServersAdmin275Test-1735254146 tempest-ServersAdmin275Test-1735254146-project-admin] Task: {'id': task-2734974, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.671966} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 605.401559] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-fa9cf96e-1fa7-41d8-8fc5-4ef011c3de54 tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] [instance: 4a782483-c24e-44db-b697-856c69cc4a13] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0a:4f:a0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3b7bf7d4-8e0c-4cee-84ba-244e73ef6379', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a290475c-c96a-4037-9a1f-e4340a86da15', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 605.409892] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa9cf96e-1fa7-41d8-8fc5-4ef011c3de54 tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] Creating folder: Project (3b056498f618493295359c1784b6660a). Parent ref: group-v550570. {{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 605.410261] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8cfdc3e-60ac-4489-8889-136c30d59549 tempest-ServersAdmin275Test-1735254146 tempest-ServersAdmin275Test-1735254146-project-admin] Copied Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] 94b1bf30-0f9b-4197-99ff-6631a13ab2d1/94b1bf30-0f9b-4197-99ff-6631a13ab2d1.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 605.410474] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-e8cfdc3e-60ac-4489-8889-136c30d59549 tempest-ServersAdmin275Test-1735254146 tempest-ServersAdmin275Test-1735254146-project-admin] [instance: 94b1bf30-0f9b-4197-99ff-6631a13ab2d1] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 605.413498] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d2b266ce-c688-406c-9bdb-e6938595090d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.415369] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-639deeb5-261d-4dab-bb03-00c48be96235 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.424626] env[63028]: DEBUG oslo_vmware.api [None req-e8cfdc3e-60ac-4489-8889-136c30d59549 tempest-ServersAdmin275Test-1735254146 tempest-ServersAdmin275Test-1735254146-project-admin] Waiting for the task: (returnval){ [ 605.424626] env[63028]: value = "task-2734978" [ 605.424626] env[63028]: _type = "Task" [ 605.424626] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 605.438244] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-fa9cf96e-1fa7-41d8-8fc5-4ef011c3de54 tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] Created folder: Project (3b056498f618493295359c1784b6660a) in parent group-v550570. [ 605.438559] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa9cf96e-1fa7-41d8-8fc5-4ef011c3de54 tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] Creating folder: Instances. Parent ref: group-v550623. {{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 605.449361] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6b59a212-608c-47bb-aee6-89b222c8e711 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.452290] env[63028]: DEBUG oslo_vmware.api [None req-e8cfdc3e-60ac-4489-8889-136c30d59549 tempest-ServersAdmin275Test-1735254146 tempest-ServersAdmin275Test-1735254146-project-admin] Task: {'id': task-2734978, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 605.463755] env[63028]: DEBUG oslo_vmware.api [None req-4b7d0a7d-24c8-4fd0-b2fc-d929be4c8481 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]524491f9-33f2-65d5-6411-6878f73c98ec, 'name': SearchDatastore_Task, 'duration_secs': 0.058042} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 605.464279] env[63028]: DEBUG nova.compute.manager [None req-a93d865e-c2f1-4997-9b78-d5c953b30008 tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] [instance: 8f6beda6-0fc6-4d85-9f27-f4248adda8f3] Starting instance... {{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 605.470449] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-fa9cf96e-1fa7-41d8-8fc5-4ef011c3de54 tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] Created folder: Instances in parent group-v550623. [ 605.470765] env[63028]: DEBUG oslo.service.loopingcall [None req-fa9cf96e-1fa7-41d8-8fc5-4ef011c3de54 tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 605.477934] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5ab6e467-48ba-4cbd-8bf8-08cf0c2e4815 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.477934] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4a782483-c24e-44db-b697-856c69cc4a13] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 605.477934] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-61885e73-efc5-4b97-84f8-44a8102ec928 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.503074] env[63028]: DEBUG oslo_vmware.api [None req-4b7d0a7d-24c8-4fd0-b2fc-d929be4c8481 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Waiting for the task: (returnval){ [ 605.503074] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]529070af-60d5-fa58-4869-fd3059d1bc09" [ 605.503074] env[63028]: _type = "Task" [ 605.503074] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 605.505859] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 605.505859] env[63028]: value = "task-2734980" [ 605.505859] env[63028]: _type = "Task" [ 605.505859] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 605.517051] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a4c82103-d8b7-4845-a6b4-abab95e8e4fb tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Acquiring lock "67440140-a619-41f2-98fe-eff23e8ad8a5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 605.517400] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a4c82103-d8b7-4845-a6b4-abab95e8e4fb tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Lock "67440140-a619-41f2-98fe-eff23e8ad8a5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 605.517635] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a4c82103-d8b7-4845-a6b4-abab95e8e4fb tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Acquiring lock "67440140-a619-41f2-98fe-eff23e8ad8a5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 605.517858] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a4c82103-d8b7-4845-a6b4-abab95e8e4fb tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Lock "67440140-a619-41f2-98fe-eff23e8ad8a5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 605.518040] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a4c82103-d8b7-4845-a6b4-abab95e8e4fb tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Lock "67440140-a619-41f2-98fe-eff23e8ad8a5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 605.529978] env[63028]: DEBUG oslo_vmware.api [None req-4b7d0a7d-24c8-4fd0-b2fc-d929be4c8481 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]529070af-60d5-fa58-4869-fd3059d1bc09, 'name': SearchDatastore_Task, 'duration_secs': 0.016102} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 605.534965] env[63028]: INFO nova.compute.manager [None req-a4c82103-d8b7-4845-a6b4-abab95e8e4fb tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] [instance: 67440140-a619-41f2-98fe-eff23e8ad8a5] Terminating instance [ 605.536934] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4b7d0a7d-24c8-4fd0-b2fc-d929be4c8481 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 605.536934] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b7d0a7d-24c8-4fd0-b2fc-d929be4c8481 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] c7a3f2c6-8368-49cc-9737-ea1d836f1783/c7a3f2c6-8368-49cc-9737-ea1d836f1783.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 605.537515] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2734980, 'name': CreateVM_Task} progress is 6%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 605.538988] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-996c4389-bfe2-40d2-aafd-e281bf20acec {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.554038] env[63028]: DEBUG oslo_vmware.api [None req-4b7d0a7d-24c8-4fd0-b2fc-d929be4c8481 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Waiting for the task: (returnval){ [ 605.554038] env[63028]: value = "task-2734982" [ 605.554038] env[63028]: _type = "Task" [ 605.554038] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 605.565330] env[63028]: DEBUG oslo_vmware.api [None req-4b7d0a7d-24c8-4fd0-b2fc-d929be4c8481 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Task: {'id': task-2734982, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 605.645543] env[63028]: DEBUG oslo_concurrency.lockutils [None req-fc2e2e77-fc06-435b-8552-1bfab521033e tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] Acquiring lock "refresh_cache-1eeb96d1-6e03-4192-a9db-955444519fd7" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 605.645698] env[63028]: DEBUG oslo_concurrency.lockutils [None req-fc2e2e77-fc06-435b-8552-1bfab521033e tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] Acquired lock "refresh_cache-1eeb96d1-6e03-4192-a9db-955444519fd7" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 605.645913] env[63028]: DEBUG nova.network.neutron [None req-fc2e2e77-fc06-435b-8552-1bfab521033e tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] [instance: 1eeb96d1-6e03-4192-a9db-955444519fd7] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 605.675204] env[63028]: DEBUG oslo_concurrency.lockutils [None req-78a1acb4-c1f1-436a-833a-acfe219f578d tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] Acquiring lock "refresh_cache-44fca05f-51db-4252-bcf8-6bcad37a6147" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 605.675204] env[63028]: DEBUG oslo_concurrency.lockutils [None req-78a1acb4-c1f1-436a-833a-acfe219f578d tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] Acquired lock "refresh_cache-44fca05f-51db-4252-bcf8-6bcad37a6147" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 605.675204] env[63028]: DEBUG nova.network.neutron [None req-78a1acb4-c1f1-436a-833a-acfe219f578d tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] [instance: 44fca05f-51db-4252-bcf8-6bcad37a6147] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 605.743578] env[63028]: DEBUG oslo_vmware.rw_handles [None req-a428f7da-b8aa-40cb-9fdc-1b56e9c461c4 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52d9c42c-12f3-6631-7239-fac17995e4a1/disk-0.vmdk. 
{{(pid=63028) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 605.743578] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87347ebb-f240-4e56-9cb8-7bb29e588f78 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.756203] env[63028]: DEBUG oslo_vmware.rw_handles [None req-a428f7da-b8aa-40cb-9fdc-1b56e9c461c4 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52d9c42c-12f3-6631-7239-fac17995e4a1/disk-0.vmdk is in state: ready. {{(pid=63028) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 605.756203] env[63028]: ERROR oslo_vmware.rw_handles [None req-a428f7da-b8aa-40cb-9fdc-1b56e9c461c4 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52d9c42c-12f3-6631-7239-fac17995e4a1/disk-0.vmdk due to incomplete transfer. [ 605.756203] env[63028]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-88106743-5a8c-4ec0-8791-d541935c52cd {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.770445] env[63028]: DEBUG oslo_vmware.rw_handles [None req-a428f7da-b8aa-40cb-9fdc-1b56e9c461c4 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52d9c42c-12f3-6631-7239-fac17995e4a1/disk-0.vmdk. {{(pid=63028) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 605.770445] env[63028]: DEBUG nova.virt.vmwareapi.images [None req-a428f7da-b8aa-40cb-9fdc-1b56e9c461c4 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: f311a533-5c48-410b-ba3b-58f0032c8816] Uploaded image b601ac28-7c34-45b7-b134-cb9218471bed to the Glance image server {{(pid=63028) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 605.773265] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-a428f7da-b8aa-40cb-9fdc-1b56e9c461c4 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: f311a533-5c48-410b-ba3b-58f0032c8816] Destroying the VM {{(pid=63028) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 605.773566] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-8798bd76-33fe-4dc7-b1f9-f0a6edde2b0e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.782666] env[63028]: DEBUG oslo_vmware.api [None req-a428f7da-b8aa-40cb-9fdc-1b56e9c461c4 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Waiting for the task: (returnval){ [ 605.782666] env[63028]: value = "task-2734983" [ 605.782666] env[63028]: _type = "Task" [ 605.782666] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 605.792807] env[63028]: DEBUG oslo_vmware.api [None req-a428f7da-b8aa-40cb-9fdc-1b56e9c461c4 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2734983, 'name': Destroy_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 605.795944] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e34bc3e-cbb2-4e55-9ac0-50055edd9e52 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.808290] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95a68e0c-5642-455f-a7d0-970da9455e82 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.815033] env[63028]: DEBUG oslo_vmware.api [None req-d380ac33-3a3f-419f-b4d1-ad7edb67b04d tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Task: {'id': task-2734976, 'name': PowerOnVM_Task} progress is 1%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 605.851426] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5120a2b9-b23b-4931-8862-98388c7edb7a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.860475] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fabf4da-a1a7-403b-99ca-12942f30811e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.881387] env[63028]: DEBUG nova.compute.provider_tree [None req-c7c3e8ac-1fec-453a-8f4f-76e9aae0d91a tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Updating inventory in ProviderTree for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 605.941541] env[63028]: DEBUG oslo_vmware.api [None req-e8cfdc3e-60ac-4489-8889-136c30d59549 tempest-ServersAdmin275Test-1735254146 tempest-ServersAdmin275Test-1735254146-project-admin] Task: {'id': task-2734978, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079938} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 605.941841] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-e8cfdc3e-60ac-4489-8889-136c30d59549 tempest-ServersAdmin275Test-1735254146 tempest-ServersAdmin275Test-1735254146-project-admin] [instance: 94b1bf30-0f9b-4197-99ff-6631a13ab2d1] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 605.942759] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-881aa4ae-b768-4817-aea4-502c6a399bed {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.971510] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-e8cfdc3e-60ac-4489-8889-136c30d59549 tempest-ServersAdmin275Test-1735254146 tempest-ServersAdmin275Test-1735254146-project-admin] [instance: 94b1bf30-0f9b-4197-99ff-6631a13ab2d1] Reconfiguring VM instance instance-00000005 to attach disk [datastore2] 94b1bf30-0f9b-4197-99ff-6631a13ab2d1/94b1bf30-0f9b-4197-99ff-6631a13ab2d1.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 605.976040] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e03f3e1a-9cd7-4d7d-bece-31fd82de10dc {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.009402] env[63028]: DEBUG oslo_vmware.api [None req-e8cfdc3e-60ac-4489-8889-136c30d59549 tempest-ServersAdmin275Test-1735254146 tempest-ServersAdmin275Test-1735254146-project-admin] Waiting for the task: (returnval){ [ 606.009402] env[63028]: value = "task-2734984" [ 606.009402] env[63028]: _type = "Task" [ 606.009402] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 606.018562] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a93d865e-c2f1-4997-9b78-d5c953b30008 tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 606.026659] env[63028]: DEBUG oslo_vmware.api [None req-e8cfdc3e-60ac-4489-8889-136c30d59549 tempest-ServersAdmin275Test-1735254146 tempest-ServersAdmin275Test-1735254146-project-admin] Task: {'id': task-2734984, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 606.034425] env[63028]: INFO nova.compute.manager [None req-c4ea86be-f4b7-4838-b9cd-5f32319112a9 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] [instance: 0dbafad1-ab21-439d-bc8e-e447ac33304e] Rebuilding instance [ 606.034425] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2734980, 'name': CreateVM_Task} progress is 99%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 606.043492] env[63028]: DEBUG nova.compute.manager [None req-a4c82103-d8b7-4845-a6b4-abab95e8e4fb tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] [instance: 67440140-a619-41f2-98fe-eff23e8ad8a5] Start destroying the instance on the hypervisor. 
{{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 606.043772] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-a4c82103-d8b7-4845-a6b4-abab95e8e4fb tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] [instance: 67440140-a619-41f2-98fe-eff23e8ad8a5] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 606.045076] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd5bc7c7-0be3-4a5f-a793-83a9bea851c4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.067160] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4c82103-d8b7-4845-a6b4-abab95e8e4fb tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] [instance: 67440140-a619-41f2-98fe-eff23e8ad8a5] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 606.067952] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-55e89237-b37a-488a-902e-28d7c866e218 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.079494] env[63028]: DEBUG oslo_vmware.api [None req-4b7d0a7d-24c8-4fd0-b2fc-d929be4c8481 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Task: {'id': task-2734982, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 606.080917] env[63028]: DEBUG oslo_vmware.api [None req-a4c82103-d8b7-4845-a6b4-abab95e8e4fb tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Waiting for the task: (returnval){ [ 606.080917] env[63028]: value = "task-2734985" [ 606.080917] env[63028]: _type = "Task" [ 606.080917] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 606.102401] env[63028]: DEBUG oslo_vmware.api [None req-a4c82103-d8b7-4845-a6b4-abab95e8e4fb tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Task: {'id': task-2734985, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 606.115712] env[63028]: DEBUG nova.compute.manager [None req-c4ea86be-f4b7-4838-b9cd-5f32319112a9 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] [instance: 0dbafad1-ab21-439d-bc8e-e447ac33304e] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 606.116704] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e32db7a-e110-49e9-990e-2557f064fa22 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.174303] env[63028]: DEBUG nova.network.neutron [None req-fc2e2e77-fc06-435b-8552-1bfab521033e tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] [instance: 1eeb96d1-6e03-4192-a9db-955444519fd7] Instance cache missing network info. 
{{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 606.239405] env[63028]: DEBUG nova.network.neutron [None req-78a1acb4-c1f1-436a-833a-acfe219f578d tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] [instance: 44fca05f-51db-4252-bcf8-6bcad37a6147] Instance cache missing network info. {{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 606.295227] env[63028]: DEBUG oslo_vmware.api [None req-a428f7da-b8aa-40cb-9fdc-1b56e9c461c4 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2734983, 'name': Destroy_Task} progress is 33%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 606.304188] env[63028]: DEBUG nova.network.neutron [None req-fc2e2e77-fc06-435b-8552-1bfab521033e tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] [instance: 1eeb96d1-6e03-4192-a9db-955444519fd7] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 606.313554] env[63028]: DEBUG oslo_vmware.api [None req-d380ac33-3a3f-419f-b4d1-ad7edb67b04d tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Task: {'id': task-2734976, 'name': PowerOnVM_Task} progress is 64%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 606.409520] env[63028]: DEBUG nova.network.neutron [None req-78a1acb4-c1f1-436a-833a-acfe219f578d tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] [instance: 44fca05f-51db-4252-bcf8-6bcad37a6147] Updating instance_info_cache with network_info: [{"id": "4fb8a759-76bd-4b37-a810-2665ea4a32b3", "address": "fa:16:3e:6b:a2:3a", "network": {"id": "3a62b3ba-178e-4e4a-b8ce-a36e2a779888", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1226406934-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "824cdcbbc0e1478b9d900cde707bc67b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37fb1918-d178-4e12-93e6-316381e78be4", "external-id": "nsx-vlan-transportzone-763", "segmentation_id": 763, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4fb8a759-76", "ovs_interfaceid": "4fb8a759-76bd-4b37-a810-2665ea4a32b3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 606.434101] env[63028]: DEBUG nova.scheduler.client.report [None req-c7c3e8ac-1fec-453a-8f4f-76e9aae0d91a tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Updated inventory for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 with generation 29 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 
'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 606.434380] env[63028]: DEBUG nova.compute.provider_tree [None req-c7c3e8ac-1fec-453a-8f4f-76e9aae0d91a tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Updating resource provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 generation from 29 to 30 during operation: update_inventory {{(pid=63028) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 606.434559] env[63028]: DEBUG nova.compute.provider_tree [None req-c7c3e8ac-1fec-453a-8f4f-76e9aae0d91a tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Updating inventory in ProviderTree for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 606.531538] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2734980, 'name': CreateVM_Task, 'duration_secs': 0.578763} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 606.535879] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4a782483-c24e-44db-b697-856c69cc4a13] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 606.536313] env[63028]: DEBUG oslo_vmware.api [None req-e8cfdc3e-60ac-4489-8889-136c30d59549 tempest-ServersAdmin275Test-1735254146 tempest-ServersAdmin275Test-1735254146-project-admin] Task: {'id': task-2734984, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 606.537125] env[63028]: DEBUG oslo_concurrency.lockutils [None req-fa9cf96e-1fa7-41d8-8fc5-4ef011c3de54 tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 606.537356] env[63028]: DEBUG oslo_concurrency.lockutils [None req-fa9cf96e-1fa7-41d8-8fc5-4ef011c3de54 tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 606.538216] env[63028]: DEBUG oslo_concurrency.lockutils [None req-fa9cf96e-1fa7-41d8-8fc5-4ef011c3de54 tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 606.538216] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e3dc4fa8-e5fb-48fc-b542-4267d6d4cbd8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.544372] env[63028]: DEBUG oslo_vmware.api [None req-fa9cf96e-1fa7-41d8-8fc5-4ef011c3de54 tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] Waiting for the task: (returnval){ [ 606.544372] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52299a2e-8ba9-77e2-0fa7-6665665305b8" [ 606.544372] env[63028]: _type = "Task" [ 606.544372] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 606.555071] env[63028]: DEBUG oslo_vmware.api [None req-fa9cf96e-1fa7-41d8-8fc5-4ef011c3de54 tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52299a2e-8ba9-77e2-0fa7-6665665305b8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 606.565849] env[63028]: DEBUG oslo_vmware.api [None req-4b7d0a7d-24c8-4fd0-b2fc-d929be4c8481 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Task: {'id': task-2734982, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.633572} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 606.565849] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b7d0a7d-24c8-4fd0-b2fc-d929be4c8481 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] c7a3f2c6-8368-49cc-9737-ea1d836f1783/c7a3f2c6-8368-49cc-9737-ea1d836f1783.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 606.566146] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-4b7d0a7d-24c8-4fd0-b2fc-d929be4c8481 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] [instance: c7a3f2c6-8368-49cc-9737-ea1d836f1783] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 606.566219] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-95328342-7a39-42f9-8e4c-636349d11b93 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.577953] env[63028]: DEBUG oslo_vmware.api [None req-4b7d0a7d-24c8-4fd0-b2fc-d929be4c8481 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Waiting for the task: (returnval){ [ 606.577953] env[63028]: value = "task-2734986" [ 606.577953] env[63028]: _type = "Task" [ 606.577953] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 606.594882] env[63028]: DEBUG oslo_vmware.api [None req-4b7d0a7d-24c8-4fd0-b2fc-d929be4c8481 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Task: {'id': task-2734986, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 606.598449] env[63028]: DEBUG oslo_vmware.api [None req-a4c82103-d8b7-4845-a6b4-abab95e8e4fb tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Task: {'id': task-2734985, 'name': PowerOffVM_Task, 'duration_secs': 0.481941} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 606.598605] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4c82103-d8b7-4845-a6b4-abab95e8e4fb tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] [instance: 67440140-a619-41f2-98fe-eff23e8ad8a5] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 606.598744] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-a4c82103-d8b7-4845-a6b4-abab95e8e4fb tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] [instance: 67440140-a619-41f2-98fe-eff23e8ad8a5] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 606.599035] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-04bd71f2-9bf9-4248-be38-f2a5b2d09c3f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.800874] env[63028]: DEBUG oslo_vmware.api [None req-a428f7da-b8aa-40cb-9fdc-1b56e9c461c4 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2734983, 'name': Destroy_Task, 'duration_secs': 0.752928} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 606.800874] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-a428f7da-b8aa-40cb-9fdc-1b56e9c461c4 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: f311a533-5c48-410b-ba3b-58f0032c8816] Destroyed the VM [ 606.800874] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-a428f7da-b8aa-40cb-9fdc-1b56e9c461c4 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: f311a533-5c48-410b-ba3b-58f0032c8816] Deleting Snapshot of the VM instance {{(pid=63028) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 606.800874] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-c777d7aa-1bfe-4978-9ddd-5c921772350f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.806288] env[63028]: DEBUG oslo_concurrency.lockutils [None req-fc2e2e77-fc06-435b-8552-1bfab521033e tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] Releasing lock "refresh_cache-1eeb96d1-6e03-4192-a9db-955444519fd7" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 606.808059] env[63028]: DEBUG nova.compute.manager [None req-fc2e2e77-fc06-435b-8552-1bfab521033e tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] [instance: 1eeb96d1-6e03-4192-a9db-955444519fd7] Start destroying the instance on the hypervisor. 
{{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 606.808059] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-fc2e2e77-fc06-435b-8552-1bfab521033e tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] [instance: 1eeb96d1-6e03-4192-a9db-955444519fd7] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 606.808286] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f0dc4fe-e228-4426-9d96-9b0179865584 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.815929] env[63028]: DEBUG oslo_vmware.api [None req-a428f7da-b8aa-40cb-9fdc-1b56e9c461c4 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Waiting for the task: (returnval){ [ 606.815929] env[63028]: value = "task-2734988" [ 606.815929] env[63028]: _type = "Task" [ 606.815929] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 606.828163] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc2e2e77-fc06-435b-8552-1bfab521033e tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] [instance: 1eeb96d1-6e03-4192-a9db-955444519fd7] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 606.828575] env[63028]: DEBUG oslo_vmware.api [None req-d380ac33-3a3f-419f-b4d1-ad7edb67b04d tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Task: {'id': task-2734976, 'name': PowerOnVM_Task, 'duration_secs': 1.461775} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 606.832324] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3eec9098-0b4d-4d95-8a10-a337289b9013 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.833776] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-d380ac33-3a3f-419f-b4d1-ad7edb67b04d tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] [instance: c9cc1ac7-06c6-415b-86ce-daf4849bfc05] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 606.834573] env[63028]: INFO nova.compute.manager [None req-d380ac33-3a3f-419f-b4d1-ad7edb67b04d tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] [instance: c9cc1ac7-06c6-415b-86ce-daf4849bfc05] Took 11.36 seconds to spawn the instance on the hypervisor. 
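(Editorial sketch, not part of the captured log.) The PowerOnVM_Task entries above — "Invoking VirtualMachine.PowerOnVM_Task", repeated "Task: {...} progress is N%" polls, then "Powered on the VM" — follow oslo.vmware's invoke_api / wait_for_task pattern. A minimal, hypothetical reproduction of that pattern is sketched below; the host, credentials, and vm_ref are placeholders, not values from this log.

    # Illustrative sketch of the oslo.vmware task-polling pattern seen above.
    # All connection details are placeholders; vm_ref is assumed to be a
    # VirtualMachine managed-object reference obtained elsewhere.
    from oslo_vmware import api

    session = api.VMwareAPISession(
        'vcenter.example.test',      # placeholder vCenter host
        'user', 'password',          # placeholder credentials
        api_retry_count=10,
        task_poll_interval=0.5)      # seconds between task polls

    # invoke_api() issues the SOAP call (the "Invoking VirtualMachine.PowerOnVM_Task"
    # DEBUG line); wait_for_task() then polls the task's info until it reports
    # success or error, which produces the "progress is N%" lines above.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    session.wait_for_task(task)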
[ 606.834573] env[63028]: DEBUG nova.compute.manager [None req-d380ac33-3a3f-419f-b4d1-ad7edb67b04d tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] [instance: c9cc1ac7-06c6-415b-86ce-daf4849bfc05] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 606.836898] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e157e48b-8f01-48ba-a004-bb4d30ab2d01 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.838897] env[63028]: INFO nova.compute.manager [None req-39a7ff7c-fa43-4839-8094-07bf564e5924 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: 5a330ed9-c106-49f2-b524-a424e717b5ce] Rescuing [ 606.839203] env[63028]: DEBUG oslo_concurrency.lockutils [None req-39a7ff7c-fa43-4839-8094-07bf564e5924 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Acquiring lock "refresh_cache-5a330ed9-c106-49f2-b524-a424e717b5ce" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 606.840042] env[63028]: DEBUG oslo_concurrency.lockutils [None req-39a7ff7c-fa43-4839-8094-07bf564e5924 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Acquired lock "refresh_cache-5a330ed9-c106-49f2-b524-a424e717b5ce" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 606.840042] env[63028]: DEBUG nova.network.neutron [None req-39a7ff7c-fa43-4839-8094-07bf564e5924 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: 5a330ed9-c106-49f2-b524-a424e717b5ce] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 606.845262] env[63028]: DEBUG oslo_vmware.api [None req-a428f7da-b8aa-40cb-9fdc-1b56e9c461c4 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2734988, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 606.847242] env[63028]: DEBUG oslo_vmware.api [None req-fc2e2e77-fc06-435b-8552-1bfab521033e tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] Waiting for the task: (returnval){ [ 606.847242] env[63028]: value = "task-2734989" [ 606.847242] env[63028]: _type = "Task" [ 606.847242] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 606.864297] env[63028]: DEBUG oslo_vmware.api [None req-fc2e2e77-fc06-435b-8552-1bfab521033e tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] Task: {'id': task-2734989, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 606.910935] env[63028]: DEBUG oslo_concurrency.lockutils [None req-78a1acb4-c1f1-436a-833a-acfe219f578d tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] Releasing lock "refresh_cache-44fca05f-51db-4252-bcf8-6bcad37a6147" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 606.911284] env[63028]: DEBUG nova.compute.manager [None req-78a1acb4-c1f1-436a-833a-acfe219f578d tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] [instance: 44fca05f-51db-4252-bcf8-6bcad37a6147] Instance network_info: |[{"id": "4fb8a759-76bd-4b37-a810-2665ea4a32b3", "address": "fa:16:3e:6b:a2:3a", "network": {"id": "3a62b3ba-178e-4e4a-b8ce-a36e2a779888", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1226406934-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "824cdcbbc0e1478b9d900cde707bc67b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37fb1918-d178-4e12-93e6-316381e78be4", "external-id": "nsx-vlan-transportzone-763", "segmentation_id": 763, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4fb8a759-76", "ovs_interfaceid": "4fb8a759-76bd-4b37-a810-2665ea4a32b3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 606.911681] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-78a1acb4-c1f1-436a-833a-acfe219f578d tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] [instance: 44fca05f-51db-4252-bcf8-6bcad37a6147] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6b:a2:3a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '37fb1918-d178-4e12-93e6-316381e78be4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4fb8a759-76bd-4b37-a810-2665ea4a32b3', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 606.919207] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-78a1acb4-c1f1-436a-833a-acfe219f578d tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] Creating folder: Project (824cdcbbc0e1478b9d900cde707bc67b). Parent ref: group-v550570. 
{{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 606.919904] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-794bd6a7-015e-4188-be1c-6590ca95a6b7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.933440] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-78a1acb4-c1f1-436a-833a-acfe219f578d tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] Created folder: Project (824cdcbbc0e1478b9d900cde707bc67b) in parent group-v550570. [ 606.933702] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-78a1acb4-c1f1-436a-833a-acfe219f578d tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] Creating folder: Instances. Parent ref: group-v550627. {{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 606.934113] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-686bffb6-a4c8-40e8-bf0e-77fd6a7ae981 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.939910] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c7c3e8ac-1fec-453a-8f4f-76e9aae0d91a tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.816s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 606.940188] env[63028]: DEBUG nova.compute.manager [None req-c7c3e8ac-1fec-453a-8f4f-76e9aae0d91a tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: f3277886-4498-45c6-be68-e71d8293dc00] Start building networks asynchronously for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 606.942876] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a5c7e971-5f6a-4ff0-9cde-4fb9929da353 tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 17.356s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 606.943128] env[63028]: DEBUG nova.objects.instance [None req-a5c7e971-5f6a-4ff0-9cde-4fb9929da353 tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] Lazy-loading 'resources' on Instance uuid a167df01-05e4-453d-8800-9c104d912474 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 606.947012] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-78a1acb4-c1f1-436a-833a-acfe219f578d tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] Created folder: Instances in parent group-v550627. [ 606.947371] env[63028]: DEBUG oslo.service.loopingcall [None req-78a1acb4-c1f1-436a-833a-acfe219f578d tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 606.947492] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 44fca05f-51db-4252-bcf8-6bcad37a6147] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 606.947616] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e80c40ac-91ad-4f20-b292-570569f64794 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.972732] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 606.972732] env[63028]: value = "task-2734992" [ 606.972732] env[63028]: _type = "Task" [ 606.972732] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 606.984438] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2734992, 'name': CreateVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 607.024274] env[63028]: DEBUG oslo_vmware.api [None req-e8cfdc3e-60ac-4489-8889-136c30d59549 tempest-ServersAdmin275Test-1735254146 tempest-ServersAdmin275Test-1735254146-project-admin] Task: {'id': task-2734984, 'name': ReconfigVM_Task, 'duration_secs': 0.806104} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 607.024645] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-e8cfdc3e-60ac-4489-8889-136c30d59549 tempest-ServersAdmin275Test-1735254146 tempest-ServersAdmin275Test-1735254146-project-admin] [instance: 94b1bf30-0f9b-4197-99ff-6631a13ab2d1] Reconfigured VM instance instance-00000005 to attach disk [datastore2] 94b1bf30-0f9b-4197-99ff-6631a13ab2d1/94b1bf30-0f9b-4197-99ff-6631a13ab2d1.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 607.026703] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2cfb0e67-d7d5-49ce-ab9a-166e09a43c7d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.035596] env[63028]: DEBUG oslo_vmware.api [None req-e8cfdc3e-60ac-4489-8889-136c30d59549 tempest-ServersAdmin275Test-1735254146 tempest-ServersAdmin275Test-1735254146-project-admin] Waiting for the task: (returnval){ [ 607.035596] env[63028]: value = "task-2734993" [ 607.035596] env[63028]: _type = "Task" [ 607.035596] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 607.046685] env[63028]: DEBUG oslo_vmware.api [None req-e8cfdc3e-60ac-4489-8889-136c30d59549 tempest-ServersAdmin275Test-1735254146 tempest-ServersAdmin275Test-1735254146-project-admin] Task: {'id': task-2734993, 'name': Rename_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 607.056396] env[63028]: DEBUG oslo_vmware.api [None req-fa9cf96e-1fa7-41d8-8fc5-4ef011c3de54 tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52299a2e-8ba9-77e2-0fa7-6665665305b8, 'name': SearchDatastore_Task, 'duration_secs': 0.011578} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 607.057829] env[63028]: DEBUG oslo_concurrency.lockutils [None req-fa9cf96e-1fa7-41d8-8fc5-4ef011c3de54 tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 607.058220] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-fa9cf96e-1fa7-41d8-8fc5-4ef011c3de54 tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] [instance: 4a782483-c24e-44db-b697-856c69cc4a13] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 607.058357] env[63028]: DEBUG oslo_concurrency.lockutils [None req-fa9cf96e-1fa7-41d8-8fc5-4ef011c3de54 tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 607.058478] env[63028]: DEBUG oslo_concurrency.lockutils [None req-fa9cf96e-1fa7-41d8-8fc5-4ef011c3de54 tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 607.058652] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa9cf96e-1fa7-41d8-8fc5-4ef011c3de54 tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 607.060416] env[63028]: DEBUG nova.compute.manager [req-317b5ef0-58c6-4f68-b3e3-9eda0100317c req-47006956-1f0e-4b6b-a311-fb9843b9a91a service nova] [instance: d663c2df-ae54-4c50-a70f-e2180700c700] Received event network-changed-fea60f3c-e539-418e-abfc-a7a41c223938 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 607.060416] env[63028]: DEBUG nova.compute.manager [req-317b5ef0-58c6-4f68-b3e3-9eda0100317c req-47006956-1f0e-4b6b-a311-fb9843b9a91a service nova] [instance: d663c2df-ae54-4c50-a70f-e2180700c700] Refreshing instance network info cache due to event network-changed-fea60f3c-e539-418e-abfc-a7a41c223938. 
{{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 607.060699] env[63028]: DEBUG oslo_concurrency.lockutils [req-317b5ef0-58c6-4f68-b3e3-9eda0100317c req-47006956-1f0e-4b6b-a311-fb9843b9a91a service nova] Acquiring lock "refresh_cache-d663c2df-ae54-4c50-a70f-e2180700c700" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 607.061407] env[63028]: DEBUG oslo_concurrency.lockutils [req-317b5ef0-58c6-4f68-b3e3-9eda0100317c req-47006956-1f0e-4b6b-a311-fb9843b9a91a service nova] Acquired lock "refresh_cache-d663c2df-ae54-4c50-a70f-e2180700c700" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 607.061407] env[63028]: DEBUG nova.network.neutron [req-317b5ef0-58c6-4f68-b3e3-9eda0100317c req-47006956-1f0e-4b6b-a311-fb9843b9a91a service nova] [instance: d663c2df-ae54-4c50-a70f-e2180700c700] Refreshing network info cache for port fea60f3c-e539-418e-abfc-a7a41c223938 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 607.063338] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-91a8ae2d-21ae-4dde-967d-04de9fc8d5aa {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.084872] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa9cf96e-1fa7-41d8-8fc5-4ef011c3de54 tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 607.085138] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-fa9cf96e-1fa7-41d8-8fc5-4ef011c3de54 tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 607.090179] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b58d278c-7f81-48d8-954c-069c9e07848f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.101268] env[63028]: DEBUG oslo_vmware.api [None req-4b7d0a7d-24c8-4fd0-b2fc-d929be4c8481 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Task: {'id': task-2734986, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.131565} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 607.102255] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-4b7d0a7d-24c8-4fd0-b2fc-d929be4c8481 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] [instance: c7a3f2c6-8368-49cc-9737-ea1d836f1783] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 607.102592] env[63028]: DEBUG oslo_vmware.api [None req-fa9cf96e-1fa7-41d8-8fc5-4ef011c3de54 tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] Waiting for the task: (returnval){ [ 607.102592] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52f4bf9a-9e10-05f7-3a96-12b470e7a067" [ 607.102592] env[63028]: _type = "Task" [ 607.102592] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 607.103327] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51e6ebfe-cad4-4d98-b94f-63c14e525a47 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.131303] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-4b7d0a7d-24c8-4fd0-b2fc-d929be4c8481 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] [instance: c7a3f2c6-8368-49cc-9737-ea1d836f1783] Reconfiguring VM instance instance-00000010 to attach disk [datastore2] c7a3f2c6-8368-49cc-9737-ea1d836f1783/c7a3f2c6-8368-49cc-9737-ea1d836f1783.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 607.135538] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-497a0eef-7bd3-4800-b00f-a5268b67bbc2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.151319] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4ea86be-f4b7-4838-b9cd-5f32319112a9 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] [instance: 0dbafad1-ab21-439d-bc8e-e447ac33304e] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 607.151617] env[63028]: DEBUG oslo_vmware.api [None req-fa9cf96e-1fa7-41d8-8fc5-4ef011c3de54 tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52f4bf9a-9e10-05f7-3a96-12b470e7a067, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 607.153535] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e1823104-923e-44ca-8727-2d6dcb407265 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.157383] env[63028]: DEBUG nova.compute.manager [req-92f9885a-288d-45f6-a26c-b0276fce64bc req-2df9ef2f-68c7-479e-96ee-5f1cdda8110a service nova] [instance: c7a3f2c6-8368-49cc-9737-ea1d836f1783] Received event network-changed-dbc0e58f-b646-4c47-becf-ba9c242ca9aa {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 607.157589] env[63028]: DEBUG nova.compute.manager [req-92f9885a-288d-45f6-a26c-b0276fce64bc req-2df9ef2f-68c7-479e-96ee-5f1cdda8110a service nova] [instance: c7a3f2c6-8368-49cc-9737-ea1d836f1783] Refreshing instance network info cache due to event network-changed-dbc0e58f-b646-4c47-becf-ba9c242ca9aa. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 607.157891] env[63028]: DEBUG oslo_concurrency.lockutils [req-92f9885a-288d-45f6-a26c-b0276fce64bc req-2df9ef2f-68c7-479e-96ee-5f1cdda8110a service nova] Acquiring lock "refresh_cache-c7a3f2c6-8368-49cc-9737-ea1d836f1783" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 607.158181] env[63028]: DEBUG oslo_concurrency.lockutils [req-92f9885a-288d-45f6-a26c-b0276fce64bc req-2df9ef2f-68c7-479e-96ee-5f1cdda8110a service nova] Acquired lock "refresh_cache-c7a3f2c6-8368-49cc-9737-ea1d836f1783" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 607.158491] env[63028]: DEBUG nova.network.neutron [req-92f9885a-288d-45f6-a26c-b0276fce64bc req-2df9ef2f-68c7-479e-96ee-5f1cdda8110a service nova] [instance: c7a3f2c6-8368-49cc-9737-ea1d836f1783] Refreshing network info cache for port dbc0e58f-b646-4c47-becf-ba9c242ca9aa {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 607.168115] env[63028]: DEBUG oslo_vmware.api [None req-c4ea86be-f4b7-4838-b9cd-5f32319112a9 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Waiting for the task: (returnval){ [ 607.168115] env[63028]: value = "task-2734994" [ 607.168115] env[63028]: _type = "Task" [ 607.168115] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 607.170835] env[63028]: DEBUG oslo_vmware.api [None req-4b7d0a7d-24c8-4fd0-b2fc-d929be4c8481 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Waiting for the task: (returnval){ [ 607.170835] env[63028]: value = "task-2734995" [ 607.170835] env[63028]: _type = "Task" [ 607.170835] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 607.186321] env[63028]: DEBUG oslo_vmware.api [None req-c4ea86be-f4b7-4838-b9cd-5f32319112a9 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Task: {'id': task-2734994, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 607.189912] env[63028]: DEBUG oslo_vmware.api [None req-4b7d0a7d-24c8-4fd0-b2fc-d929be4c8481 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Task: {'id': task-2734995, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 607.339464] env[63028]: DEBUG oslo_vmware.api [None req-a428f7da-b8aa-40cb-9fdc-1b56e9c461c4 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2734988, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 607.377023] env[63028]: DEBUG oslo_vmware.api [None req-fc2e2e77-fc06-435b-8552-1bfab521033e tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] Task: {'id': task-2734989, 'name': PowerOffVM_Task, 'duration_secs': 0.255172} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 607.377623] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc2e2e77-fc06-435b-8552-1bfab521033e tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] [instance: 1eeb96d1-6e03-4192-a9db-955444519fd7] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 607.377827] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-fc2e2e77-fc06-435b-8552-1bfab521033e tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] [instance: 1eeb96d1-6e03-4192-a9db-955444519fd7] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 607.378450] env[63028]: INFO nova.compute.manager [None req-d380ac33-3a3f-419f-b4d1-ad7edb67b04d tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] [instance: c9cc1ac7-06c6-415b-86ce-daf4849bfc05] Took 25.05 seconds to build instance. 
[ 607.379399] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-52e21b42-1117-44b5-b80f-91c466bf0db7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.385665] env[63028]: DEBUG oslo_concurrency.lockutils [None req-bbc4a681-f719-4c6e-ac1b-190ea2c48080 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Acquiring lock "5a340e31-678c-437e-aa4e-07d5d9f4334f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 607.385901] env[63028]: DEBUG oslo_concurrency.lockutils [None req-bbc4a681-f719-4c6e-ac1b-190ea2c48080 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Lock "5a340e31-678c-437e-aa4e-07d5d9f4334f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 607.427863] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-fc2e2e77-fc06-435b-8552-1bfab521033e tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] [instance: 1eeb96d1-6e03-4192-a9db-955444519fd7] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 607.428121] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-fc2e2e77-fc06-435b-8552-1bfab521033e tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] [instance: 1eeb96d1-6e03-4192-a9db-955444519fd7] Deleting contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 607.428302] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc2e2e77-fc06-435b-8552-1bfab521033e tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] Deleting the datastore file [datastore1] 1eeb96d1-6e03-4192-a9db-955444519fd7 {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 607.428616] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d7035b1d-40b4-4b8c-943a-afedd5675e70 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.438208] env[63028]: DEBUG oslo_vmware.api [None req-fc2e2e77-fc06-435b-8552-1bfab521033e tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] Waiting for the task: (returnval){ [ 607.438208] env[63028]: value = "task-2734997" [ 607.438208] env[63028]: _type = "Task" [ 607.438208] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 607.456692] env[63028]: DEBUG nova.compute.utils [None req-c7c3e8ac-1fec-453a-8f4f-76e9aae0d91a tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 607.457089] env[63028]: DEBUG oslo_vmware.api [None req-fc2e2e77-fc06-435b-8552-1bfab521033e tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] Task: {'id': task-2734997, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 607.458536] env[63028]: DEBUG nova.compute.manager [None req-c7c3e8ac-1fec-453a-8f4f-76e9aae0d91a tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: f3277886-4498-45c6-be68-e71d8293dc00] Allocating IP information in the background. {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 607.458536] env[63028]: DEBUG nova.network.neutron [None req-c7c3e8ac-1fec-453a-8f4f-76e9aae0d91a tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: f3277886-4498-45c6-be68-e71d8293dc00] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 607.493479] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2734992, 'name': CreateVM_Task} progress is 99%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 607.546441] env[63028]: DEBUG nova.policy [None req-c7c3e8ac-1fec-453a-8f4f-76e9aae0d91a tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1db2756e554d4fa2a66cff81ab6d4105', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e14d427c980c486cbbe8ff0982a30428', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 607.563178] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-a4c82103-d8b7-4845-a6b4-abab95e8e4fb tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] [instance: 67440140-a619-41f2-98fe-eff23e8ad8a5] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 607.563462] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-a4c82103-d8b7-4845-a6b4-abab95e8e4fb tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] [instance: 67440140-a619-41f2-98fe-eff23e8ad8a5] Deleting contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 607.563594] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-a4c82103-d8b7-4845-a6b4-abab95e8e4fb tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Deleting the datastore file [datastore1] 67440140-a619-41f2-98fe-eff23e8ad8a5 {{(pid=63028) file_delete 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 607.563898] env[63028]: DEBUG oslo_vmware.api [None req-e8cfdc3e-60ac-4489-8889-136c30d59549 tempest-ServersAdmin275Test-1735254146 tempest-ServersAdmin275Test-1735254146-project-admin] Task: {'id': task-2734993, 'name': Rename_Task, 'duration_secs': 0.359574} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 607.564961] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7a87d660-1110-431e-acdb-64086bb512e5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.568821] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8cfdc3e-60ac-4489-8889-136c30d59549 tempest-ServersAdmin275Test-1735254146 tempest-ServersAdmin275Test-1735254146-project-admin] [instance: 94b1bf30-0f9b-4197-99ff-6631a13ab2d1] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 607.568821] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e2a56aa3-f506-4562-983a-798826d5b27b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.578504] env[63028]: DEBUG oslo_vmware.api [None req-a4c82103-d8b7-4845-a6b4-abab95e8e4fb tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Waiting for the task: (returnval){ [ 607.578504] env[63028]: value = "task-2734998" [ 607.578504] env[63028]: _type = "Task" [ 607.578504] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 607.583259] env[63028]: DEBUG oslo_vmware.api [None req-e8cfdc3e-60ac-4489-8889-136c30d59549 tempest-ServersAdmin275Test-1735254146 tempest-ServersAdmin275Test-1735254146-project-admin] Waiting for the task: (returnval){ [ 607.583259] env[63028]: value = "task-2734999" [ 607.583259] env[63028]: _type = "Task" [ 607.583259] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 607.610217] env[63028]: DEBUG oslo_vmware.api [None req-a4c82103-d8b7-4845-a6b4-abab95e8e4fb tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Task: {'id': task-2734998, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 607.611266] env[63028]: DEBUG oslo_vmware.api [None req-e8cfdc3e-60ac-4489-8889-136c30d59549 tempest-ServersAdmin275Test-1735254146 tempest-ServersAdmin275Test-1735254146-project-admin] Task: {'id': task-2734999, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 607.639830] env[63028]: DEBUG oslo_vmware.api [None req-fa9cf96e-1fa7-41d8-8fc5-4ef011c3de54 tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52f4bf9a-9e10-05f7-3a96-12b470e7a067, 'name': SearchDatastore_Task, 'duration_secs': 0.029934} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 607.641177] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e37b9cf3-1c9e-4031-88e7-9f48a3e2960d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.658285] env[63028]: DEBUG oslo_vmware.api [None req-fa9cf96e-1fa7-41d8-8fc5-4ef011c3de54 tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] Waiting for the task: (returnval){ [ 607.658285] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5233f731-6e7c-8911-a7ad-877012353dae" [ 607.658285] env[63028]: _type = "Task" [ 607.658285] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 607.677585] env[63028]: DEBUG oslo_vmware.api [None req-fa9cf96e-1fa7-41d8-8fc5-4ef011c3de54 tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5233f731-6e7c-8911-a7ad-877012353dae, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 607.706745] env[63028]: DEBUG oslo_vmware.api [None req-4b7d0a7d-24c8-4fd0-b2fc-d929be4c8481 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Task: {'id': task-2734995, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 607.707947] env[63028]: DEBUG oslo_vmware.api [None req-c4ea86be-f4b7-4838-b9cd-5f32319112a9 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Task: {'id': task-2734994, 'name': PowerOffVM_Task, 'duration_secs': 0.240644} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 607.708528] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4ea86be-f4b7-4838-b9cd-5f32319112a9 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] [instance: 0dbafad1-ab21-439d-bc8e-e447ac33304e] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 607.708879] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c4ea86be-f4b7-4838-b9cd-5f32319112a9 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] [instance: 0dbafad1-ab21-439d-bc8e-e447ac33304e] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 607.710104] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b67218c-0650-47d1-9bda-504f01b823bd {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.724104] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c4ea86be-f4b7-4838-b9cd-5f32319112a9 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] [instance: 0dbafad1-ab21-439d-bc8e-e447ac33304e] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 607.724416] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d88f0e0b-e424-468e-b753-a34a0efa078d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.760918] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c4ea86be-f4b7-4838-b9cd-5f32319112a9 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] [instance: 0dbafad1-ab21-439d-bc8e-e447ac33304e] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 607.761178] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c4ea86be-f4b7-4838-b9cd-5f32319112a9 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] [instance: 0dbafad1-ab21-439d-bc8e-e447ac33304e] Deleting contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 607.761399] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-c4ea86be-f4b7-4838-b9cd-5f32319112a9 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Deleting the datastore file [datastore2] 0dbafad1-ab21-439d-bc8e-e447ac33304e {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 607.761852] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e0989931-7fa4-4b23-a169-9fcdf884c1b2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.775513] env[63028]: DEBUG oslo_vmware.api [None req-c4ea86be-f4b7-4838-b9cd-5f32319112a9 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Waiting for the task: (returnval){ [ 607.775513] env[63028]: value = "task-2735001" [ 607.775513] env[63028]: _type = "Task" [ 607.775513] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 607.787039] env[63028]: DEBUG oslo_vmware.api [None req-c4ea86be-f4b7-4838-b9cd-5f32319112a9 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Task: {'id': task-2735001, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 607.843594] env[63028]: DEBUG oslo_vmware.api [None req-a428f7da-b8aa-40cb-9fdc-1b56e9c461c4 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2734988, 'name': RemoveSnapshot_Task, 'duration_secs': 0.844369} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 607.843774] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-a428f7da-b8aa-40cb-9fdc-1b56e9c461c4 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: f311a533-5c48-410b-ba3b-58f0032c8816] Deleted Snapshot of the VM instance {{(pid=63028) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 607.843885] env[63028]: INFO nova.compute.manager [None req-a428f7da-b8aa-40cb-9fdc-1b56e9c461c4 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: f311a533-5c48-410b-ba3b-58f0032c8816] Took 16.97 seconds to snapshot the instance on the hypervisor. [ 607.892731] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d380ac33-3a3f-419f-b4d1-ad7edb67b04d tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Lock "c9cc1ac7-06c6-415b-86ce-daf4849bfc05" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.899s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 607.956198] env[63028]: DEBUG oslo_vmware.api [None req-fc2e2e77-fc06-435b-8552-1bfab521033e tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] Task: {'id': task-2734997, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.186559} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 607.959731] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc2e2e77-fc06-435b-8552-1bfab521033e tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 607.959731] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-fc2e2e77-fc06-435b-8552-1bfab521033e tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] [instance: 1eeb96d1-6e03-4192-a9db-955444519fd7] Deleted contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 607.960084] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-fc2e2e77-fc06-435b-8552-1bfab521033e tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] [instance: 1eeb96d1-6e03-4192-a9db-955444519fd7] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 607.960084] env[63028]: INFO nova.compute.manager [None req-fc2e2e77-fc06-435b-8552-1bfab521033e tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] [instance: 1eeb96d1-6e03-4192-a9db-955444519fd7] Took 1.15 seconds to destroy the instance on the hypervisor. [ 607.960329] env[63028]: DEBUG oslo.service.loopingcall [None req-fc2e2e77-fc06-435b-8552-1bfab521033e tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 607.960766] env[63028]: DEBUG nova.compute.manager [None req-c7c3e8ac-1fec-453a-8f4f-76e9aae0d91a tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: f3277886-4498-45c6-be68-e71d8293dc00] Start building block device mappings for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 607.963550] env[63028]: DEBUG nova.compute.manager [-] [instance: 1eeb96d1-6e03-4192-a9db-955444519fd7] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 607.963730] env[63028]: DEBUG nova.network.neutron [-] [instance: 1eeb96d1-6e03-4192-a9db-955444519fd7] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 607.986586] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2734992, 'name': CreateVM_Task, 'duration_secs': 0.598126} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 607.986780] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 44fca05f-51db-4252-bcf8-6bcad37a6147] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 607.987656] env[63028]: DEBUG oslo_concurrency.lockutils [None req-78a1acb4-c1f1-436a-833a-acfe219f578d tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 607.987889] env[63028]: DEBUG oslo_concurrency.lockutils [None req-78a1acb4-c1f1-436a-833a-acfe219f578d tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 607.988355] env[63028]: DEBUG oslo_concurrency.lockutils [None req-78a1acb4-c1f1-436a-833a-acfe219f578d tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 607.988841] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e6f04ded-b64b-461c-9301-a49c81455848 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.995245] env[63028]: DEBUG oslo_vmware.api [None req-78a1acb4-c1f1-436a-833a-acfe219f578d tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] Waiting for the task: (returnval){ [ 607.995245] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]524a53ad-6df1-4bc4-dc37-3e04a67eadd7" [ 607.995245] env[63028]: _type = "Task" [ 607.995245] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 608.008671] env[63028]: DEBUG oslo_vmware.api [None req-78a1acb4-c1f1-436a-833a-acfe219f578d tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]524a53ad-6df1-4bc4-dc37-3e04a67eadd7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 608.034168] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cafb568-45b9-4cee-973f-b174be54db82 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.038936] env[63028]: DEBUG nova.network.neutron [-] [instance: 1eeb96d1-6e03-4192-a9db-955444519fd7] Instance cache missing network info. 
{{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 608.049387] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d790b26-d1c7-4da3-a0d0-9e44c0c4e98e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.094720] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2484f6a4-8995-4c4f-85ac-134c1fc0ce65 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.103234] env[63028]: DEBUG nova.network.neutron [None req-39a7ff7c-fa43-4839-8094-07bf564e5924 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: 5a330ed9-c106-49f2-b524-a424e717b5ce] Updating instance_info_cache with network_info: [{"id": "6dc62708-050a-40f3-b99a-f51b25937806", "address": "fa:16:3e:1a:f4:86", "network": {"id": "350f4b14-d211-48c8-b1dd-06a0dd5805d1", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-987689362-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "b3d1798e23e64325a3b6f699cd27d98f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "60567ee6-01d0-4b16-9c7a-4a896827d6eb", "external-id": "nsx-vlan-transportzone-28", "segmentation_id": 28, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6dc62708-05", "ovs_interfaceid": "6dc62708-050a-40f3-b99a-f51b25937806", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 608.108705] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5a552dd6-9917-4ffe-8484-017f00b84b26 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Acquiring lock "86d5d264-7a7a-434b-a1c4-e9a004c0a034" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 608.109521] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5a552dd6-9917-4ffe-8484-017f00b84b26 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Lock "86d5d264-7a7a-434b-a1c4-e9a004c0a034" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 608.122966] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff12de13-084b-4e52-85b8-cfbc4dbe4c11 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.127660] env[63028]: DEBUG oslo_vmware.api [None req-a4c82103-d8b7-4845-a6b4-abab95e8e4fb tempest-ServerExternalEventsTest-1563001839 
tempest-ServerExternalEventsTest-1563001839-project-member] Task: {'id': task-2734998, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.283452} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 608.127903] env[63028]: DEBUG oslo_vmware.api [None req-e8cfdc3e-60ac-4489-8889-136c30d59549 tempest-ServersAdmin275Test-1735254146 tempest-ServersAdmin275Test-1735254146-project-admin] Task: {'id': task-2734999, 'name': PowerOnVM_Task} progress is 88%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 608.128573] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-a4c82103-d8b7-4845-a6b4-abab95e8e4fb tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 608.128996] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-a4c82103-d8b7-4845-a6b4-abab95e8e4fb tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] [instance: 67440140-a619-41f2-98fe-eff23e8ad8a5] Deleted contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 608.128996] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-a4c82103-d8b7-4845-a6b4-abab95e8e4fb tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] [instance: 67440140-a619-41f2-98fe-eff23e8ad8a5] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 608.129204] env[63028]: INFO nova.compute.manager [None req-a4c82103-d8b7-4845-a6b4-abab95e8e4fb tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] [instance: 67440140-a619-41f2-98fe-eff23e8ad8a5] Took 2.09 seconds to destroy the instance on the hypervisor. [ 608.129400] env[63028]: DEBUG oslo.service.loopingcall [None req-a4c82103-d8b7-4845-a6b4-abab95e8e4fb tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 608.130025] env[63028]: DEBUG nova.compute.manager [-] [instance: 67440140-a619-41f2-98fe-eff23e8ad8a5] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 608.130223] env[63028]: DEBUG nova.network.neutron [-] [instance: 67440140-a619-41f2-98fe-eff23e8ad8a5] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 608.141523] env[63028]: DEBUG nova.compute.provider_tree [None req-a5c7e971-5f6a-4ff0-9cde-4fb9929da353 tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] Updating inventory in ProviderTree for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 608.168659] env[63028]: DEBUG oslo_vmware.api [None req-fa9cf96e-1fa7-41d8-8fc5-4ef011c3de54 tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5233f731-6e7c-8911-a7ad-877012353dae, 'name': SearchDatastore_Task, 'duration_secs': 0.023355} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 608.169020] env[63028]: DEBUG oslo_concurrency.lockutils [None req-fa9cf96e-1fa7-41d8-8fc5-4ef011c3de54 tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 608.169232] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa9cf96e-1fa7-41d8-8fc5-4ef011c3de54 tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] 4a782483-c24e-44db-b697-856c69cc4a13/4a782483-c24e-44db-b697-856c69cc4a13.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 608.169500] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-85d99aaf-9889-45f8-877f-300138bc2714 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.182871] env[63028]: DEBUG oslo_vmware.api [None req-fa9cf96e-1fa7-41d8-8fc5-4ef011c3de54 tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] Waiting for the task: (returnval){ [ 608.182871] env[63028]: value = "task-2735002" [ 608.182871] env[63028]: _type = "Task" [ 608.182871] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 608.192799] env[63028]: DEBUG oslo_vmware.api [None req-4b7d0a7d-24c8-4fd0-b2fc-d929be4c8481 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Task: {'id': task-2734995, 'name': ReconfigVM_Task, 'duration_secs': 0.712455} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 608.193323] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-4b7d0a7d-24c8-4fd0-b2fc-d929be4c8481 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] [instance: c7a3f2c6-8368-49cc-9737-ea1d836f1783] Reconfigured VM instance instance-00000010 to attach disk [datastore2] c7a3f2c6-8368-49cc-9737-ea1d836f1783/c7a3f2c6-8368-49cc-9737-ea1d836f1783.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 608.194160] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-39e21502-06d0-4b61-8fb3-09846ae0d24f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.199378] env[63028]: DEBUG oslo_vmware.api [None req-fa9cf96e-1fa7-41d8-8fc5-4ef011c3de54 tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] Task: {'id': task-2735002, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 608.207800] env[63028]: DEBUG oslo_vmware.api [None req-4b7d0a7d-24c8-4fd0-b2fc-d929be4c8481 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Waiting for the task: (returnval){ [ 608.207800] env[63028]: value = "task-2735003" [ 608.207800] env[63028]: _type = "Task" [ 608.207800] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 608.217305] env[63028]: DEBUG oslo_vmware.api [None req-4b7d0a7d-24c8-4fd0-b2fc-d929be4c8481 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Task: {'id': task-2735003, 'name': Rename_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 608.286875] env[63028]: DEBUG oslo_vmware.api [None req-c4ea86be-f4b7-4838-b9cd-5f32319112a9 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Task: {'id': task-2735001, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.151204} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 608.287325] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-c4ea86be-f4b7-4838-b9cd-5f32319112a9 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 608.287539] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c4ea86be-f4b7-4838-b9cd-5f32319112a9 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] [instance: 0dbafad1-ab21-439d-bc8e-e447ac33304e] Deleted contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 608.287539] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c4ea86be-f4b7-4838-b9cd-5f32319112a9 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] [instance: 0dbafad1-ab21-439d-bc8e-e447ac33304e] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 608.341715] env[63028]: DEBUG nova.network.neutron [req-317b5ef0-58c6-4f68-b3e3-9eda0100317c req-47006956-1f0e-4b6b-a311-fb9843b9a91a service nova] [instance: d663c2df-ae54-4c50-a70f-e2180700c700] Updated VIF entry in instance network info cache for port fea60f3c-e539-418e-abfc-a7a41c223938. {{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 608.341715] env[63028]: DEBUG nova.network.neutron [req-317b5ef0-58c6-4f68-b3e3-9eda0100317c req-47006956-1f0e-4b6b-a311-fb9843b9a91a service nova] [instance: d663c2df-ae54-4c50-a70f-e2180700c700] Updating instance_info_cache with network_info: [{"id": "fea60f3c-e539-418e-abfc-a7a41c223938", "address": "fa:16:3e:64:6a:31", "network": {"id": "95ccbf6d-2bd9-42ff-93f9-5f9f541e5ba1", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-863839356-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.252", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e9b6ca6cccb940f0a516e265a721fd03", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "06eaa4c9-dbc2-4d38-a844-7bf76e7b5a64", "external-id": "nsx-vlan-transportzone-804", "segmentation_id": 804, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfea60f3c-e5", "ovs_interfaceid": "fea60f3c-e539-418e-abfc-a7a41c223938", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 608.398833] env[63028]: DEBUG nova.compute.manager [None req-5282f999-0e6a-4d73-89f0-a01dea2945b9 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] [instance: 0e07a6cd-8c99-408d-95ba-63f7839c327f] Starting instance... 
{{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 608.522984] env[63028]: DEBUG oslo_vmware.api [None req-78a1acb4-c1f1-436a-833a-acfe219f578d tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]524a53ad-6df1-4bc4-dc37-3e04a67eadd7, 'name': SearchDatastore_Task, 'duration_secs': 0.016659} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 608.523290] env[63028]: DEBUG oslo_concurrency.lockutils [None req-78a1acb4-c1f1-436a-833a-acfe219f578d tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 608.523522] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-78a1acb4-c1f1-436a-833a-acfe219f578d tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] [instance: 44fca05f-51db-4252-bcf8-6bcad37a6147] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 608.523745] env[63028]: DEBUG oslo_concurrency.lockutils [None req-78a1acb4-c1f1-436a-833a-acfe219f578d tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 608.523884] env[63028]: DEBUG oslo_concurrency.lockutils [None req-78a1acb4-c1f1-436a-833a-acfe219f578d tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 608.524067] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-78a1acb4-c1f1-436a-833a-acfe219f578d tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 608.524329] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-04b0cd00-0f94-4faf-819d-b8912627cbad {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.538615] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-78a1acb4-c1f1-436a-833a-acfe219f578d tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 608.538813] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-78a1acb4-c1f1-436a-833a-acfe219f578d tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 608.539696] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3e157c4d-0c9a-4cb3-8f8c-ed1557defa4c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.544890] env[63028]: DEBUG nova.network.neutron [-] [instance: 1eeb96d1-6e03-4192-a9db-955444519fd7] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 608.552800] env[63028]: DEBUG oslo_vmware.api [None req-78a1acb4-c1f1-436a-833a-acfe219f578d tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] Waiting for the task: (returnval){ [ 608.552800] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5298e3c4-d547-09e9-2df6-672fbdf0ee92" [ 608.552800] env[63028]: _type = "Task" [ 608.552800] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 608.565781] env[63028]: DEBUG oslo_vmware.api [None req-78a1acb4-c1f1-436a-833a-acfe219f578d tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5298e3c4-d547-09e9-2df6-672fbdf0ee92, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 608.607833] env[63028]: DEBUG oslo_vmware.api [None req-e8cfdc3e-60ac-4489-8889-136c30d59549 tempest-ServersAdmin275Test-1735254146 tempest-ServersAdmin275Test-1735254146-project-admin] Task: {'id': task-2734999, 'name': PowerOnVM_Task, 'duration_secs': 0.708611} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 608.608593] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8cfdc3e-60ac-4489-8889-136c30d59549 tempest-ServersAdmin275Test-1735254146 tempest-ServersAdmin275Test-1735254146-project-admin] [instance: 94b1bf30-0f9b-4197-99ff-6631a13ab2d1] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 608.608973] env[63028]: DEBUG nova.compute.manager [None req-e8cfdc3e-60ac-4489-8889-136c30d59549 tempest-ServersAdmin275Test-1735254146 tempest-ServersAdmin275Test-1735254146-project-admin] [instance: 94b1bf30-0f9b-4197-99ff-6631a13ab2d1] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 608.609651] env[63028]: DEBUG oslo_concurrency.lockutils [None req-39a7ff7c-fa43-4839-8094-07bf564e5924 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Releasing lock "refresh_cache-5a330ed9-c106-49f2-b524-a424e717b5ce" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 608.613809] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0625c8d1-2432-45c2-be10-e8d23031ada2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.638778] env[63028]: DEBUG nova.network.neutron [req-92f9885a-288d-45f6-a26c-b0276fce64bc req-2df9ef2f-68c7-479e-96ee-5f1cdda8110a service nova] [instance: c7a3f2c6-8368-49cc-9737-ea1d836f1783] Updated VIF entry in instance network info cache for port dbc0e58f-b646-4c47-becf-ba9c242ca9aa. {{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 608.639231] env[63028]: DEBUG nova.network.neutron [req-92f9885a-288d-45f6-a26c-b0276fce64bc req-2df9ef2f-68c7-479e-96ee-5f1cdda8110a service nova] [instance: c7a3f2c6-8368-49cc-9737-ea1d836f1783] Updating instance_info_cache with network_info: [{"id": "dbc0e58f-b646-4c47-becf-ba9c242ca9aa", "address": "fa:16:3e:f3:d3:54", "network": {"id": "5f8a710c-b049-40fe-8cc4-00c97e954fd9", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1979325486-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "381de553d9da4c94b923d790c12a28a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ea16b8c-ebf6-4bd4-ab67-ddb472e7b460", "external-id": "nsx-vlan-transportzone-156", "segmentation_id": 156, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdbc0e58f-b6", "ovs_interfaceid": "dbc0e58f-b646-4c47-becf-ba9c242ca9aa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 608.695185] env[63028]: ERROR nova.scheduler.client.report [None req-a5c7e971-5f6a-4ff0-9cde-4fb9929da353 tempest-ServerDiagnosticsNegativeTest-1368205052 
tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] [req-43ccf5f8-f078-4044-af1f-286f4e484df4] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-43ccf5f8-f078-4044-af1f-286f4e484df4"}]} [ 608.707433] env[63028]: DEBUG oslo_vmware.api [None req-fa9cf96e-1fa7-41d8-8fc5-4ef011c3de54 tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] Task: {'id': task-2735002, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 608.719741] env[63028]: DEBUG oslo_vmware.api [None req-4b7d0a7d-24c8-4fd0-b2fc-d929be4c8481 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Task: {'id': task-2735003, 'name': Rename_Task, 'duration_secs': 0.389843} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 608.720857] env[63028]: DEBUG nova.scheduler.client.report [None req-a5c7e971-5f6a-4ff0-9cde-4fb9929da353 tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] Refreshing inventories for resource provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 608.725893] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b7d0a7d-24c8-4fd0-b2fc-d929be4c8481 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] [instance: c7a3f2c6-8368-49cc-9737-ea1d836f1783] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 608.725893] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-614e0ac2-b6e4-428a-ae5a-75f391eee7ec {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.737992] env[63028]: DEBUG oslo_vmware.api [None req-4b7d0a7d-24c8-4fd0-b2fc-d929be4c8481 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Waiting for the task: (returnval){ [ 608.737992] env[63028]: value = "task-2735004" [ 608.737992] env[63028]: _type = "Task" [ 608.737992] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 608.744603] env[63028]: DEBUG nova.scheduler.client.report [None req-a5c7e971-5f6a-4ff0-9cde-4fb9929da353 tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] Updating ProviderTree inventory for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 608.744880] env[63028]: DEBUG nova.compute.provider_tree [None req-a5c7e971-5f6a-4ff0-9cde-4fb9929da353 tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] Updating inventory in ProviderTree for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 608.752522] env[63028]: DEBUG oslo_vmware.api [None req-4b7d0a7d-24c8-4fd0-b2fc-d929be4c8481 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Task: {'id': task-2735004, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 608.768619] env[63028]: DEBUG nova.scheduler.client.report [None req-a5c7e971-5f6a-4ff0-9cde-4fb9929da353 tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] Refreshing aggregate associations for resource provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2, aggregates: None {{(pid=63028) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 608.798083] env[63028]: DEBUG nova.scheduler.client.report [None req-a5c7e971-5f6a-4ff0-9cde-4fb9929da353 tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] Refreshing trait associations for resource provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=63028) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 608.846094] env[63028]: DEBUG oslo_concurrency.lockutils [req-317b5ef0-58c6-4f68-b3e3-9eda0100317c req-47006956-1f0e-4b6b-a311-fb9843b9a91a service nova] Releasing lock "refresh_cache-d663c2df-ae54-4c50-a70f-e2180700c700" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 608.877783] env[63028]: DEBUG nova.network.neutron [None req-c7c3e8ac-1fec-453a-8f4f-76e9aae0d91a tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: f3277886-4498-45c6-be68-e71d8293dc00] Successfully created port: 08a61148-5b3a-4bb0-a130-3eda62d6bf7c {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 608.934586] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5282f999-0e6a-4d73-89f0-a01dea2945b9 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 608.976619] env[63028]: DEBUG nova.compute.manager [None req-c7c3e8ac-1fec-453a-8f4f-76e9aae0d91a tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: f3277886-4498-45c6-be68-e71d8293dc00] Start spawning the instance on the hypervisor. 
{{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 609.009684] env[63028]: DEBUG nova.virt.hardware [None req-c7c3e8ac-1fec-453a-8f4f-76e9aae0d91a tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 609.009956] env[63028]: DEBUG nova.virt.hardware [None req-c7c3e8ac-1fec-453a-8f4f-76e9aae0d91a tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 609.011097] env[63028]: DEBUG nova.virt.hardware [None req-c7c3e8ac-1fec-453a-8f4f-76e9aae0d91a tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 609.011321] env[63028]: DEBUG nova.virt.hardware [None req-c7c3e8ac-1fec-453a-8f4f-76e9aae0d91a tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 609.011465] env[63028]: DEBUG nova.virt.hardware [None req-c7c3e8ac-1fec-453a-8f4f-76e9aae0d91a tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 609.011606] env[63028]: DEBUG nova.virt.hardware [None req-c7c3e8ac-1fec-453a-8f4f-76e9aae0d91a tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 609.011827] env[63028]: DEBUG nova.virt.hardware [None req-c7c3e8ac-1fec-453a-8f4f-76e9aae0d91a tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 609.011980] env[63028]: DEBUG nova.virt.hardware [None req-c7c3e8ac-1fec-453a-8f4f-76e9aae0d91a tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 609.012198] env[63028]: DEBUG nova.virt.hardware [None req-c7c3e8ac-1fec-453a-8f4f-76e9aae0d91a 
tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 609.012422] env[63028]: DEBUG nova.virt.hardware [None req-c7c3e8ac-1fec-453a-8f4f-76e9aae0d91a tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 609.013305] env[63028]: DEBUG nova.virt.hardware [None req-c7c3e8ac-1fec-453a-8f4f-76e9aae0d91a tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 609.014397] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1470322-7910-4ef3-9f43-d960bafddf79 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.036601] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40e7d9e9-6786-49a8-bdf6-2110523deb3e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.053652] env[63028]: INFO nova.compute.manager [-] [instance: 1eeb96d1-6e03-4192-a9db-955444519fd7] Took 1.09 seconds to deallocate network for instance. [ 609.072908] env[63028]: DEBUG oslo_vmware.api [None req-78a1acb4-c1f1-436a-833a-acfe219f578d tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5298e3c4-d547-09e9-2df6-672fbdf0ee92, 'name': SearchDatastore_Task, 'duration_secs': 0.021762} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 609.073946] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4c5b29bd-0502-4cac-b72b-e57e8a5bb20a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.083658] env[63028]: DEBUG oslo_vmware.api [None req-78a1acb4-c1f1-436a-833a-acfe219f578d tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] Waiting for the task: (returnval){ [ 609.083658] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]526b101c-550d-e7fe-1048-b64f7693dfaf" [ 609.083658] env[63028]: _type = "Task" [ 609.083658] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 609.096290] env[63028]: DEBUG oslo_vmware.api [None req-78a1acb4-c1f1-436a-833a-acfe219f578d tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]526b101c-550d-e7fe-1048-b64f7693dfaf, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 609.111874] env[63028]: DEBUG oslo_concurrency.lockutils [None req-b13b0bec-8617-40ed-b3d4-47ce3e1eeef1 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Acquiring lock "ba57ed92-aaef-460c-bd45-d0cbe09e4615" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 609.112576] env[63028]: DEBUG oslo_concurrency.lockutils [None req-b13b0bec-8617-40ed-b3d4-47ce3e1eeef1 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Lock "ba57ed92-aaef-460c-bd45-d0cbe09e4615" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 609.144160] env[63028]: DEBUG oslo_concurrency.lockutils [req-92f9885a-288d-45f6-a26c-b0276fce64bc req-2df9ef2f-68c7-479e-96ee-5f1cdda8110a service nova] Releasing lock "refresh_cache-c7a3f2c6-8368-49cc-9737-ea1d836f1783" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 609.144434] env[63028]: DEBUG nova.compute.manager [req-92f9885a-288d-45f6-a26c-b0276fce64bc req-2df9ef2f-68c7-479e-96ee-5f1cdda8110a service nova] [instance: 4a782483-c24e-44db-b697-856c69cc4a13] Received event network-vif-plugged-a290475c-c96a-4037-9a1f-e4340a86da15 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 609.144620] env[63028]: DEBUG oslo_concurrency.lockutils [req-92f9885a-288d-45f6-a26c-b0276fce64bc req-2df9ef2f-68c7-479e-96ee-5f1cdda8110a service nova] Acquiring lock "4a782483-c24e-44db-b697-856c69cc4a13-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 609.144856] env[63028]: DEBUG oslo_concurrency.lockutils [req-92f9885a-288d-45f6-a26c-b0276fce64bc req-2df9ef2f-68c7-479e-96ee-5f1cdda8110a service nova] Lock "4a782483-c24e-44db-b697-856c69cc4a13-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 609.144961] env[63028]: DEBUG oslo_concurrency.lockutils [req-92f9885a-288d-45f6-a26c-b0276fce64bc req-2df9ef2f-68c7-479e-96ee-5f1cdda8110a service nova] Lock "4a782483-c24e-44db-b697-856c69cc4a13-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 609.149356] env[63028]: DEBUG nova.compute.manager [req-92f9885a-288d-45f6-a26c-b0276fce64bc req-2df9ef2f-68c7-479e-96ee-5f1cdda8110a service nova] [instance: 4a782483-c24e-44db-b697-856c69cc4a13] No waiting events found dispatching network-vif-plugged-a290475c-c96a-4037-9a1f-e4340a86da15 {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 609.149591] env[63028]: WARNING nova.compute.manager [req-92f9885a-288d-45f6-a26c-b0276fce64bc req-2df9ef2f-68c7-479e-96ee-5f1cdda8110a service nova] [instance: 4a782483-c24e-44db-b697-856c69cc4a13] Received unexpected event 
network-vif-plugged-a290475c-c96a-4037-9a1f-e4340a86da15 for instance with vm_state building and task_state spawning. [ 609.149953] env[63028]: DEBUG nova.compute.manager [req-92f9885a-288d-45f6-a26c-b0276fce64bc req-2df9ef2f-68c7-479e-96ee-5f1cdda8110a service nova] [instance: 4a782483-c24e-44db-b697-856c69cc4a13] Received event network-changed-a290475c-c96a-4037-9a1f-e4340a86da15 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 609.149953] env[63028]: DEBUG nova.compute.manager [req-92f9885a-288d-45f6-a26c-b0276fce64bc req-2df9ef2f-68c7-479e-96ee-5f1cdda8110a service nova] [instance: 4a782483-c24e-44db-b697-856c69cc4a13] Refreshing instance network info cache due to event network-changed-a290475c-c96a-4037-9a1f-e4340a86da15. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 609.150155] env[63028]: DEBUG oslo_concurrency.lockutils [req-92f9885a-288d-45f6-a26c-b0276fce64bc req-2df9ef2f-68c7-479e-96ee-5f1cdda8110a service nova] Acquiring lock "refresh_cache-4a782483-c24e-44db-b697-856c69cc4a13" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 609.150320] env[63028]: DEBUG oslo_concurrency.lockutils [req-92f9885a-288d-45f6-a26c-b0276fce64bc req-2df9ef2f-68c7-479e-96ee-5f1cdda8110a service nova] Acquired lock "refresh_cache-4a782483-c24e-44db-b697-856c69cc4a13" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 609.150541] env[63028]: DEBUG nova.network.neutron [req-92f9885a-288d-45f6-a26c-b0276fce64bc req-2df9ef2f-68c7-479e-96ee-5f1cdda8110a service nova] [instance: 4a782483-c24e-44db-b697-856c69cc4a13] Refreshing network info cache for port a290475c-c96a-4037-9a1f-e4340a86da15 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 609.155379] env[63028]: DEBUG oslo_concurrency.lockutils [None req-e8cfdc3e-60ac-4489-8889-136c30d59549 tempest-ServersAdmin275Test-1735254146 tempest-ServersAdmin275Test-1735254146-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 609.198586] env[63028]: DEBUG oslo_vmware.api [None req-fa9cf96e-1fa7-41d8-8fc5-4ef011c3de54 tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] Task: {'id': task-2735002, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.79853} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 609.201516] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa9cf96e-1fa7-41d8-8fc5-4ef011c3de54 tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] 4a782483-c24e-44db-b697-856c69cc4a13/4a782483-c24e-44db-b697-856c69cc4a13.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 609.201736] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-fa9cf96e-1fa7-41d8-8fc5-4ef011c3de54 tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] [instance: 4a782483-c24e-44db-b697-856c69cc4a13] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 609.203060] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b47333ae-7249-4477-a7d8-0a8d94a915f3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.211713] env[63028]: DEBUG oslo_vmware.api [None req-fa9cf96e-1fa7-41d8-8fc5-4ef011c3de54 tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] Waiting for the task: (returnval){ [ 609.211713] env[63028]: value = "task-2735005" [ 609.211713] env[63028]: _type = "Task" [ 609.211713] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 609.225199] env[63028]: DEBUG oslo_vmware.api [None req-fa9cf96e-1fa7-41d8-8fc5-4ef011c3de54 tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] Task: {'id': task-2735005, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 609.250889] env[63028]: DEBUG oslo_vmware.api [None req-4b7d0a7d-24c8-4fd0-b2fc-d929be4c8481 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Task: {'id': task-2735004, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 609.338866] env[63028]: DEBUG nova.virt.hardware [None req-c4ea86be-f4b7-4838-b9cd-5f32319112a9 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 609.339129] env[63028]: DEBUG nova.virt.hardware [None req-c4ea86be-f4b7-4838-b9cd-5f32319112a9 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 609.339282] env[63028]: DEBUG nova.virt.hardware [None req-c4ea86be-f4b7-4838-b9cd-5f32319112a9 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 609.339458] env[63028]: DEBUG nova.virt.hardware [None req-c4ea86be-f4b7-4838-b9cd-5f32319112a9 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 609.339596] env[63028]: DEBUG nova.virt.hardware [None req-c4ea86be-f4b7-4838-b9cd-5f32319112a9 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 609.339739] env[63028]: DEBUG nova.virt.hardware [None req-c4ea86be-f4b7-4838-b9cd-5f32319112a9 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 609.339940] env[63028]: DEBUG nova.virt.hardware [None req-c4ea86be-f4b7-4838-b9cd-5f32319112a9 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 609.342681] env[63028]: DEBUG nova.virt.hardware [None req-c4ea86be-f4b7-4838-b9cd-5f32319112a9 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 609.343033] env[63028]: DEBUG nova.virt.hardware [None req-c4ea86be-f4b7-4838-b9cd-5f32319112a9 
tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 609.343117] env[63028]: DEBUG nova.virt.hardware [None req-c4ea86be-f4b7-4838-b9cd-5f32319112a9 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 609.343297] env[63028]: DEBUG nova.virt.hardware [None req-c4ea86be-f4b7-4838-b9cd-5f32319112a9 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 609.344264] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f94655f-bdf2-4952-a9f3-89769e8fd950 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.359999] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19564f69-4954-49f8-856a-366dd5b56433 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.384678] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c4ea86be-f4b7-4838-b9cd-5f32319112a9 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] [instance: 0dbafad1-ab21-439d-bc8e-e447ac33304e] Instance VIF info [] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 609.393744] env[63028]: DEBUG oslo.service.loopingcall [None req-c4ea86be-f4b7-4838-b9cd-5f32319112a9 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 609.397886] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0dbafad1-ab21-439d-bc8e-e447ac33304e] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 609.402408] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a86f6e0c-c870-4f1a-b151-43fc60e062d0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.422389] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92be9a01-1164-418c-80e3-8f48343d7010 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.427431] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 609.427431] env[63028]: value = "task-2735006" [ 609.427431] env[63028]: _type = "Task" [ 609.427431] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 609.434605] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4ab14e1-eb35-4b18-9441-89e53bfd4f85 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.444540] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735006, 'name': CreateVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 609.483797] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef40b79e-1b76-470a-8072-7685da37cc90 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.487237] env[63028]: DEBUG nova.network.neutron [-] [instance: 67440140-a619-41f2-98fe-eff23e8ad8a5] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 609.494709] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cf97ea6-a45e-433a-bd2c-5569730f48c7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.512357] env[63028]: DEBUG nova.compute.provider_tree [None req-a5c7e971-5f6a-4ff0-9cde-4fb9929da353 tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] Updating inventory in ProviderTree for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 609.560731] env[63028]: DEBUG oslo_concurrency.lockutils [None req-fc2e2e77-fc06-435b-8552-1bfab521033e tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 609.600556] env[63028]: DEBUG oslo_vmware.api [None req-78a1acb4-c1f1-436a-833a-acfe219f578d tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]526b101c-550d-e7fe-1048-b64f7693dfaf, 'name': SearchDatastore_Task, 'duration_secs': 0.01569} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 609.601631] env[63028]: DEBUG oslo_concurrency.lockutils [None req-78a1acb4-c1f1-436a-833a-acfe219f578d tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 609.601631] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-78a1acb4-c1f1-436a-833a-acfe219f578d tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] 44fca05f-51db-4252-bcf8-6bcad37a6147/44fca05f-51db-4252-bcf8-6bcad37a6147.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 609.601631] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-357e2ef7-85fd-4e77-a1b0-486d42831668 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.612742] env[63028]: DEBUG oslo_vmware.api [None req-78a1acb4-c1f1-436a-833a-acfe219f578d tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] Waiting for the task: (returnval){ [ 609.612742] env[63028]: value = "task-2735008" [ 609.612742] env[63028]: _type = "Task" [ 609.612742] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 609.628655] env[63028]: DEBUG oslo_vmware.api [None req-78a1acb4-c1f1-436a-833a-acfe219f578d tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] Task: {'id': task-2735008, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 609.730773] env[63028]: DEBUG oslo_vmware.api [None req-fa9cf96e-1fa7-41d8-8fc5-4ef011c3de54 tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] Task: {'id': task-2735005, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080037} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 609.730773] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-fa9cf96e-1fa7-41d8-8fc5-4ef011c3de54 tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] [instance: 4a782483-c24e-44db-b697-856c69cc4a13] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 609.730985] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52608c0f-1c06-4a99-be32-2c3b182cf9f2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.762527] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-fa9cf96e-1fa7-41d8-8fc5-4ef011c3de54 tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] [instance: 4a782483-c24e-44db-b697-856c69cc4a13] Reconfiguring VM instance instance-00000011 to attach disk [datastore2] 4a782483-c24e-44db-b697-856c69cc4a13/4a782483-c24e-44db-b697-856c69cc4a13.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 609.767432] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f36c99fd-71be-48e6-9203-e687b04c5b9f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.789935] env[63028]: DEBUG oslo_vmware.api [None req-4b7d0a7d-24c8-4fd0-b2fc-d929be4c8481 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Task: {'id': task-2735004, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 609.792204] env[63028]: DEBUG oslo_vmware.api [None req-fa9cf96e-1fa7-41d8-8fc5-4ef011c3de54 tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] Waiting for the task: (returnval){ [ 609.792204] env[63028]: value = "task-2735009" [ 609.792204] env[63028]: _type = "Task" [ 609.792204] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 609.814481] env[63028]: DEBUG oslo_vmware.api [None req-fa9cf96e-1fa7-41d8-8fc5-4ef011c3de54 tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] Task: {'id': task-2735009, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 609.942535] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735006, 'name': CreateVM_Task, 'duration_secs': 0.473544} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 609.942535] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0dbafad1-ab21-439d-bc8e-e447ac33304e] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 609.942535] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c4ea86be-f4b7-4838-b9cd-5f32319112a9 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 609.942535] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c4ea86be-f4b7-4838-b9cd-5f32319112a9 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 609.942535] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c4ea86be-f4b7-4838-b9cd-5f32319112a9 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 609.942535] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-47befed3-3c20-4ec7-b6d8-e56baa25d51f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.950489] env[63028]: DEBUG oslo_vmware.api [None req-c4ea86be-f4b7-4838-b9cd-5f32319112a9 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Waiting for the task: (returnval){ [ 609.950489] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]529accbf-cf34-dcb0-bd46-f2e701fe9b4d" [ 609.950489] env[63028]: _type = "Task" [ 609.950489] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 609.960243] env[63028]: DEBUG oslo_vmware.api [None req-c4ea86be-f4b7-4838-b9cd-5f32319112a9 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]529accbf-cf34-dcb0-bd46-f2e701fe9b4d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 609.989730] env[63028]: INFO nova.compute.manager [-] [instance: 67440140-a619-41f2-98fe-eff23e8ad8a5] Took 1.86 seconds to deallocate network for instance. 
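The recurring "Waiting for the task: (returnval){ ... } to complete", "progress is N%" and "completed successfully" entries above and below record the client-side task-polling pattern: a long-running vCenter operation (CopyVirtualDisk_Task, CreateVM_Task, PowerOnVM_Task, ...) returns a task handle, and the caller polls it until it reaches a terminal state. A minimal, self-contained sketch of that loop, in plain Python against a hypothetical get_task_info callable, is shown below; it is an illustration of the pattern only, not oslo.vmware's actual wait_for_task implementation.

import time

def wait_for_task(get_task_info, poll_interval=0.5, timeout=300.0):
    # get_task_info is a hypothetical zero-argument callable returning an
    # object with .state ("running" | "success" | "error") and .progress
    # (0-100); the attribute names stand in for whatever the real client
    # returns and are assumptions made for this sketch.
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info()
        if info.state == "success":
            return info  # terminal state: task finished, hand back its result
        if info.state == "error":
            raise RuntimeError("task failed: %s" % getattr(info, "error", None))
        # Task still running: report progress (as the log lines above do) and retry.
        print("progress is %d%%" % info.progress)
        time.sleep(poll_interval)
    raise TimeoutError("task did not complete within %.0f seconds" % timeout)
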
[ 610.058686] env[63028]: DEBUG nova.scheduler.client.report [None req-a5c7e971-5f6a-4ff0-9cde-4fb9929da353 tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] Updated inventory for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 with generation 33 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 610.059131] env[63028]: DEBUG nova.compute.provider_tree [None req-a5c7e971-5f6a-4ff0-9cde-4fb9929da353 tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] Updating resource provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 generation from 33 to 34 during operation: update_inventory {{(pid=63028) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 610.059427] env[63028]: DEBUG nova.compute.provider_tree [None req-a5c7e971-5f6a-4ff0-9cde-4fb9929da353 tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] Updating inventory in ProviderTree for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 610.129344] env[63028]: DEBUG oslo_vmware.api [None req-78a1acb4-c1f1-436a-833a-acfe219f578d tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] Task: {'id': task-2735008, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 610.132486] env[63028]: DEBUG nova.network.neutron [req-92f9885a-288d-45f6-a26c-b0276fce64bc req-2df9ef2f-68c7-479e-96ee-5f1cdda8110a service nova] [instance: 4a782483-c24e-44db-b697-856c69cc4a13] Updated VIF entry in instance network info cache for port a290475c-c96a-4037-9a1f-e4340a86da15. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 610.132486] env[63028]: DEBUG nova.network.neutron [req-92f9885a-288d-45f6-a26c-b0276fce64bc req-2df9ef2f-68c7-479e-96ee-5f1cdda8110a service nova] [instance: 4a782483-c24e-44db-b697-856c69cc4a13] Updating instance_info_cache with network_info: [{"id": "a290475c-c96a-4037-9a1f-e4340a86da15", "address": "fa:16:3e:0a:4f:a0", "network": {"id": "eea88aa4-8c75-4cce-b1a6-7b2e64245351", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-868430466-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3b056498f618493295359c1784b6660a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b7bf7d4-8e0c-4cee-84ba-244e73ef6379", "external-id": "nsx-vlan-transportzone-423", "segmentation_id": 423, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa290475c-c9", "ovs_interfaceid": "a290475c-c96a-4037-9a1f-e4340a86da15", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 610.189229] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-39a7ff7c-fa43-4839-8094-07bf564e5924 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: 5a330ed9-c106-49f2-b524-a424e717b5ce] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 610.189229] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f0502715-8aea-4bb0-b668-611fcba94458 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.201515] env[63028]: DEBUG oslo_vmware.api [None req-39a7ff7c-fa43-4839-8094-07bf564e5924 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Waiting for the task: (returnval){ [ 610.201515] env[63028]: value = "task-2735010" [ 610.201515] env[63028]: _type = "Task" [ 610.201515] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 610.222684] env[63028]: DEBUG oslo_vmware.api [None req-39a7ff7c-fa43-4839-8094-07bf564e5924 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Task: {'id': task-2735010, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 610.267349] env[63028]: DEBUG oslo_vmware.api [None req-4b7d0a7d-24c8-4fd0-b2fc-d929be4c8481 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Task: {'id': task-2735004, 'name': PowerOnVM_Task, 'duration_secs': 1.062009} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 610.267349] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b7d0a7d-24c8-4fd0-b2fc-d929be4c8481 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] [instance: c7a3f2c6-8368-49cc-9737-ea1d836f1783] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 610.267602] env[63028]: INFO nova.compute.manager [None req-4b7d0a7d-24c8-4fd0-b2fc-d929be4c8481 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] [instance: c7a3f2c6-8368-49cc-9737-ea1d836f1783] Took 12.31 seconds to spawn the instance on the hypervisor. [ 610.267755] env[63028]: DEBUG nova.compute.manager [None req-4b7d0a7d-24c8-4fd0-b2fc-d929be4c8481 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] [instance: c7a3f2c6-8368-49cc-9737-ea1d836f1783] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 610.268891] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e00dc2c-7e20-4c77-991a-d4f5f7995e04 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.304924] env[63028]: DEBUG oslo_vmware.api [None req-fa9cf96e-1fa7-41d8-8fc5-4ef011c3de54 tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] Task: {'id': task-2735009, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 610.430175] env[63028]: DEBUG nova.compute.manager [req-a92cea67-0b62-42e3-9f9f-137dd5920397 req-be2c7fe8-8cb1-41cb-9e29-e4cfef35b85d service nova] [instance: 67440140-a619-41f2-98fe-eff23e8ad8a5] Received event network-vif-deleted-f7a43b4b-f49a-4b79-b488-55fd4852195c {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 610.463202] env[63028]: DEBUG oslo_vmware.api [None req-c4ea86be-f4b7-4838-b9cd-5f32319112a9 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]529accbf-cf34-dcb0-bd46-f2e701fe9b4d, 'name': SearchDatastore_Task, 'duration_secs': 0.029064} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 610.463582] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c4ea86be-f4b7-4838-b9cd-5f32319112a9 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 610.463833] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c4ea86be-f4b7-4838-b9cd-5f32319112a9 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] [instance: 0dbafad1-ab21-439d-bc8e-e447ac33304e] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 610.464117] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c4ea86be-f4b7-4838-b9cd-5f32319112a9 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 610.464238] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c4ea86be-f4b7-4838-b9cd-5f32319112a9 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 610.464484] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-c4ea86be-f4b7-4838-b9cd-5f32319112a9 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 610.464741] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b7cdf8c0-e95c-488c-9993-80a7b24fb303 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.479402] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-c4ea86be-f4b7-4838-b9cd-5f32319112a9 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 610.479598] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c4ea86be-f4b7-4838-b9cd-5f32319112a9 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 610.480654] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4d83bc5c-8955-4152-b90c-6cd49abb4876 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.487792] env[63028]: DEBUG oslo_vmware.api [None req-c4ea86be-f4b7-4838-b9cd-5f32319112a9 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Waiting for the task: (returnval){ [ 610.487792] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]527c54bd-ae0a-cad3-6a80-ec7c0e6d466b" [ 610.487792] env[63028]: _type = "Task" [ 610.487792] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 610.497814] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a4c82103-d8b7-4845-a6b4-abab95e8e4fb tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 610.503633] env[63028]: DEBUG oslo_vmware.api [None req-c4ea86be-f4b7-4838-b9cd-5f32319112a9 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]527c54bd-ae0a-cad3-6a80-ec7c0e6d466b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 610.566968] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a5c7e971-5f6a-4ff0-9cde-4fb9929da353 tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.624s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 610.569450] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c538ab0a-f047-4352-93f7-b849465565d9 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 18.568s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 610.569655] env[63028]: DEBUG nova.objects.instance [None req-c538ab0a-f047-4352-93f7-b849465565d9 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] [instance: 94b1bf30-0f9b-4197-99ff-6631a13ab2d1] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63028) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 610.605154] env[63028]: INFO nova.scheduler.client.report [None req-a5c7e971-5f6a-4ff0-9cde-4fb9929da353 tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] Deleted allocations for instance a167df01-05e4-453d-8800-9c104d912474 [ 610.630587] env[63028]: DEBUG oslo_vmware.api [None req-78a1acb4-c1f1-436a-833a-acfe219f578d tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] Task: {'id': task-2735008, 'name': 
CopyVirtualDisk_Task, 'duration_secs': 0.710017} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 610.631207] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-78a1acb4-c1f1-436a-833a-acfe219f578d tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] 44fca05f-51db-4252-bcf8-6bcad37a6147/44fca05f-51db-4252-bcf8-6bcad37a6147.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 610.631207] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-78a1acb4-c1f1-436a-833a-acfe219f578d tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] [instance: 44fca05f-51db-4252-bcf8-6bcad37a6147] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 610.631396] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bc6f0b32-8a51-43a4-9b4e-6bd7a90a4e25 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.637707] env[63028]: DEBUG oslo_concurrency.lockutils [req-92f9885a-288d-45f6-a26c-b0276fce64bc req-2df9ef2f-68c7-479e-96ee-5f1cdda8110a service nova] Releasing lock "refresh_cache-4a782483-c24e-44db-b697-856c69cc4a13" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 610.638146] env[63028]: DEBUG nova.compute.manager [req-92f9885a-288d-45f6-a26c-b0276fce64bc req-2df9ef2f-68c7-479e-96ee-5f1cdda8110a service nova] [instance: 44fca05f-51db-4252-bcf8-6bcad37a6147] Received event network-vif-plugged-4fb8a759-76bd-4b37-a810-2665ea4a32b3 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 610.638380] env[63028]: DEBUG oslo_concurrency.lockutils [req-92f9885a-288d-45f6-a26c-b0276fce64bc req-2df9ef2f-68c7-479e-96ee-5f1cdda8110a service nova] Acquiring lock "44fca05f-51db-4252-bcf8-6bcad37a6147-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 610.639656] env[63028]: DEBUG oslo_concurrency.lockutils [req-92f9885a-288d-45f6-a26c-b0276fce64bc req-2df9ef2f-68c7-479e-96ee-5f1cdda8110a service nova] Lock "44fca05f-51db-4252-bcf8-6bcad37a6147-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 610.639656] env[63028]: DEBUG oslo_concurrency.lockutils [req-92f9885a-288d-45f6-a26c-b0276fce64bc req-2df9ef2f-68c7-479e-96ee-5f1cdda8110a service nova] Lock "44fca05f-51db-4252-bcf8-6bcad37a6147-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 610.639656] env[63028]: DEBUG nova.compute.manager [req-92f9885a-288d-45f6-a26c-b0276fce64bc req-2df9ef2f-68c7-479e-96ee-5f1cdda8110a service nova] [instance: 44fca05f-51db-4252-bcf8-6bcad37a6147] No waiting events found dispatching network-vif-plugged-4fb8a759-76bd-4b37-a810-2665ea4a32b3 
{{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 610.639656] env[63028]: WARNING nova.compute.manager [req-92f9885a-288d-45f6-a26c-b0276fce64bc req-2df9ef2f-68c7-479e-96ee-5f1cdda8110a service nova] [instance: 44fca05f-51db-4252-bcf8-6bcad37a6147] Received unexpected event network-vif-plugged-4fb8a759-76bd-4b37-a810-2665ea4a32b3 for instance with vm_state building and task_state spawning. [ 610.639656] env[63028]: DEBUG nova.compute.manager [req-92f9885a-288d-45f6-a26c-b0276fce64bc req-2df9ef2f-68c7-479e-96ee-5f1cdda8110a service nova] [instance: 44fca05f-51db-4252-bcf8-6bcad37a6147] Received event network-changed-4fb8a759-76bd-4b37-a810-2665ea4a32b3 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 610.639952] env[63028]: DEBUG nova.compute.manager [req-92f9885a-288d-45f6-a26c-b0276fce64bc req-2df9ef2f-68c7-479e-96ee-5f1cdda8110a service nova] [instance: 44fca05f-51db-4252-bcf8-6bcad37a6147] Refreshing instance network info cache due to event network-changed-4fb8a759-76bd-4b37-a810-2665ea4a32b3. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 610.639952] env[63028]: DEBUG oslo_concurrency.lockutils [req-92f9885a-288d-45f6-a26c-b0276fce64bc req-2df9ef2f-68c7-479e-96ee-5f1cdda8110a service nova] Acquiring lock "refresh_cache-44fca05f-51db-4252-bcf8-6bcad37a6147" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 610.640087] env[63028]: DEBUG oslo_concurrency.lockutils [req-92f9885a-288d-45f6-a26c-b0276fce64bc req-2df9ef2f-68c7-479e-96ee-5f1cdda8110a service nova] Acquired lock "refresh_cache-44fca05f-51db-4252-bcf8-6bcad37a6147" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 610.640642] env[63028]: DEBUG nova.network.neutron [req-92f9885a-288d-45f6-a26c-b0276fce64bc req-2df9ef2f-68c7-479e-96ee-5f1cdda8110a service nova] [instance: 44fca05f-51db-4252-bcf8-6bcad37a6147] Refreshing network info cache for port 4fb8a759-76bd-4b37-a810-2665ea4a32b3 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 610.643809] env[63028]: DEBUG oslo_vmware.api [None req-78a1acb4-c1f1-436a-833a-acfe219f578d tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] Waiting for the task: (returnval){ [ 610.643809] env[63028]: value = "task-2735011" [ 610.643809] env[63028]: _type = "Task" [ 610.643809] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 610.653913] env[63028]: DEBUG oslo_vmware.api [None req-78a1acb4-c1f1-436a-833a-acfe219f578d tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] Task: {'id': task-2735011, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 610.714450] env[63028]: DEBUG oslo_vmware.api [None req-39a7ff7c-fa43-4839-8094-07bf564e5924 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Task: {'id': task-2735010, 'name': PowerOffVM_Task, 'duration_secs': 0.386498} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 610.715195] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-39a7ff7c-fa43-4839-8094-07bf564e5924 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: 5a330ed9-c106-49f2-b524-a424e717b5ce] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 610.716487] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4196bd7-7181-4e7c-9eb4-896e0627764b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.739562] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09658ba4-a711-4a4d-890e-a4483f502fd3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.794313] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-39a7ff7c-fa43-4839-8094-07bf564e5924 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: 5a330ed9-c106-49f2-b524-a424e717b5ce] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 610.794875] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8d297814-c2b4-487c-8dd7-c9b4f3a37e59 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.807335] env[63028]: INFO nova.compute.manager [None req-4b7d0a7d-24c8-4fd0-b2fc-d929be4c8481 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] [instance: c7a3f2c6-8368-49cc-9737-ea1d836f1783] Took 27.92 seconds to build instance. [ 610.817524] env[63028]: DEBUG oslo_vmware.api [None req-fa9cf96e-1fa7-41d8-8fc5-4ef011c3de54 tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] Task: {'id': task-2735009, 'name': ReconfigVM_Task, 'duration_secs': 0.883312} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 610.819412] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-fa9cf96e-1fa7-41d8-8fc5-4ef011c3de54 tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] [instance: 4a782483-c24e-44db-b697-856c69cc4a13] Reconfigured VM instance instance-00000011 to attach disk [datastore2] 4a782483-c24e-44db-b697-856c69cc4a13/4a782483-c24e-44db-b697-856c69cc4a13.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 610.820324] env[63028]: DEBUG oslo_vmware.api [None req-39a7ff7c-fa43-4839-8094-07bf564e5924 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Waiting for the task: (returnval){ [ 610.820324] env[63028]: value = "task-2735012" [ 610.820324] env[63028]: _type = "Task" [ 610.820324] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 610.821341] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ff456cd3-e711-47bb-89f9-69bdd3ba3c28 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.840425] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-39a7ff7c-fa43-4839-8094-07bf564e5924 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: 5a330ed9-c106-49f2-b524-a424e717b5ce] VM already powered off {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 610.840854] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-39a7ff7c-fa43-4839-8094-07bf564e5924 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: 5a330ed9-c106-49f2-b524-a424e717b5ce] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 610.841143] env[63028]: DEBUG oslo_concurrency.lockutils [None req-39a7ff7c-fa43-4839-8094-07bf564e5924 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 610.841478] env[63028]: DEBUG oslo_concurrency.lockutils [None req-39a7ff7c-fa43-4839-8094-07bf564e5924 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 610.841562] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-39a7ff7c-fa43-4839-8094-07bf564e5924 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 610.841953] env[63028]: DEBUG oslo_vmware.api [None req-fa9cf96e-1fa7-41d8-8fc5-4ef011c3de54 tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] Waiting for the task: (returnval){ [ 610.841953] env[63028]: value = "task-2735013" [ 610.841953] env[63028]: _type = "Task" [ 610.841953] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 610.844881] env[63028]: DEBUG nova.network.neutron [None req-c7c3e8ac-1fec-453a-8f4f-76e9aae0d91a tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: f3277886-4498-45c6-be68-e71d8293dc00] Successfully updated port: 08a61148-5b3a-4bb0-a130-3eda62d6bf7c {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 610.845639] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-aa2aaa39-e52a-45ae-a10e-1bbc57fff97c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.861397] env[63028]: DEBUG oslo_concurrency.lockutils [None req-38942d5b-be69-4e95-8160-07bf85f3e0d2 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Acquiring lock "b9d9fe4e-438c-4f68-b011-9eb9e10a381c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 610.861715] env[63028]: DEBUG oslo_concurrency.lockutils [None req-38942d5b-be69-4e95-8160-07bf85f3e0d2 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Lock "b9d9fe4e-438c-4f68-b011-9eb9e10a381c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 610.868845] env[63028]: DEBUG oslo_vmware.api [None req-fa9cf96e-1fa7-41d8-8fc5-4ef011c3de54 tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] Task: {'id': task-2735013, 'name': Rename_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 610.870154] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-39a7ff7c-fa43-4839-8094-07bf564e5924 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 610.870455] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-39a7ff7c-fa43-4839-8094-07bf564e5924 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 610.871065] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0c9819f4-e78d-4e57-be12-7b4def06837a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.878967] env[63028]: DEBUG oslo_vmware.api [None req-39a7ff7c-fa43-4839-8094-07bf564e5924 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Waiting for the task: (returnval){ [ 610.878967] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]525cc760-16e6-f63e-9655-b275242da820" [ 610.878967] env[63028]: _type = "Task" [ 610.878967] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 610.891090] env[63028]: DEBUG oslo_vmware.api [None req-39a7ff7c-fa43-4839-8094-07bf564e5924 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]525cc760-16e6-f63e-9655-b275242da820, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.004645] env[63028]: DEBUG oslo_vmware.api [None req-c4ea86be-f4b7-4838-b9cd-5f32319112a9 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]527c54bd-ae0a-cad3-6a80-ec7c0e6d466b, 'name': SearchDatastore_Task, 'duration_secs': 0.028451} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 611.005245] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c44e37e2-4e24-47c9-8118-b760ba311711 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.014441] env[63028]: DEBUG oslo_vmware.api [None req-c4ea86be-f4b7-4838-b9cd-5f32319112a9 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Waiting for the task: (returnval){ [ 611.014441] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52a27097-6b64-f84a-9471-80af5ac36896" [ 611.014441] env[63028]: _type = "Task" [ 611.014441] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 611.024087] env[63028]: DEBUG oslo_vmware.api [None req-c4ea86be-f4b7-4838-b9cd-5f32319112a9 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52a27097-6b64-f84a-9471-80af5ac36896, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.113983] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a5c7e971-5f6a-4ff0-9cde-4fb9929da353 tempest-ServerDiagnosticsNegativeTest-1368205052 tempest-ServerDiagnosticsNegativeTest-1368205052-project-member] Lock "a167df01-05e4-453d-8800-9c104d912474" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.353s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 611.157030] env[63028]: DEBUG oslo_vmware.api [None req-78a1acb4-c1f1-436a-833a-acfe219f578d tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] Task: {'id': task-2735011, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076295} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 611.157936] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-78a1acb4-c1f1-436a-833a-acfe219f578d tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] [instance: 44fca05f-51db-4252-bcf8-6bcad37a6147] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 611.158119] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7319f7a-ea0b-47f5-84f8-948b3159c4c0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.183914] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-78a1acb4-c1f1-436a-833a-acfe219f578d tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] [instance: 44fca05f-51db-4252-bcf8-6bcad37a6147] Reconfiguring VM instance instance-00000012 to attach disk [datastore2] 44fca05f-51db-4252-bcf8-6bcad37a6147/44fca05f-51db-4252-bcf8-6bcad37a6147.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 611.184288] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-50e7c24a-d7d9-4cf8-a66e-4d5540835221 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.209575] env[63028]: DEBUG oslo_vmware.api [None req-78a1acb4-c1f1-436a-833a-acfe219f578d tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] Waiting for the task: (returnval){ [ 611.209575] env[63028]: value = "task-2735014" [ 611.209575] env[63028]: _type = "Task" [ 611.209575] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 611.220121] env[63028]: DEBUG oslo_vmware.api [None req-78a1acb4-c1f1-436a-833a-acfe219f578d tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] Task: {'id': task-2735014, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.311110] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4b7d0a7d-24c8-4fd0-b2fc-d929be4c8481 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Lock "c7a3f2c6-8368-49cc-9737-ea1d836f1783" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.682s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 611.351347] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c7c3e8ac-1fec-453a-8f4f-76e9aae0d91a tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Acquiring lock "refresh_cache-f3277886-4498-45c6-be68-e71d8293dc00" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 611.351665] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c7c3e8ac-1fec-453a-8f4f-76e9aae0d91a tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Acquired lock "refresh_cache-f3277886-4498-45c6-be68-e71d8293dc00" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 611.351991] env[63028]: DEBUG nova.network.neutron [None req-c7c3e8ac-1fec-453a-8f4f-76e9aae0d91a tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: f3277886-4498-45c6-be68-e71d8293dc00] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 611.371032] env[63028]: DEBUG oslo_vmware.api [None req-fa9cf96e-1fa7-41d8-8fc5-4ef011c3de54 tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] Task: {'id': task-2735013, 'name': Rename_Task, 'duration_secs': 0.406713} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 611.371308] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa9cf96e-1fa7-41d8-8fc5-4ef011c3de54 tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] [instance: 4a782483-c24e-44db-b697-856c69cc4a13] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 611.371595] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b64030ab-ae05-42eb-b4c0-906051aca073 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.387286] env[63028]: DEBUG oslo_vmware.api [None req-fa9cf96e-1fa7-41d8-8fc5-4ef011c3de54 tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] Waiting for the task: (returnval){ [ 611.387286] env[63028]: value = "task-2735015" [ 611.387286] env[63028]: _type = "Task" [ 611.387286] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 611.397615] env[63028]: DEBUG oslo_vmware.api [None req-39a7ff7c-fa43-4839-8094-07bf564e5924 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]525cc760-16e6-f63e-9655-b275242da820, 'name': SearchDatastore_Task, 'duration_secs': 0.039165} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 611.398936] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-54c4d1e5-b9fa-4248-ac6a-2112605c3a8d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.406094] env[63028]: DEBUG oslo_vmware.api [None req-fa9cf96e-1fa7-41d8-8fc5-4ef011c3de54 tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] Task: {'id': task-2735015, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.411363] env[63028]: DEBUG oslo_vmware.api [None req-39a7ff7c-fa43-4839-8094-07bf564e5924 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Waiting for the task: (returnval){ [ 611.411363] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52985d7d-4d88-d911-f43f-b1a8fd2381bc" [ 611.411363] env[63028]: _type = "Task" [ 611.411363] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 611.421430] env[63028]: DEBUG oslo_vmware.api [None req-39a7ff7c-fa43-4839-8094-07bf564e5924 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52985d7d-4d88-d911-f43f-b1a8fd2381bc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.528340] env[63028]: DEBUG oslo_vmware.api [None req-c4ea86be-f4b7-4838-b9cd-5f32319112a9 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52a27097-6b64-f84a-9471-80af5ac36896, 'name': SearchDatastore_Task, 'duration_secs': 0.040183} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 611.529439] env[63028]: DEBUG nova.network.neutron [req-92f9885a-288d-45f6-a26c-b0276fce64bc req-2df9ef2f-68c7-479e-96ee-5f1cdda8110a service nova] [instance: 44fca05f-51db-4252-bcf8-6bcad37a6147] Updated VIF entry in instance network info cache for port 4fb8a759-76bd-4b37-a810-2665ea4a32b3. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 611.529729] env[63028]: DEBUG nova.network.neutron [req-92f9885a-288d-45f6-a26c-b0276fce64bc req-2df9ef2f-68c7-479e-96ee-5f1cdda8110a service nova] [instance: 44fca05f-51db-4252-bcf8-6bcad37a6147] Updating instance_info_cache with network_info: [{"id": "4fb8a759-76bd-4b37-a810-2665ea4a32b3", "address": "fa:16:3e:6b:a2:3a", "network": {"id": "3a62b3ba-178e-4e4a-b8ce-a36e2a779888", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1226406934-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "824cdcbbc0e1478b9d900cde707bc67b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37fb1918-d178-4e12-93e6-316381e78be4", "external-id": "nsx-vlan-transportzone-763", "segmentation_id": 763, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4fb8a759-76", "ovs_interfaceid": "4fb8a759-76bd-4b37-a810-2665ea4a32b3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 611.531772] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c4ea86be-f4b7-4838-b9cd-5f32319112a9 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 611.531772] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4ea86be-f4b7-4838-b9cd-5f32319112a9 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] 0dbafad1-ab21-439d-bc8e-e447ac33304e/0dbafad1-ab21-439d-bc8e-e447ac33304e.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 611.532095] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e7109c79-2a27-40e5-a7f4-e43a921919b2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.541299] env[63028]: DEBUG oslo_vmware.api [None req-c4ea86be-f4b7-4838-b9cd-5f32319112a9 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Waiting for the task: (returnval){ [ 611.541299] env[63028]: value = "task-2735016" [ 611.541299] env[63028]: _type = "Task" [ 611.541299] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 611.553453] env[63028]: DEBUG oslo_vmware.api [None req-c4ea86be-f4b7-4838-b9cd-5f32319112a9 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Task: {'id': task-2735016, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.579642] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c538ab0a-f047-4352-93f7-b849465565d9 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.010s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 611.580961] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f56ccc99-23c5-4ed6-8376-04ed81d169c1 tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.688s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 611.581267] env[63028]: DEBUG nova.objects.instance [None req-f56ccc99-23c5-4ed6-8376-04ed81d169c1 tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] Lazy-loading 'resources' on Instance uuid f80df630-327b-4923-a785-5d2e48fe1f19 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 611.595033] env[63028]: DEBUG oslo_concurrency.lockutils [None req-154bc663-e50a-4893-813f-d703ddd97a79 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Acquiring lock "94b1bf30-0f9b-4197-99ff-6631a13ab2d1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 611.595272] env[63028]: DEBUG oslo_concurrency.lockutils [None req-154bc663-e50a-4893-813f-d703ddd97a79 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Lock "94b1bf30-0f9b-4197-99ff-6631a13ab2d1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 611.595505] env[63028]: DEBUG oslo_concurrency.lockutils [None req-154bc663-e50a-4893-813f-d703ddd97a79 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Acquiring lock "94b1bf30-0f9b-4197-99ff-6631a13ab2d1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 611.595704] env[63028]: DEBUG oslo_concurrency.lockutils [None req-154bc663-e50a-4893-813f-d703ddd97a79 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Lock "94b1bf30-0f9b-4197-99ff-6631a13ab2d1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 611.595896] env[63028]: DEBUG oslo_concurrency.lockutils [None req-154bc663-e50a-4893-813f-d703ddd97a79 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Lock "94b1bf30-0f9b-4197-99ff-6631a13ab2d1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 611.602433] env[63028]: INFO nova.compute.manager [None req-154bc663-e50a-4893-813f-d703ddd97a79 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] [instance: 94b1bf30-0f9b-4197-99ff-6631a13ab2d1] Terminating instance [ 611.722987] env[63028]: DEBUG oslo_vmware.api [None req-78a1acb4-c1f1-436a-833a-acfe219f578d tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] Task: {'id': task-2735014, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.815545] env[63028]: DEBUG nova.compute.manager [None req-bbc4a681-f719-4c6e-ac1b-190ea2c48080 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: 5a340e31-678c-437e-aa4e-07d5d9f4334f] Starting instance... {{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 611.904894] env[63028]: DEBUG oslo_vmware.api [None req-fa9cf96e-1fa7-41d8-8fc5-4ef011c3de54 tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] Task: {'id': task-2735015, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.923687] env[63028]: DEBUG nova.network.neutron [None req-c7c3e8ac-1fec-453a-8f4f-76e9aae0d91a tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: f3277886-4498-45c6-be68-e71d8293dc00] Instance cache missing network info. {{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 611.941745] env[63028]: DEBUG oslo_vmware.api [None req-39a7ff7c-fa43-4839-8094-07bf564e5924 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52985d7d-4d88-d911-f43f-b1a8fd2381bc, 'name': SearchDatastore_Task, 'duration_secs': 0.017382} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 611.941928] env[63028]: DEBUG oslo_concurrency.lockutils [None req-39a7ff7c-fa43-4839-8094-07bf564e5924 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 611.942292] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-39a7ff7c-fa43-4839-8094-07bf564e5924 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] 5a330ed9-c106-49f2-b524-a424e717b5ce/f2ba2026-3f4b-431c-97c1-c4ba582a9907-rescue.vmdk. 
{{(pid=63028) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 611.942455] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-35dd8da8-8937-4243-b42e-6368b7001678 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.961455] env[63028]: DEBUG oslo_vmware.api [None req-39a7ff7c-fa43-4839-8094-07bf564e5924 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Waiting for the task: (returnval){ [ 611.961455] env[63028]: value = "task-2735018" [ 611.961455] env[63028]: _type = "Task" [ 611.961455] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 611.973492] env[63028]: DEBUG oslo_vmware.api [None req-39a7ff7c-fa43-4839-8094-07bf564e5924 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Task: {'id': task-2735018, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 612.005836] env[63028]: DEBUG nova.compute.manager [None req-53602f48-db0a-4b2c-b359-fb6e89b1101f tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] [instance: c9cc1ac7-06c6-415b-86ce-daf4849bfc05] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 612.007122] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4984ae88-abbc-47b1-9334-b8d0572cc870 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.033517] env[63028]: DEBUG oslo_concurrency.lockutils [req-92f9885a-288d-45f6-a26c-b0276fce64bc req-2df9ef2f-68c7-479e-96ee-5f1cdda8110a service nova] Releasing lock "refresh_cache-44fca05f-51db-4252-bcf8-6bcad37a6147" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 612.060378] env[63028]: DEBUG oslo_vmware.api [None req-c4ea86be-f4b7-4838-b9cd-5f32319112a9 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Task: {'id': task-2735016, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 612.110030] env[63028]: DEBUG oslo_concurrency.lockutils [None req-154bc663-e50a-4893-813f-d703ddd97a79 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Acquiring lock "refresh_cache-94b1bf30-0f9b-4197-99ff-6631a13ab2d1" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 612.110030] env[63028]: DEBUG oslo_concurrency.lockutils [None req-154bc663-e50a-4893-813f-d703ddd97a79 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Acquired lock "refresh_cache-94b1bf30-0f9b-4197-99ff-6631a13ab2d1" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 612.110030] env[63028]: DEBUG nova.network.neutron [None req-154bc663-e50a-4893-813f-d703ddd97a79 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] [instance: 94b1bf30-0f9b-4197-99ff-6631a13ab2d1] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 612.154020] env[63028]: DEBUG nova.network.neutron [None req-c7c3e8ac-1fec-453a-8f4f-76e9aae0d91a tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: f3277886-4498-45c6-be68-e71d8293dc00] Updating instance_info_cache with network_info: [{"id": "08a61148-5b3a-4bb0-a130-3eda62d6bf7c", "address": "fa:16:3e:8e:67:6c", "network": {"id": "47c482bc-2ff1-431d-8910-0bf36def79a2", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.130", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "96661ac8d4f04d6e97eea4809b444133", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56136ef6-99d7-4562-9a9f-d66fec951c5c", "external-id": "nsx-vlan-transportzone-32", "segmentation_id": 32, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap08a61148-5b", "ovs_interfaceid": "08a61148-5b3a-4bb0-a130-3eda62d6bf7c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 612.222715] env[63028]: DEBUG oslo_vmware.api [None req-78a1acb4-c1f1-436a-833a-acfe219f578d tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] Task: {'id': task-2735014, 'name': ReconfigVM_Task, 'duration_secs': 0.515591} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 612.227778] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-78a1acb4-c1f1-436a-833a-acfe219f578d tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] [instance: 44fca05f-51db-4252-bcf8-6bcad37a6147] Reconfigured VM instance instance-00000012 to attach disk [datastore2] 44fca05f-51db-4252-bcf8-6bcad37a6147/44fca05f-51db-4252-bcf8-6bcad37a6147.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 612.228891] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4a2dba3f-9eed-4864-acc6-7d00b44cf395 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.237981] env[63028]: DEBUG oslo_vmware.api [None req-78a1acb4-c1f1-436a-833a-acfe219f578d tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] Waiting for the task: (returnval){ [ 612.237981] env[63028]: value = "task-2735019" [ 612.237981] env[63028]: _type = "Task" [ 612.237981] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 612.249109] env[63028]: DEBUG oslo_vmware.api [None req-78a1acb4-c1f1-436a-833a-acfe219f578d tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] Task: {'id': task-2735019, 'name': Rename_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 612.341113] env[63028]: DEBUG oslo_concurrency.lockutils [None req-bbc4a681-f719-4c6e-ac1b-190ea2c48080 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 612.403098] env[63028]: DEBUG oslo_vmware.api [None req-fa9cf96e-1fa7-41d8-8fc5-4ef011c3de54 tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] Task: {'id': task-2735015, 'name': PowerOnVM_Task, 'duration_secs': 0.550357} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 612.403098] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa9cf96e-1fa7-41d8-8fc5-4ef011c3de54 tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] [instance: 4a782483-c24e-44db-b697-856c69cc4a13] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 612.403098] env[63028]: INFO nova.compute.manager [None req-fa9cf96e-1fa7-41d8-8fc5-4ef011c3de54 tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] [instance: 4a782483-c24e-44db-b697-856c69cc4a13] Took 11.78 seconds to spawn the instance on the hypervisor. 
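The repeated "Waiting for the task ... progress is N% ... completed successfully" entries above come from a poll-until-done loop around vCenter task objects. Below is a minimal, illustrative Python sketch of that pattern, not taken from this deployment: get_task_info is a hypothetical callable standing in for the real task-state query, and the states, interval, and timeout are assumptions rather than the values used here.

import time

# Illustrative only: poll a vCenter-style task until it finishes, reporting
# progress along the way, similar in spirit to the wait_for_task/_poll_task
# entries above. `get_task_info` is a hypothetical callable returning an
# object with .state ('running' / 'success' / 'error') and .progress.
def wait_for_task(get_task_info, interval=0.5, timeout=300.0):
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info()
        if info.state == "success":
            return info
        if info.state == "error":
            raise RuntimeError("task failed: %s" % getattr(info, "error", None))
        print("progress is %s%%" % (info.progress or 0))
        time.sleep(interval)
    raise TimeoutError("task did not complete within %.0fs" % timeout)
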
[ 612.403098] env[63028]: DEBUG nova.compute.manager [None req-fa9cf96e-1fa7-41d8-8fc5-4ef011c3de54 tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] [instance: 4a782483-c24e-44db-b697-856c69cc4a13] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 612.403642] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01040f9c-a4a2-4b58-a824-c3035519730a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.479365] env[63028]: DEBUG oslo_vmware.api [None req-39a7ff7c-fa43-4839-8094-07bf564e5924 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Task: {'id': task-2735018, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 612.525077] env[63028]: INFO nova.compute.manager [None req-53602f48-db0a-4b2c-b359-fb6e89b1101f tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] [instance: c9cc1ac7-06c6-415b-86ce-daf4849bfc05] instance snapshotting [ 612.528348] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-784b8432-5eec-4be8-a83e-28a44cbdb670 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.561467] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-007d94d0-495c-46d8-8d87-9a53620ba630 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.576109] env[63028]: DEBUG oslo_vmware.api [None req-c4ea86be-f4b7-4838-b9cd-5f32319112a9 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Task: {'id': task-2735016, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.632654} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 612.576109] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4ea86be-f4b7-4838-b9cd-5f32319112a9 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] 0dbafad1-ab21-439d-bc8e-e447ac33304e/0dbafad1-ab21-439d-bc8e-e447ac33304e.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 612.576402] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c4ea86be-f4b7-4838-b9cd-5f32319112a9 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] [instance: 0dbafad1-ab21-439d-bc8e-e447ac33304e] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 612.576572] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f88fd7a1-5e1b-42c4-beaa-af269234ec7d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.588736] env[63028]: DEBUG oslo_vmware.api [None req-c4ea86be-f4b7-4838-b9cd-5f32319112a9 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Waiting for the task: (returnval){ [ 612.588736] env[63028]: value = "task-2735020" [ 612.588736] env[63028]: _type = "Task" [ 612.588736] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 612.599280] env[63028]: DEBUG oslo_vmware.api [None req-c4ea86be-f4b7-4838-b9cd-5f32319112a9 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Task: {'id': task-2735020, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 612.624768] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79ce4949-519b-404e-a4ce-4fd21b993555 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.639575] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f82c4aed-a530-41e2-8370-ad4e936f9538 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.646512] env[63028]: DEBUG nova.network.neutron [None req-154bc663-e50a-4893-813f-d703ddd97a79 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] [instance: 94b1bf30-0f9b-4197-99ff-6631a13ab2d1] Instance cache missing network info. 
{{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 612.679654] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c7c3e8ac-1fec-453a-8f4f-76e9aae0d91a tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Releasing lock "refresh_cache-f3277886-4498-45c6-be68-e71d8293dc00" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 612.680942] env[63028]: DEBUG nova.compute.manager [None req-c7c3e8ac-1fec-453a-8f4f-76e9aae0d91a tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: f3277886-4498-45c6-be68-e71d8293dc00] Instance network_info: |[{"id": "08a61148-5b3a-4bb0-a130-3eda62d6bf7c", "address": "fa:16:3e:8e:67:6c", "network": {"id": "47c482bc-2ff1-431d-8910-0bf36def79a2", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.130", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "96661ac8d4f04d6e97eea4809b444133", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56136ef6-99d7-4562-9a9f-d66fec951c5c", "external-id": "nsx-vlan-transportzone-32", "segmentation_id": 32, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap08a61148-5b", "ovs_interfaceid": "08a61148-5b3a-4bb0-a130-3eda62d6bf7c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 612.684081] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c7c3e8ac-1fec-453a-8f4f-76e9aae0d91a tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: f3277886-4498-45c6-be68-e71d8293dc00] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8e:67:6c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '56136ef6-99d7-4562-9a9f-d66fec951c5c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '08a61148-5b3a-4bb0-a130-3eda62d6bf7c', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 612.691118] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7c3e8ac-1fec-453a-8f4f-76e9aae0d91a tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Creating folder: Project (e14d427c980c486cbbe8ff0982a30428). Parent ref: group-v550570. 
{{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 612.691962] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d2978fd-ecee-4d0e-ad30-015c02e0d4b5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.695596] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d5564c28-8eac-4c75-a09c-0abd4b9b34c0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.704988] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a194aed-d998-4c25-911e-01a1af0633ca {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.711429] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-c7c3e8ac-1fec-453a-8f4f-76e9aae0d91a tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Created folder: Project (e14d427c980c486cbbe8ff0982a30428) in parent group-v550570. [ 612.711677] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7c3e8ac-1fec-453a-8f4f-76e9aae0d91a tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Creating folder: Instances. Parent ref: group-v550631. {{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 612.712641] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e17867e5-1056-4f7e-a908-f85592793398 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.724317] env[63028]: DEBUG nova.compute.provider_tree [None req-f56ccc99-23c5-4ed6-8376-04ed81d169c1 tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 612.740307] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-c7c3e8ac-1fec-453a-8f4f-76e9aae0d91a tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Created folder: Instances in parent group-v550631. [ 612.740734] env[63028]: DEBUG oslo.service.loopingcall [None req-c7c3e8ac-1fec-453a-8f4f-76e9aae0d91a tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 612.744535] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f3277886-4498-45c6-be68-e71d8293dc00] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 612.745280] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c68594b8-c798-44dd-a34d-b97833365c8b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.768806] env[63028]: DEBUG oslo_vmware.api [None req-78a1acb4-c1f1-436a-833a-acfe219f578d tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] Task: {'id': task-2735019, 'name': Rename_Task, 'duration_secs': 0.249768} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 612.770519] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-78a1acb4-c1f1-436a-833a-acfe219f578d tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] [instance: 44fca05f-51db-4252-bcf8-6bcad37a6147] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 612.770735] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 612.770735] env[63028]: value = "task-2735023" [ 612.770735] env[63028]: _type = "Task" [ 612.770735] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 612.771140] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d7bd84f8-ade2-49df-a979-37af3f5ffb5a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.783299] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735023, 'name': CreateVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 612.785242] env[63028]: DEBUG oslo_vmware.api [None req-78a1acb4-c1f1-436a-833a-acfe219f578d tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] Waiting for the task: (returnval){ [ 612.785242] env[63028]: value = "task-2735024" [ 612.785242] env[63028]: _type = "Task" [ 612.785242] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 612.795334] env[63028]: DEBUG oslo_concurrency.lockutils [None req-fe6cbe68-33e9-494c-908d-9d22e427dcda tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Acquiring lock "f311a533-5c48-410b-ba3b-58f0032c8816" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 612.795683] env[63028]: DEBUG oslo_concurrency.lockutils [None req-fe6cbe68-33e9-494c-908d-9d22e427dcda tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Lock "f311a533-5c48-410b-ba3b-58f0032c8816" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 612.795926] env[63028]: DEBUG oslo_concurrency.lockutils [None req-fe6cbe68-33e9-494c-908d-9d22e427dcda tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Acquiring lock "f311a533-5c48-410b-ba3b-58f0032c8816-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 612.796544] env[63028]: DEBUG oslo_concurrency.lockutils [None req-fe6cbe68-33e9-494c-908d-9d22e427dcda tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Lock "f311a533-5c48-410b-ba3b-58f0032c8816-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 612.796544] env[63028]: DEBUG 
oslo_concurrency.lockutils [None req-fe6cbe68-33e9-494c-908d-9d22e427dcda tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Lock "f311a533-5c48-410b-ba3b-58f0032c8816-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 612.798212] env[63028]: DEBUG oslo_vmware.api [None req-78a1acb4-c1f1-436a-833a-acfe219f578d tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] Task: {'id': task-2735024, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 612.799207] env[63028]: DEBUG nova.network.neutron [None req-154bc663-e50a-4893-813f-d703ddd97a79 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] [instance: 94b1bf30-0f9b-4197-99ff-6631a13ab2d1] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 612.801084] env[63028]: INFO nova.compute.manager [None req-fe6cbe68-33e9-494c-908d-9d22e427dcda tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: f311a533-5c48-410b-ba3b-58f0032c8816] Terminating instance [ 612.929710] env[63028]: INFO nova.compute.manager [None req-fa9cf96e-1fa7-41d8-8fc5-4ef011c3de54 tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] [instance: 4a782483-c24e-44db-b697-856c69cc4a13] Took 25.80 seconds to build instance. [ 612.975737] env[63028]: DEBUG oslo_vmware.api [None req-39a7ff7c-fa43-4839-8094-07bf564e5924 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Task: {'id': task-2735018, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.927539} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 612.975921] env[63028]: INFO nova.virt.vmwareapi.ds_util [None req-39a7ff7c-fa43-4839-8094-07bf564e5924 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] 5a330ed9-c106-49f2-b524-a424e717b5ce/f2ba2026-3f4b-431c-97c1-c4ba582a9907-rescue.vmdk. 
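The "Acquiring lock ... acquired ... released" entries above are emitted by oslo.concurrency's lockutils helpers. The following is a minimal sketch of that usage pattern, assuming the lockutils.lock context manager and lockutils.synchronized decorator; the lock names and function bodies are made up for illustration and only mimic the naming seen in the log.

from oslo_concurrency import lockutils

# Sketch of the per-instance event lock pattern ("<uuid>-events" in the log).
@lockutils.synchronized("example-instance-uuid-events")
def pop_event():
    # Critical section: at most one thread in this process runs this at a time.
    return None

# Sketch of the network-info cache lock pattern ("refresh_cache-<uuid>").
def refresh_cache(instance_uuid):
    with lockutils.lock("refresh_cache-%s" % instance_uuid):
        # Look up the port in Neutron and update the instance's network info
        # cache while holding the per-instance cache lock.
        pass
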
[ 612.976793] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b52603a-712f-4ce2-9689-2ed586d8b30a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.011275] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-39a7ff7c-fa43-4839-8094-07bf564e5924 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: 5a330ed9-c106-49f2-b524-a424e717b5ce] Reconfiguring VM instance instance-0000000d to attach disk [datastore2] 5a330ed9-c106-49f2-b524-a424e717b5ce/f2ba2026-3f4b-431c-97c1-c4ba582a9907-rescue.vmdk or device None with type thin {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 613.012534] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d69e188b-77f6-4011-b97e-65fbf617b23f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.036875] env[63028]: DEBUG oslo_vmware.api [None req-39a7ff7c-fa43-4839-8094-07bf564e5924 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Waiting for the task: (returnval){ [ 613.036875] env[63028]: value = "task-2735025" [ 613.036875] env[63028]: _type = "Task" [ 613.036875] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 613.051337] env[63028]: DEBUG oslo_vmware.api [None req-39a7ff7c-fa43-4839-8094-07bf564e5924 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Task: {'id': task-2735025, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.077554] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-53602f48-db0a-4b2c-b359-fb6e89b1101f tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] [instance: c9cc1ac7-06c6-415b-86ce-daf4849bfc05] Creating Snapshot of the VM instance {{(pid=63028) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 613.078103] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-feda9335-f2e5-47f6-ac80-48a74049743a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.087743] env[63028]: DEBUG oslo_vmware.api [None req-53602f48-db0a-4b2c-b359-fb6e89b1101f tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Waiting for the task: (returnval){ [ 613.087743] env[63028]: value = "task-2735026" [ 613.087743] env[63028]: _type = "Task" [ 613.087743] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 613.108090] env[63028]: DEBUG oslo_vmware.api [None req-53602f48-db0a-4b2c-b359-fb6e89b1101f tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Task: {'id': task-2735026, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.108435] env[63028]: DEBUG oslo_vmware.api [None req-c4ea86be-f4b7-4838-b9cd-5f32319112a9 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Task: {'id': task-2735020, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.083211} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 613.108708] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c4ea86be-f4b7-4838-b9cd-5f32319112a9 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] [instance: 0dbafad1-ab21-439d-bc8e-e447ac33304e] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 613.109597] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20cd6b75-304f-41f5-b45f-6af7a6217cfc {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.137703] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-c4ea86be-f4b7-4838-b9cd-5f32319112a9 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] [instance: 0dbafad1-ab21-439d-bc8e-e447ac33304e] Reconfiguring VM instance instance-0000000e to attach disk [datastore1] 0dbafad1-ab21-439d-bc8e-e447ac33304e/0dbafad1-ab21-439d-bc8e-e447ac33304e.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 613.138086] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c7b5e2a0-585a-461b-be16-3190548d6a01 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.161949] env[63028]: DEBUG oslo_vmware.api [None req-c4ea86be-f4b7-4838-b9cd-5f32319112a9 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Waiting for the task: (returnval){ [ 613.161949] env[63028]: value = "task-2735027" [ 613.161949] env[63028]: _type = "Task" [ 613.161949] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 613.172060] env[63028]: DEBUG oslo_vmware.api [None req-c4ea86be-f4b7-4838-b9cd-5f32319112a9 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Task: {'id': task-2735027, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.228787] env[63028]: DEBUG nova.scheduler.client.report [None req-f56ccc99-23c5-4ed6-8376-04ed81d169c1 tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 613.288160] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735023, 'name': CreateVM_Task} progress is 99%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.299800] env[63028]: DEBUG oslo_vmware.api [None req-78a1acb4-c1f1-436a-833a-acfe219f578d tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] Task: {'id': task-2735024, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.303593] env[63028]: DEBUG oslo_concurrency.lockutils [None req-154bc663-e50a-4893-813f-d703ddd97a79 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Releasing lock "refresh_cache-94b1bf30-0f9b-4197-99ff-6631a13ab2d1" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 613.304534] env[63028]: DEBUG nova.compute.manager [None req-154bc663-e50a-4893-813f-d703ddd97a79 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] [instance: 94b1bf30-0f9b-4197-99ff-6631a13ab2d1] Start destroying the instance on the hypervisor. {{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 613.304534] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-154bc663-e50a-4893-813f-d703ddd97a79 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] [instance: 94b1bf30-0f9b-4197-99ff-6631a13ab2d1] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 613.306164] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-737ef5d2-03f3-4654-8d97-17f7cb5ffadc {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.310366] env[63028]: DEBUG nova.compute.manager [None req-fe6cbe68-33e9-494c-908d-9d22e427dcda tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: f311a533-5c48-410b-ba3b-58f0032c8816] Start destroying the instance on the hypervisor. 
{{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 613.311084] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-fe6cbe68-33e9-494c-908d-9d22e427dcda tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: f311a533-5c48-410b-ba3b-58f0032c8816] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 613.311872] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4ba6ca4-8b00-43c1-93ad-b5fed79efdba {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.322145] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-154bc663-e50a-4893-813f-d703ddd97a79 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] [instance: 94b1bf30-0f9b-4197-99ff-6631a13ab2d1] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 613.324759] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-70d19ad3-d865-4f39-9745-e67f56c14072 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.326533] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-fe6cbe68-33e9-494c-908d-9d22e427dcda tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: f311a533-5c48-410b-ba3b-58f0032c8816] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 613.327389] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-62caa01d-d694-43d2-a154-a0c955b8f7bc {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.337102] env[63028]: DEBUG oslo_vmware.api [None req-154bc663-e50a-4893-813f-d703ddd97a79 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Waiting for the task: (returnval){ [ 613.337102] env[63028]: value = "task-2735028" [ 613.337102] env[63028]: _type = "Task" [ 613.337102] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 613.353926] env[63028]: DEBUG oslo_vmware.api [None req-154bc663-e50a-4893-813f-d703ddd97a79 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Task: {'id': task-2735028, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.407503] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-fe6cbe68-33e9-494c-908d-9d22e427dcda tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: f311a533-5c48-410b-ba3b-58f0032c8816] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 613.407503] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-fe6cbe68-33e9-494c-908d-9d22e427dcda tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: f311a533-5c48-410b-ba3b-58f0032c8816] Deleting contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 613.407682] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-fe6cbe68-33e9-494c-908d-9d22e427dcda tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Deleting the datastore file [datastore2] f311a533-5c48-410b-ba3b-58f0032c8816 {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 613.407940] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a218673d-5846-4360-b627-ccb58327bafc {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.419285] env[63028]: DEBUG oslo_vmware.api [None req-fe6cbe68-33e9-494c-908d-9d22e427dcda tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Waiting for the task: (returnval){ [ 613.419285] env[63028]: value = "task-2735030" [ 613.419285] env[63028]: _type = "Task" [ 613.419285] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 613.437794] env[63028]: DEBUG oslo_concurrency.lockutils [None req-fa9cf96e-1fa7-41d8-8fc5-4ef011c3de54 tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] Lock "4a782483-c24e-44db-b697-856c69cc4a13" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.775s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 613.438707] env[63028]: DEBUG oslo_vmware.api [None req-fe6cbe68-33e9-494c-908d-9d22e427dcda tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735030, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.548702] env[63028]: DEBUG oslo_vmware.api [None req-39a7ff7c-fa43-4839-8094-07bf564e5924 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Task: {'id': task-2735025, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.606584] env[63028]: DEBUG oslo_vmware.api [None req-53602f48-db0a-4b2c-b359-fb6e89b1101f tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Task: {'id': task-2735026, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.679601] env[63028]: DEBUG oslo_vmware.api [None req-c4ea86be-f4b7-4838-b9cd-5f32319112a9 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Task: {'id': task-2735027, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.740395] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f56ccc99-23c5-4ed6-8376-04ed81d169c1 tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.159s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 613.743321] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9e63bf4e-0a01-438b-b252-bf0645d18ec0 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.820s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 613.745751] env[63028]: INFO nova.compute.claims [None req-9e63bf4e-0a01-438b-b252-bf0645d18ec0 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] [instance: 9b1cd3c1-5e9a-43b6-9efb-1baf879ae0c0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 613.780345] env[63028]: INFO nova.scheduler.client.report [None req-f56ccc99-23c5-4ed6-8376-04ed81d169c1 tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] Deleted allocations for instance f80df630-327b-4923-a785-5d2e48fe1f19 [ 613.794995] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735023, 'name': CreateVM_Task, 'duration_secs': 0.554341} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 613.803260] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f3277886-4498-45c6-be68-e71d8293dc00] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 613.803882] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c7c3e8ac-1fec-453a-8f4f-76e9aae0d91a tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 613.803974] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c7c3e8ac-1fec-453a-8f4f-76e9aae0d91a tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 613.804384] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c7c3e8ac-1fec-453a-8f4f-76e9aae0d91a tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 613.805144] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d5b343fe-a64c-46eb-bc7a-d9c6f60278eb {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.811940] env[63028]: DEBUG oslo_vmware.api [None req-78a1acb4-c1f1-436a-833a-acfe219f578d tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] Task: {'id': task-2735024, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.821500] env[63028]: DEBUG oslo_vmware.api [None req-c7c3e8ac-1fec-453a-8f4f-76e9aae0d91a tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Waiting for the task: (returnval){ [ 613.821500] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52f67905-eec7-62af-cf7c-9ff773892a00" [ 613.821500] env[63028]: _type = "Task" [ 613.821500] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 613.832668] env[63028]: DEBUG oslo_vmware.api [None req-c7c3e8ac-1fec-453a-8f4f-76e9aae0d91a tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52f67905-eec7-62af-cf7c-9ff773892a00, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.854605] env[63028]: DEBUG oslo_vmware.api [None req-154bc663-e50a-4893-813f-d703ddd97a79 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Task: {'id': task-2735028, 'name': PowerOffVM_Task, 'duration_secs': 0.199076} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 613.855497] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-154bc663-e50a-4893-813f-d703ddd97a79 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] [instance: 94b1bf30-0f9b-4197-99ff-6631a13ab2d1] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 613.855497] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-154bc663-e50a-4893-813f-d703ddd97a79 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] [instance: 94b1bf30-0f9b-4197-99ff-6631a13ab2d1] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 613.855497] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a3407a32-a01c-414a-91fd-93b180851a26 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.908764] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-154bc663-e50a-4893-813f-d703ddd97a79 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] [instance: 94b1bf30-0f9b-4197-99ff-6631a13ab2d1] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 613.908980] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-154bc663-e50a-4893-813f-d703ddd97a79 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] [instance: 94b1bf30-0f9b-4197-99ff-6631a13ab2d1] Deleting contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 613.909180] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-154bc663-e50a-4893-813f-d703ddd97a79 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Deleting the datastore file [datastore2] 94b1bf30-0f9b-4197-99ff-6631a13ab2d1 {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 613.909477] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c3837a16-d19c-4614-8302-108627111d7c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.921451] env[63028]: DEBUG oslo_vmware.api [None req-154bc663-e50a-4893-813f-d703ddd97a79 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Waiting for the task: (returnval){ [ 613.921451] env[63028]: value = "task-2735033" [ 613.921451] env[63028]: _type = "Task" [ 613.921451] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 613.945843] env[63028]: DEBUG nova.compute.manager [None req-5a552dd6-9917-4ffe-8484-017f00b84b26 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] [instance: 86d5d264-7a7a-434b-a1c4-e9a004c0a034] Starting instance... {{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 613.949514] env[63028]: DEBUG oslo_vmware.api [None req-fe6cbe68-33e9-494c-908d-9d22e427dcda tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735030, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.328358} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 613.952416] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-fe6cbe68-33e9-494c-908d-9d22e427dcda tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 613.952625] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-fe6cbe68-33e9-494c-908d-9d22e427dcda tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: f311a533-5c48-410b-ba3b-58f0032c8816] Deleted contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 613.952800] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-fe6cbe68-33e9-494c-908d-9d22e427dcda tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: f311a533-5c48-410b-ba3b-58f0032c8816] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 613.952968] env[63028]: INFO nova.compute.manager [None req-fe6cbe68-33e9-494c-908d-9d22e427dcda tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: f311a533-5c48-410b-ba3b-58f0032c8816] Took 0.64 seconds to destroy the instance on the hypervisor. [ 613.953236] env[63028]: DEBUG oslo.service.loopingcall [None req-fe6cbe68-33e9-494c-908d-9d22e427dcda tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 613.953438] env[63028]: DEBUG oslo_vmware.api [None req-154bc663-e50a-4893-813f-d703ddd97a79 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Task: {'id': task-2735033, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.953640] env[63028]: DEBUG nova.compute.manager [-] [instance: f311a533-5c48-410b-ba3b-58f0032c8816] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 613.953727] env[63028]: DEBUG nova.network.neutron [-] [instance: f311a533-5c48-410b-ba3b-58f0032c8816] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 614.052517] env[63028]: DEBUG oslo_vmware.api [None req-39a7ff7c-fa43-4839-8094-07bf564e5924 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Task: {'id': task-2735025, 'name': ReconfigVM_Task, 'duration_secs': 0.759348} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 614.053121] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-39a7ff7c-fa43-4839-8094-07bf564e5924 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: 5a330ed9-c106-49f2-b524-a424e717b5ce] Reconfigured VM instance instance-0000000d to attach disk [datastore2] 5a330ed9-c106-49f2-b524-a424e717b5ce/f2ba2026-3f4b-431c-97c1-c4ba582a9907-rescue.vmdk or device None with type thin {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 614.054118] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67149d0a-3cb4-4390-b7a8-eebc5a0f888f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.085526] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e251945a-18b5-45d8-9ad8-a0cc17dd06c7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.105657] env[63028]: DEBUG oslo_vmware.api [None req-53602f48-db0a-4b2c-b359-fb6e89b1101f tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Task: {'id': task-2735026, 'name': CreateSnapshot_Task, 'duration_secs': 0.961764} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 614.108791] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-53602f48-db0a-4b2c-b359-fb6e89b1101f tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] [instance: c9cc1ac7-06c6-415b-86ce-daf4849bfc05] Created Snapshot of the VM instance {{(pid=63028) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 614.108791] env[63028]: DEBUG oslo_vmware.api [None req-39a7ff7c-fa43-4839-8094-07bf564e5924 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Waiting for the task: (returnval){ [ 614.108791] env[63028]: value = "task-2735034" [ 614.108791] env[63028]: _type = "Task" [ 614.108791] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 614.109519] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54c06f81-ff7f-4485-a445-1317e9b929de {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.131954] env[63028]: DEBUG oslo_vmware.api [None req-39a7ff7c-fa43-4839-8094-07bf564e5924 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Task: {'id': task-2735034, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.175170] env[63028]: DEBUG oslo_vmware.api [None req-c4ea86be-f4b7-4838-b9cd-5f32319112a9 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Task: {'id': task-2735027, 'name': ReconfigVM_Task, 'duration_secs': 0.602327} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 614.175444] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-c4ea86be-f4b7-4838-b9cd-5f32319112a9 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] [instance: 0dbafad1-ab21-439d-bc8e-e447ac33304e] Reconfigured VM instance instance-0000000e to attach disk [datastore1] 0dbafad1-ab21-439d-bc8e-e447ac33304e/0dbafad1-ab21-439d-bc8e-e447ac33304e.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 614.176118] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-443c1a3a-7712-4f32-820f-2ff7d23d2450 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.185909] env[63028]: DEBUG oslo_vmware.api [None req-c4ea86be-f4b7-4838-b9cd-5f32319112a9 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Waiting for the task: (returnval){ [ 614.185909] env[63028]: value = "task-2735035" [ 614.185909] env[63028]: _type = "Task" [ 614.185909] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 614.197204] env[63028]: DEBUG oslo_vmware.api [None req-c4ea86be-f4b7-4838-b9cd-5f32319112a9 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Task: {'id': task-2735035, 'name': Rename_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.309508] env[63028]: DEBUG oslo_vmware.api [None req-78a1acb4-c1f1-436a-833a-acfe219f578d tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] Task: {'id': task-2735024, 'name': PowerOnVM_Task, 'duration_secs': 1.05815} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 614.313135] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f56ccc99-23c5-4ed6-8376-04ed81d169c1 tempest-ImagesNegativeTestJSON-511377502 tempest-ImagesNegativeTestJSON-511377502-project-member] Lock "f80df630-327b-4923-a785-5d2e48fe1f19" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.957s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 614.313135] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-78a1acb4-c1f1-436a-833a-acfe219f578d tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] [instance: 44fca05f-51db-4252-bcf8-6bcad37a6147] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 614.313135] env[63028]: INFO nova.compute.manager [None req-78a1acb4-c1f1-436a-833a-acfe219f578d tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] [instance: 44fca05f-51db-4252-bcf8-6bcad37a6147] Took 11.16 seconds to spawn the instance on the hypervisor. 
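Editor's note: lock lines such as 'Acquiring lock ... by ...', 'acquired ... waited N s' and '"released" ... held N s' are emitted by oslo.concurrency's lockutils wrapper. A minimal sketch of that pattern, with an illustrative lock name and function body rather than Nova's actual code:

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def update_usage():
        # Only one thread of this process runs here at a time; the decorator's
        # inner wrapper logs the acquire/release messages with waited/held timings.
        pass

    # Equivalent ad-hoc form, e.g. serializing work keyed on an instance UUID:
    with lockutils.lock('f311a533-5c48-410b-ba3b-58f0032c8816'):
        update_usage()
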
[ 614.313135] env[63028]: DEBUG nova.compute.manager [None req-78a1acb4-c1f1-436a-833a-acfe219f578d tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] [instance: 44fca05f-51db-4252-bcf8-6bcad37a6147] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 614.313135] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a87dfb7-eee9-4403-a353-3aa838e44e7f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.334073] env[63028]: DEBUG oslo_vmware.api [None req-c7c3e8ac-1fec-453a-8f4f-76e9aae0d91a tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52f67905-eec7-62af-cf7c-9ff773892a00, 'name': SearchDatastore_Task, 'duration_secs': 0.019381} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 614.334348] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c7c3e8ac-1fec-453a-8f4f-76e9aae0d91a tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 614.334591] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c7c3e8ac-1fec-453a-8f4f-76e9aae0d91a tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: f3277886-4498-45c6-be68-e71d8293dc00] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 614.335260] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c7c3e8ac-1fec-453a-8f4f-76e9aae0d91a tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 614.335487] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c7c3e8ac-1fec-453a-8f4f-76e9aae0d91a tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 614.335630] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-c7c3e8ac-1fec-453a-8f4f-76e9aae0d91a tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 614.335906] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b4a8422a-a032-4c2d-8e9b-7e8ac5888d88 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.348560] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-c7c3e8ac-1fec-453a-8f4f-76e9aae0d91a tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] 
Created directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 614.348640] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c7c3e8ac-1fec-453a-8f4f-76e9aae0d91a tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 614.349396] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0fb722d9-2b8c-46bc-918a-b49ed78c6b8b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.356013] env[63028]: DEBUG oslo_vmware.api [None req-c7c3e8ac-1fec-453a-8f4f-76e9aae0d91a tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Waiting for the task: (returnval){ [ 614.356013] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]525b7db1-beb5-6d89-9c85-2c6f674ccfe5" [ 614.356013] env[63028]: _type = "Task" [ 614.356013] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 614.360802] env[63028]: DEBUG nova.compute.manager [None req-9afbf1dd-406f-4fbb-ac83-9f6278345562 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: e20ed04f-205b-4aa9-b8b6-e352cd237412] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 614.361673] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc6ef8d8-e2e1-4b31-9ba1-ad11473e0d6e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.372440] env[63028]: DEBUG oslo_vmware.api [None req-c7c3e8ac-1fec-453a-8f4f-76e9aae0d91a tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]525b7db1-beb5-6d89-9c85-2c6f674ccfe5, 'name': SearchDatastore_Task, 'duration_secs': 0.013674} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 614.377833] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b7594537-976f-4146-9bb9-5e23fe671226 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.385030] env[63028]: DEBUG oslo_vmware.api [None req-c7c3e8ac-1fec-453a-8f4f-76e9aae0d91a tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Waiting for the task: (returnval){ [ 614.385030] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5227e9d5-8ffd-77e2-c530-9eb7deae6049" [ 614.385030] env[63028]: _type = "Task" [ 614.385030] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 614.397090] env[63028]: DEBUG oslo_vmware.api [None req-c7c3e8ac-1fec-453a-8f4f-76e9aae0d91a tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5227e9d5-8ffd-77e2-c530-9eb7deae6049, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.435977] env[63028]: DEBUG oslo_vmware.api [None req-154bc663-e50a-4893-813f-d703ddd97a79 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Task: {'id': task-2735033, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.353853} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 614.436172] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-154bc663-e50a-4893-813f-d703ddd97a79 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 614.436334] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-154bc663-e50a-4893-813f-d703ddd97a79 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] [instance: 94b1bf30-0f9b-4197-99ff-6631a13ab2d1] Deleted contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 614.436585] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-154bc663-e50a-4893-813f-d703ddd97a79 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] [instance: 94b1bf30-0f9b-4197-99ff-6631a13ab2d1] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 614.436775] env[63028]: INFO nova.compute.manager [None req-154bc663-e50a-4893-813f-d703ddd97a79 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] [instance: 94b1bf30-0f9b-4197-99ff-6631a13ab2d1] Took 1.13 seconds to destroy the instance on the hypervisor. [ 614.437316] env[63028]: DEBUG oslo.service.loopingcall [None req-154bc663-e50a-4893-813f-d703ddd97a79 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 614.437316] env[63028]: DEBUG nova.compute.manager [-] [instance: 94b1bf30-0f9b-4197-99ff-6631a13ab2d1] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 614.437479] env[63028]: DEBUG nova.network.neutron [-] [instance: 94b1bf30-0f9b-4197-99ff-6631a13ab2d1] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 614.460803] env[63028]: DEBUG nova.network.neutron [-] [instance: 94b1bf30-0f9b-4197-99ff-6631a13ab2d1] Instance cache missing network info. 
{{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 614.474932] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5a552dd6-9917-4ffe-8484-017f00b84b26 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 614.542869] env[63028]: DEBUG nova.compute.manager [req-9aa46dec-5a8f-463b-bd35-e5ec7448060e req-74820a13-0918-4da7-b12c-c386e7ffad91 service nova] [instance: f3277886-4498-45c6-be68-e71d8293dc00] Received event network-vif-plugged-08a61148-5b3a-4bb0-a130-3eda62d6bf7c {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 614.543122] env[63028]: DEBUG oslo_concurrency.lockutils [req-9aa46dec-5a8f-463b-bd35-e5ec7448060e req-74820a13-0918-4da7-b12c-c386e7ffad91 service nova] Acquiring lock "f3277886-4498-45c6-be68-e71d8293dc00-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 614.545131] env[63028]: DEBUG oslo_concurrency.lockutils [req-9aa46dec-5a8f-463b-bd35-e5ec7448060e req-74820a13-0918-4da7-b12c-c386e7ffad91 service nova] Lock "f3277886-4498-45c6-be68-e71d8293dc00-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 614.545131] env[63028]: DEBUG oslo_concurrency.lockutils [req-9aa46dec-5a8f-463b-bd35-e5ec7448060e req-74820a13-0918-4da7-b12c-c386e7ffad91 service nova] Lock "f3277886-4498-45c6-be68-e71d8293dc00-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 614.545131] env[63028]: DEBUG nova.compute.manager [req-9aa46dec-5a8f-463b-bd35-e5ec7448060e req-74820a13-0918-4da7-b12c-c386e7ffad91 service nova] [instance: f3277886-4498-45c6-be68-e71d8293dc00] No waiting events found dispatching network-vif-plugged-08a61148-5b3a-4bb0-a130-3eda62d6bf7c {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 614.545131] env[63028]: WARNING nova.compute.manager [req-9aa46dec-5a8f-463b-bd35-e5ec7448060e req-74820a13-0918-4da7-b12c-c386e7ffad91 service nova] [instance: f3277886-4498-45c6-be68-e71d8293dc00] Received unexpected event network-vif-plugged-08a61148-5b3a-4bb0-a130-3eda62d6bf7c for instance with vm_state building and task_state spawning. [ 614.545131] env[63028]: DEBUG nova.compute.manager [req-9aa46dec-5a8f-463b-bd35-e5ec7448060e req-74820a13-0918-4da7-b12c-c386e7ffad91 service nova] [instance: f3277886-4498-45c6-be68-e71d8293dc00] Received event network-changed-08a61148-5b3a-4bb0-a130-3eda62d6bf7c {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 614.545506] env[63028]: DEBUG nova.compute.manager [req-9aa46dec-5a8f-463b-bd35-e5ec7448060e req-74820a13-0918-4da7-b12c-c386e7ffad91 service nova] [instance: f3277886-4498-45c6-be68-e71d8293dc00] Refreshing instance network info cache due to event network-changed-08a61148-5b3a-4bb0-a130-3eda62d6bf7c. 
{{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 614.545506] env[63028]: DEBUG oslo_concurrency.lockutils [req-9aa46dec-5a8f-463b-bd35-e5ec7448060e req-74820a13-0918-4da7-b12c-c386e7ffad91 service nova] Acquiring lock "refresh_cache-f3277886-4498-45c6-be68-e71d8293dc00" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 614.545809] env[63028]: DEBUG oslo_concurrency.lockutils [req-9aa46dec-5a8f-463b-bd35-e5ec7448060e req-74820a13-0918-4da7-b12c-c386e7ffad91 service nova] Acquired lock "refresh_cache-f3277886-4498-45c6-be68-e71d8293dc00" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 614.545809] env[63028]: DEBUG nova.network.neutron [req-9aa46dec-5a8f-463b-bd35-e5ec7448060e req-74820a13-0918-4da7-b12c-c386e7ffad91 service nova] [instance: f3277886-4498-45c6-be68-e71d8293dc00] Refreshing network info cache for port 08a61148-5b3a-4bb0-a130-3eda62d6bf7c {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 614.631282] env[63028]: DEBUG oslo_vmware.api [None req-39a7ff7c-fa43-4839-8094-07bf564e5924 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Task: {'id': task-2735034, 'name': ReconfigVM_Task, 'duration_secs': 0.363803} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 614.632477] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-39a7ff7c-fa43-4839-8094-07bf564e5924 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: 5a330ed9-c106-49f2-b524-a424e717b5ce] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 614.644059] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-53602f48-db0a-4b2c-b359-fb6e89b1101f tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] [instance: c9cc1ac7-06c6-415b-86ce-daf4849bfc05] Creating linked-clone VM from snapshot {{(pid=63028) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 614.644455] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-37400f0a-d32d-4162-a039-43a7dba4f7f1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.646513] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-e238deb9-2c99-4387-aad7-390f55a0a203 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.656557] env[63028]: DEBUG oslo_vmware.api [None req-53602f48-db0a-4b2c-b359-fb6e89b1101f tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Waiting for the task: (returnval){ [ 614.656557] env[63028]: value = "task-2735037" [ 614.656557] env[63028]: _type = "Task" [ 614.656557] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 614.658191] env[63028]: DEBUG oslo_vmware.api [None req-39a7ff7c-fa43-4839-8094-07bf564e5924 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Waiting for the task: (returnval){ [ 614.658191] env[63028]: value = "task-2735036" [ 614.658191] env[63028]: _type = "Task" [ 614.658191] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 614.671954] env[63028]: DEBUG oslo_vmware.api [None req-53602f48-db0a-4b2c-b359-fb6e89b1101f tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Task: {'id': task-2735037, 'name': CloneVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.675683] env[63028]: DEBUG oslo_vmware.api [None req-39a7ff7c-fa43-4839-8094-07bf564e5924 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Task: {'id': task-2735036, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.699855] env[63028]: DEBUG oslo_vmware.api [None req-c4ea86be-f4b7-4838-b9cd-5f32319112a9 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Task: {'id': task-2735035, 'name': Rename_Task, 'duration_secs': 0.343775} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 614.699855] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4ea86be-f4b7-4838-b9cd-5f32319112a9 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] [instance: 0dbafad1-ab21-439d-bc8e-e447ac33304e] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 614.704160] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e2efc3e6-72c9-4686-a826-ee5a9f181058 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.713190] env[63028]: DEBUG oslo_vmware.api [None req-c4ea86be-f4b7-4838-b9cd-5f32319112a9 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Waiting for the task: (returnval){ [ 614.713190] env[63028]: value = "task-2735038" [ 614.713190] env[63028]: _type = "Task" [ 614.713190] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 614.732893] env[63028]: DEBUG oslo_vmware.api [None req-c4ea86be-f4b7-4838-b9cd-5f32319112a9 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Task: {'id': task-2735038, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.797917] env[63028]: DEBUG nova.network.neutron [-] [instance: f311a533-5c48-410b-ba3b-58f0032c8816] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 614.837066] env[63028]: INFO nova.compute.manager [None req-78a1acb4-c1f1-436a-833a-acfe219f578d tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] [instance: 44fca05f-51db-4252-bcf8-6bcad37a6147] Took 26.56 seconds to build instance. [ 614.884182] env[63028]: INFO nova.compute.manager [None req-9afbf1dd-406f-4fbb-ac83-9f6278345562 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: e20ed04f-205b-4aa9-b8b6-e352cd237412] instance snapshotting [ 614.886724] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b68ce18-bf94-4517-89f0-460a46544588 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.902927] env[63028]: DEBUG oslo_vmware.api [None req-c7c3e8ac-1fec-453a-8f4f-76e9aae0d91a tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5227e9d5-8ffd-77e2-c530-9eb7deae6049, 'name': SearchDatastore_Task, 'duration_secs': 0.013296} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 614.923518] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c7c3e8ac-1fec-453a-8f4f-76e9aae0d91a tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 614.923850] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7c3e8ac-1fec-453a-8f4f-76e9aae0d91a tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] f3277886-4498-45c6-be68-e71d8293dc00/f3277886-4498-45c6-be68-e71d8293dc00.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 614.927385] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b3993e42-9eaa-47b3-adc8-3cca07b1a51c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.929939] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77386663-df62-466a-b518-8e2469b7e728 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.942517] env[63028]: DEBUG oslo_vmware.api [None req-c7c3e8ac-1fec-453a-8f4f-76e9aae0d91a tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Waiting for the task: (returnval){ [ 614.942517] env[63028]: value = "task-2735039" [ 614.942517] env[63028]: _type = "Task" [ 614.942517] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 614.957718] env[63028]: DEBUG oslo_vmware.api [None req-c7c3e8ac-1fec-453a-8f4f-76e9aae0d91a tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Task: {'id': task-2735039, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.967286] env[63028]: DEBUG nova.network.neutron [-] [instance: 94b1bf30-0f9b-4197-99ff-6631a13ab2d1] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 615.173015] env[63028]: DEBUG nova.compute.manager [req-dfdee910-244f-4e8b-a567-e2f000df3bfd req-cb228311-9c08-482d-9a8d-4c5a674bbd01 service nova] [instance: 413f7fea-452b-463f-b396-cdd29e8ffa91] Received event network-changed-892c8e3d-851e-4ad1-bbab-938e49f4cba1 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 615.173295] env[63028]: DEBUG nova.compute.manager [req-dfdee910-244f-4e8b-a567-e2f000df3bfd req-cb228311-9c08-482d-9a8d-4c5a674bbd01 service nova] [instance: 413f7fea-452b-463f-b396-cdd29e8ffa91] Refreshing instance network info cache due to event network-changed-892c8e3d-851e-4ad1-bbab-938e49f4cba1. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 615.173654] env[63028]: DEBUG oslo_concurrency.lockutils [req-dfdee910-244f-4e8b-a567-e2f000df3bfd req-cb228311-9c08-482d-9a8d-4c5a674bbd01 service nova] Acquiring lock "refresh_cache-413f7fea-452b-463f-b396-cdd29e8ffa91" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 615.173932] env[63028]: DEBUG oslo_concurrency.lockutils [req-dfdee910-244f-4e8b-a567-e2f000df3bfd req-cb228311-9c08-482d-9a8d-4c5a674bbd01 service nova] Acquired lock "refresh_cache-413f7fea-452b-463f-b396-cdd29e8ffa91" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 615.174249] env[63028]: DEBUG nova.network.neutron [req-dfdee910-244f-4e8b-a567-e2f000df3bfd req-cb228311-9c08-482d-9a8d-4c5a674bbd01 service nova] [instance: 413f7fea-452b-463f-b396-cdd29e8ffa91] Refreshing network info cache for port 892c8e3d-851e-4ad1-bbab-938e49f4cba1 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 615.180957] env[63028]: DEBUG oslo_vmware.api [None req-53602f48-db0a-4b2c-b359-fb6e89b1101f tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Task: {'id': task-2735037, 'name': CloneVM_Task} progress is 94%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 615.189182] env[63028]: DEBUG oslo_vmware.api [None req-39a7ff7c-fa43-4839-8094-07bf564e5924 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Task: {'id': task-2735036, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 615.221365] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c62dbb04-bda2-4f0b-9942-c38a36376853 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Acquiring lock "b9db75ba-6832-45e8-8faf-d1cdaa7dabdd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 615.221365] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c62dbb04-bda2-4f0b-9942-c38a36376853 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Lock "b9db75ba-6832-45e8-8faf-d1cdaa7dabdd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 615.236088] env[63028]: DEBUG oslo_vmware.api [None req-c4ea86be-f4b7-4838-b9cd-5f32319112a9 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Task: {'id': task-2735038, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 615.289404] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01bf6fbc-7de6-4be3-b5b4-2f5fff22c7ce {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.300779] env[63028]: INFO nova.compute.manager [-] [instance: f311a533-5c48-410b-ba3b-58f0032c8816] Took 1.35 seconds to deallocate network for instance. 
[ 615.304293] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37f705d9-4528-40ed-a53e-88e3656fd768 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.346044] env[63028]: DEBUG oslo_concurrency.lockutils [None req-78a1acb4-c1f1-436a-833a-acfe219f578d tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] Lock "44fca05f-51db-4252-bcf8-6bcad37a6147" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.155s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 615.349642] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37ccdf7a-4f0f-46eb-9a4e-a43ac05f6707 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.362526] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb5d5760-c28c-410d-a7b1-9c711b789188 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.382056] env[63028]: DEBUG nova.compute.provider_tree [None req-9e63bf4e-0a01-438b-b252-bf0645d18ec0 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 615.444687] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-9afbf1dd-406f-4fbb-ac83-9f6278345562 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: e20ed04f-205b-4aa9-b8b6-e352cd237412] Creating Snapshot of the VM instance {{(pid=63028) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 615.444937] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-f901baba-9679-46ea-ae0b-63030cb13e87 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.460026] env[63028]: DEBUG oslo_vmware.api [None req-c7c3e8ac-1fec-453a-8f4f-76e9aae0d91a tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Task: {'id': task-2735039, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 615.462141] env[63028]: DEBUG oslo_vmware.api [None req-9afbf1dd-406f-4fbb-ac83-9f6278345562 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Waiting for the task: (returnval){ [ 615.462141] env[63028]: value = "task-2735040" [ 615.462141] env[63028]: _type = "Task" [ 615.462141] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 615.473333] env[63028]: INFO nova.compute.manager [-] [instance: 94b1bf30-0f9b-4197-99ff-6631a13ab2d1] Took 1.04 seconds to deallocate network for instance. 
[ 615.473695] env[63028]: DEBUG oslo_vmware.api [None req-9afbf1dd-406f-4fbb-ac83-9f6278345562 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Task: {'id': task-2735040, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 615.528261] env[63028]: DEBUG nova.network.neutron [req-9aa46dec-5a8f-463b-bd35-e5ec7448060e req-74820a13-0918-4da7-b12c-c386e7ffad91 service nova] [instance: f3277886-4498-45c6-be68-e71d8293dc00] Updated VIF entry in instance network info cache for port 08a61148-5b3a-4bb0-a130-3eda62d6bf7c. {{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 615.528261] env[63028]: DEBUG nova.network.neutron [req-9aa46dec-5a8f-463b-bd35-e5ec7448060e req-74820a13-0918-4da7-b12c-c386e7ffad91 service nova] [instance: f3277886-4498-45c6-be68-e71d8293dc00] Updating instance_info_cache with network_info: [{"id": "08a61148-5b3a-4bb0-a130-3eda62d6bf7c", "address": "fa:16:3e:8e:67:6c", "network": {"id": "47c482bc-2ff1-431d-8910-0bf36def79a2", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.130", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "96661ac8d4f04d6e97eea4809b444133", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56136ef6-99d7-4562-9a9f-d66fec951c5c", "external-id": "nsx-vlan-transportzone-32", "segmentation_id": 32, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap08a61148-5b", "ovs_interfaceid": "08a61148-5b3a-4bb0-a130-3eda62d6bf7c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 615.673118] env[63028]: DEBUG oslo_vmware.api [None req-53602f48-db0a-4b2c-b359-fb6e89b1101f tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Task: {'id': task-2735037, 'name': CloneVM_Task} progress is 94%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 615.684483] env[63028]: DEBUG oslo_vmware.api [None req-39a7ff7c-fa43-4839-8094-07bf564e5924 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Task: {'id': task-2735036, 'name': PowerOnVM_Task, 'duration_secs': 0.554702} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 615.684483] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-39a7ff7c-fa43-4839-8094-07bf564e5924 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: 5a330ed9-c106-49f2-b524-a424e717b5ce] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 615.688587] env[63028]: DEBUG nova.compute.manager [None req-39a7ff7c-fa43-4839-8094-07bf564e5924 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: 5a330ed9-c106-49f2-b524-a424e717b5ce] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 615.688853] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85905bd0-ec46-4db1-a38f-1f41d9210413 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.726322] env[63028]: DEBUG oslo_vmware.api [None req-c4ea86be-f4b7-4838-b9cd-5f32319112a9 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Task: {'id': task-2735038, 'name': PowerOnVM_Task, 'duration_secs': 0.684292} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 615.726648] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4ea86be-f4b7-4838-b9cd-5f32319112a9 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] [instance: 0dbafad1-ab21-439d-bc8e-e447ac33304e] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 615.727521] env[63028]: DEBUG nova.compute.manager [None req-c4ea86be-f4b7-4838-b9cd-5f32319112a9 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] [instance: 0dbafad1-ab21-439d-bc8e-e447ac33304e] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 615.728119] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-574ee5b5-cdbf-4be7-9e0d-91cc19a09bc5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.814417] env[63028]: DEBUG oslo_concurrency.lockutils [None req-fe6cbe68-33e9-494c-908d-9d22e427dcda tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 615.855104] env[63028]: DEBUG nova.compute.manager [None req-b13b0bec-8617-40ed-b3d4-47ce3e1eeef1 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: ba57ed92-aaef-460c-bd45-d0cbe09e4615] Starting instance... 
{{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 615.886305] env[63028]: DEBUG nova.scheduler.client.report [None req-9e63bf4e-0a01-438b-b252-bf0645d18ec0 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 615.957382] env[63028]: DEBUG oslo_vmware.api [None req-c7c3e8ac-1fec-453a-8f4f-76e9aae0d91a tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Task: {'id': task-2735039, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.680319} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 615.957640] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7c3e8ac-1fec-453a-8f4f-76e9aae0d91a tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] f3277886-4498-45c6-be68-e71d8293dc00/f3277886-4498-45c6-be68-e71d8293dc00.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 615.957849] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c7c3e8ac-1fec-453a-8f4f-76e9aae0d91a tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: f3277886-4498-45c6-be68-e71d8293dc00] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 615.958754] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7c8256ad-b030-40a8-9d92-80489255ecbf {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.968442] env[63028]: DEBUG oslo_vmware.api [None req-c7c3e8ac-1fec-453a-8f4f-76e9aae0d91a tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Waiting for the task: (returnval){ [ 615.968442] env[63028]: value = "task-2735041" [ 615.968442] env[63028]: _type = "Task" [ 615.968442] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 615.974992] env[63028]: DEBUG oslo_vmware.api [None req-9afbf1dd-406f-4fbb-ac83-9f6278345562 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Task: {'id': task-2735040, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 615.981033] env[63028]: DEBUG oslo_concurrency.lockutils [None req-154bc663-e50a-4893-813f-d703ddd97a79 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 615.985348] env[63028]: DEBUG oslo_vmware.api [None req-c7c3e8ac-1fec-453a-8f4f-76e9aae0d91a tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Task: {'id': task-2735041, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 616.032854] env[63028]: DEBUG oslo_concurrency.lockutils [req-9aa46dec-5a8f-463b-bd35-e5ec7448060e req-74820a13-0918-4da7-b12c-c386e7ffad91 service nova] Releasing lock "refresh_cache-f3277886-4498-45c6-be68-e71d8293dc00" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 616.172739] env[63028]: DEBUG oslo_vmware.api [None req-53602f48-db0a-4b2c-b359-fb6e89b1101f tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Task: {'id': task-2735037, 'name': CloneVM_Task} progress is 100%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 616.252553] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c4ea86be-f4b7-4838-b9cd-5f32319112a9 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 616.368516] env[63028]: DEBUG nova.network.neutron [req-dfdee910-244f-4e8b-a567-e2f000df3bfd req-cb228311-9c08-482d-9a8d-4c5a674bbd01 service nova] [instance: 413f7fea-452b-463f-b396-cdd29e8ffa91] Updated VIF entry in instance network info cache for port 892c8e3d-851e-4ad1-bbab-938e49f4cba1. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 616.368879] env[63028]: DEBUG nova.network.neutron [req-dfdee910-244f-4e8b-a567-e2f000df3bfd req-cb228311-9c08-482d-9a8d-4c5a674bbd01 service nova] [instance: 413f7fea-452b-463f-b396-cdd29e8ffa91] Updating instance_info_cache with network_info: [{"id": "892c8e3d-851e-4ad1-bbab-938e49f4cba1", "address": "fa:16:3e:2c:e4:c1", "network": {"id": "5f8a710c-b049-40fe-8cc4-00c97e954fd9", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1979325486-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.149", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "381de553d9da4c94b923d790c12a28a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ea16b8c-ebf6-4bd4-ab67-ddb472e7b460", "external-id": "nsx-vlan-transportzone-156", "segmentation_id": 156, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap892c8e3d-85", "ovs_interfaceid": "892c8e3d-851e-4ad1-bbab-938e49f4cba1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 616.375138] env[63028]: DEBUG oslo_concurrency.lockutils [None req-b13b0bec-8617-40ed-b3d4-47ce3e1eeef1 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 616.396658] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9e63bf4e-0a01-438b-b252-bf0645d18ec0 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.654s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 616.397254] env[63028]: DEBUG nova.compute.manager [None req-9e63bf4e-0a01-438b-b252-bf0645d18ec0 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] [instance: 9b1cd3c1-5e9a-43b6-9efb-1baf879ae0c0] Start building networks asynchronously for instance. 
{{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 616.399821] env[63028]: DEBUG oslo_concurrency.lockutils [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.764s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 616.401235] env[63028]: INFO nova.compute.claims [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] [instance: ddf20137-4d63-4c7a-b519-445719265e1d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 616.432254] env[63028]: DEBUG oslo_concurrency.lockutils [None req-cb9a34be-b0fe-4e26-a404-5da53e8a940a tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] Acquiring lock "8a09beac-4b54-4fbb-9bac-3dcfe4c21fb3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 616.433033] env[63028]: DEBUG oslo_concurrency.lockutils [None req-cb9a34be-b0fe-4e26-a404-5da53e8a940a tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] Lock "8a09beac-4b54-4fbb-9bac-3dcfe4c21fb3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 616.475856] env[63028]: DEBUG oslo_vmware.api [None req-9afbf1dd-406f-4fbb-ac83-9f6278345562 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Task: {'id': task-2735040, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 616.480765] env[63028]: DEBUG oslo_vmware.api [None req-c7c3e8ac-1fec-453a-8f4f-76e9aae0d91a tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Task: {'id': task-2735041, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 616.672063] env[63028]: DEBUG oslo_vmware.api [None req-53602f48-db0a-4b2c-b359-fb6e89b1101f tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Task: {'id': task-2735037, 'name': CloneVM_Task, 'duration_secs': 1.596755} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 616.672063] env[63028]: INFO nova.virt.vmwareapi.vmops [None req-53602f48-db0a-4b2c-b359-fb6e89b1101f tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] [instance: c9cc1ac7-06c6-415b-86ce-daf4849bfc05] Created linked-clone VM from snapshot [ 616.674211] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7cacf4f-3152-4a20-9552-a6f5312b152b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.685723] env[63028]: DEBUG nova.virt.vmwareapi.images [None req-53602f48-db0a-4b2c-b359-fb6e89b1101f tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] [instance: c9cc1ac7-06c6-415b-86ce-daf4849bfc05] Uploading image f9287d9b-d080-4cd4-9e72-70de6b64cc6d {{(pid=63028) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 616.719541] env[63028]: DEBUG oslo_vmware.rw_handles [None req-53602f48-db0a-4b2c-b359-fb6e89b1101f tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 616.719541] env[63028]: value = "vm-550635" [ 616.719541] env[63028]: _type = "VirtualMachine" [ 616.719541] env[63028]: }. {{(pid=63028) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 616.720053] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-697222c1-5875-4288-9795-cc0e2998db29 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.729508] env[63028]: DEBUG oslo_vmware.rw_handles [None req-53602f48-db0a-4b2c-b359-fb6e89b1101f tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Lease: (returnval){ [ 616.729508] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5217ac54-c8a4-eac4-6e6b-9d5f78089ea1" [ 616.729508] env[63028]: _type = "HttpNfcLease" [ 616.729508] env[63028]: } obtained for exporting VM: (result){ [ 616.729508] env[63028]: value = "vm-550635" [ 616.729508] env[63028]: _type = "VirtualMachine" [ 616.729508] env[63028]: }. {{(pid=63028) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 616.729864] env[63028]: DEBUG oslo_vmware.api [None req-53602f48-db0a-4b2c-b359-fb6e89b1101f tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Waiting for the lease: (returnval){ [ 616.729864] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5217ac54-c8a4-eac4-6e6b-9d5f78089ea1" [ 616.729864] env[63028]: _type = "HttpNfcLease" [ 616.729864] env[63028]: } to be ready. {{(pid=63028) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 616.737761] env[63028]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 616.737761] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5217ac54-c8a4-eac4-6e6b-9d5f78089ea1" [ 616.737761] env[63028]: _type = "HttpNfcLease" [ 616.737761] env[63028]: } is initializing. 
{{(pid=63028) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 616.871863] env[63028]: DEBUG oslo_concurrency.lockutils [req-dfdee910-244f-4e8b-a567-e2f000df3bfd req-cb228311-9c08-482d-9a8d-4c5a674bbd01 service nova] Releasing lock "refresh_cache-413f7fea-452b-463f-b396-cdd29e8ffa91" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 616.907024] env[63028]: DEBUG nova.compute.utils [None req-9e63bf4e-0a01-438b-b252-bf0645d18ec0 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 616.909331] env[63028]: DEBUG nova.compute.manager [None req-9e63bf4e-0a01-438b-b252-bf0645d18ec0 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] [instance: 9b1cd3c1-5e9a-43b6-9efb-1baf879ae0c0] Allocating IP information in the background. {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 616.909764] env[63028]: DEBUG nova.network.neutron [None req-9e63bf4e-0a01-438b-b252-bf0645d18ec0 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] [instance: 9b1cd3c1-5e9a-43b6-9efb-1baf879ae0c0] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 616.976607] env[63028]: DEBUG nova.policy [None req-9e63bf4e-0a01-438b-b252-bf0645d18ec0 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bc93056b710a46e2b2f3485780719323', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '178b95ba550d453db2b9868e72a8c93f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 616.982897] env[63028]: DEBUG oslo_vmware.api [None req-9afbf1dd-406f-4fbb-ac83-9f6278345562 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Task: {'id': task-2735040, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 616.990117] env[63028]: DEBUG oslo_vmware.api [None req-c7c3e8ac-1fec-453a-8f4f-76e9aae0d91a tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Task: {'id': task-2735041, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.632308} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 616.990117] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c7c3e8ac-1fec-453a-8f4f-76e9aae0d91a tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: f3277886-4498-45c6-be68-e71d8293dc00] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 616.991827] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d052f44b-420d-4cc2-8d3b-44eef7a65837 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.018817] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-c7c3e8ac-1fec-453a-8f4f-76e9aae0d91a tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: f3277886-4498-45c6-be68-e71d8293dc00] Reconfiguring VM instance instance-00000013 to attach disk [datastore1] f3277886-4498-45c6-be68-e71d8293dc00/f3277886-4498-45c6-be68-e71d8293dc00.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 617.019182] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-73296e3c-6187-4e4f-bd17-1ecb670a3056 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.043029] env[63028]: DEBUG oslo_vmware.api [None req-c7c3e8ac-1fec-453a-8f4f-76e9aae0d91a tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Waiting for the task: (returnval){ [ 617.043029] env[63028]: value = "task-2735043" [ 617.043029] env[63028]: _type = "Task" [ 617.043029] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 617.056295] env[63028]: DEBUG oslo_vmware.api [None req-c7c3e8ac-1fec-453a-8f4f-76e9aae0d91a tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Task: {'id': task-2735043, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 617.240252] env[63028]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 617.240252] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5217ac54-c8a4-eac4-6e6b-9d5f78089ea1" [ 617.240252] env[63028]: _type = "HttpNfcLease" [ 617.240252] env[63028]: } is ready. {{(pid=63028) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 617.240976] env[63028]: DEBUG oslo_vmware.rw_handles [None req-53602f48-db0a-4b2c-b359-fb6e89b1101f tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 617.240976] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5217ac54-c8a4-eac4-6e6b-9d5f78089ea1" [ 617.240976] env[63028]: _type = "HttpNfcLease" [ 617.240976] env[63028]: }. 
{{(pid=63028) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 617.241934] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36c05659-e938-43d3-be74-f4b5f79962d3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.252230] env[63028]: DEBUG oslo_vmware.rw_handles [None req-53602f48-db0a-4b2c-b359-fb6e89b1101f tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52659d9a-1c15-d407-7c5d-017202ed6fa4/disk-0.vmdk from lease info. {{(pid=63028) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 617.252572] env[63028]: DEBUG oslo_vmware.rw_handles [None req-53602f48-db0a-4b2c-b359-fb6e89b1101f tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52659d9a-1c15-d407-7c5d-017202ed6fa4/disk-0.vmdk for reading. {{(pid=63028) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 617.368837] env[63028]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-b36e89cd-e99b-436e-9bb7-f6b5e8aa4398 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.419140] env[63028]: DEBUG nova.compute.manager [None req-9e63bf4e-0a01-438b-b252-bf0645d18ec0 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] [instance: 9b1cd3c1-5e9a-43b6-9efb-1baf879ae0c0] Start building block device mappings for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 617.481967] env[63028]: DEBUG oslo_vmware.api [None req-9afbf1dd-406f-4fbb-ac83-9f6278345562 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Task: {'id': task-2735040, 'name': CreateSnapshot_Task, 'duration_secs': 1.634709} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 617.482288] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-9afbf1dd-406f-4fbb-ac83-9f6278345562 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: e20ed04f-205b-4aa9-b8b6-e352cd237412] Created Snapshot of the VM instance {{(pid=63028) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 617.483870] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81a32877-31a0-403e-91f0-3be90e328e45 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.557023] env[63028]: DEBUG oslo_vmware.api [None req-c7c3e8ac-1fec-453a-8f4f-76e9aae0d91a tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Task: {'id': task-2735043, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 617.748186] env[63028]: DEBUG nova.network.neutron [None req-9e63bf4e-0a01-438b-b252-bf0645d18ec0 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] [instance: 9b1cd3c1-5e9a-43b6-9efb-1baf879ae0c0] Successfully created port: 8f21ea1a-db48-4941-98ae-796e9dfcf6b1 {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 618.000557] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-837c307e-1384-4316-845f-6110b36bba22 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.012508] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-9afbf1dd-406f-4fbb-ac83-9f6278345562 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: e20ed04f-205b-4aa9-b8b6-e352cd237412] Creating linked-clone VM from snapshot {{(pid=63028) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 618.013232] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-7de57726-cf89-4286-812e-0bb4d8012517 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.026614] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af823472-99a3-4fed-a720-fb36e05c7d04 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.031659] env[63028]: DEBUG oslo_vmware.api [None req-9afbf1dd-406f-4fbb-ac83-9f6278345562 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Waiting for the task: (returnval){ [ 618.031659] env[63028]: value = "task-2735044" [ 618.031659] env[63028]: _type = "Task" [ 618.031659] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 618.078773] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4cb6bc8-d84a-4542-9503-91ca47a6a7c1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.086961] env[63028]: DEBUG oslo_vmware.api [None req-9afbf1dd-406f-4fbb-ac83-9f6278345562 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Task: {'id': task-2735044, 'name': CloneVM_Task} progress is 16%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 618.099405] env[63028]: DEBUG oslo_vmware.api [None req-c7c3e8ac-1fec-453a-8f4f-76e9aae0d91a tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Task: {'id': task-2735043, 'name': ReconfigVM_Task, 'duration_secs': 0.559181} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 618.101260] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfaef0da-294a-486b-af87-a3c4d97775ba {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.106484] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-c7c3e8ac-1fec-453a-8f4f-76e9aae0d91a tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: f3277886-4498-45c6-be68-e71d8293dc00] Reconfigured VM instance instance-00000013 to attach disk [datastore1] f3277886-4498-45c6-be68-e71d8293dc00/f3277886-4498-45c6-be68-e71d8293dc00.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 618.107273] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-50212138-ba8c-4ffc-be95-0af934b09d35 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.124189] env[63028]: DEBUG nova.compute.provider_tree [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Updating inventory in ProviderTree for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 618.129907] env[63028]: DEBUG oslo_vmware.api [None req-c7c3e8ac-1fec-453a-8f4f-76e9aae0d91a tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Waiting for the task: (returnval){ [ 618.129907] env[63028]: value = "task-2735045" [ 618.129907] env[63028]: _type = "Task" [ 618.129907] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 618.138811] env[63028]: DEBUG oslo_vmware.api [None req-c7c3e8ac-1fec-453a-8f4f-76e9aae0d91a tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Task: {'id': task-2735045, 'name': Rename_Task} progress is 10%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 618.440605] env[63028]: DEBUG nova.compute.manager [None req-9e63bf4e-0a01-438b-b252-bf0645d18ec0 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] [instance: 9b1cd3c1-5e9a-43b6-9efb-1baf879ae0c0] Start spawning the instance on the hypervisor. 
{{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 618.479395] env[63028]: DEBUG nova.virt.hardware [None req-9e63bf4e-0a01-438b-b252-bf0645d18ec0 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:54:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='222607185',id=33,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_0-411816257',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 618.479807] env[63028]: DEBUG nova.virt.hardware [None req-9e63bf4e-0a01-438b-b252-bf0645d18ec0 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 618.479940] env[63028]: DEBUG nova.virt.hardware [None req-9e63bf4e-0a01-438b-b252-bf0645d18ec0 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 618.480202] env[63028]: DEBUG nova.virt.hardware [None req-9e63bf4e-0a01-438b-b252-bf0645d18ec0 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 618.480355] env[63028]: DEBUG nova.virt.hardware [None req-9e63bf4e-0a01-438b-b252-bf0645d18ec0 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 618.480533] env[63028]: DEBUG nova.virt.hardware [None req-9e63bf4e-0a01-438b-b252-bf0645d18ec0 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 618.480748] env[63028]: DEBUG nova.virt.hardware [None req-9e63bf4e-0a01-438b-b252-bf0645d18ec0 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 618.481096] env[63028]: DEBUG nova.virt.hardware [None req-9e63bf4e-0a01-438b-b252-bf0645d18ec0 tempest-ServersWithSpecificFlavorTestJSON-243894015 
tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 618.481096] env[63028]: DEBUG nova.virt.hardware [None req-9e63bf4e-0a01-438b-b252-bf0645d18ec0 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 618.481282] env[63028]: DEBUG nova.virt.hardware [None req-9e63bf4e-0a01-438b-b252-bf0645d18ec0 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 618.481479] env[63028]: DEBUG nova.virt.hardware [None req-9e63bf4e-0a01-438b-b252-bf0645d18ec0 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 618.482464] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a08ba5d-0682-4d2f-aecc-afbf55b82cc5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.492607] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf1711bd-e2af-45c9-9bec-78b495b8acb2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.516019] env[63028]: DEBUG nova.compute.manager [req-aa1de3f7-e383-4ba2-b2de-5ce38b24eefb req-d05d06db-dae1-45ed-ab56-f2556fd4d9a8 service nova] [instance: f311a533-5c48-410b-ba3b-58f0032c8816] Received event network-vif-deleted-c324d23d-8733-4dee-a740-12bc47cfb838 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 618.545560] env[63028]: DEBUG oslo_vmware.api [None req-9afbf1dd-406f-4fbb-ac83-9f6278345562 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Task: {'id': task-2735044, 'name': CloneVM_Task} progress is 94%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 618.644947] env[63028]: DEBUG oslo_vmware.api [None req-c7c3e8ac-1fec-453a-8f4f-76e9aae0d91a tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Task: {'id': task-2735045, 'name': Rename_Task, 'duration_secs': 0.234958} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 618.645701] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7c3e8ac-1fec-453a-8f4f-76e9aae0d91a tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: f3277886-4498-45c6-be68-e71d8293dc00] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 618.645991] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-50133f1b-cc82-465a-84ba-a859008d21f9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.655993] env[63028]: DEBUG oslo_vmware.api [None req-c7c3e8ac-1fec-453a-8f4f-76e9aae0d91a tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Waiting for the task: (returnval){ [ 618.655993] env[63028]: value = "task-2735046" [ 618.655993] env[63028]: _type = "Task" [ 618.655993] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 618.660643] env[63028]: ERROR nova.scheduler.client.report [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] [req-9d226515-856d-41bb-835f-b9c7074527ba] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-9d226515-856d-41bb-835f-b9c7074527ba"}]} [ 618.674791] env[63028]: DEBUG oslo_vmware.api [None req-c7c3e8ac-1fec-453a-8f4f-76e9aae0d91a tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Task: {'id': task-2735046, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 618.680899] env[63028]: DEBUG nova.scheduler.client.report [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Refreshing inventories for resource provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 618.698529] env[63028]: DEBUG nova.scheduler.client.report [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Updating ProviderTree inventory for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 618.698773] env[63028]: DEBUG nova.compute.provider_tree [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Updating inventory in ProviderTree for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 618.712633] env[63028]: DEBUG nova.scheduler.client.report [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Refreshing aggregate associations for resource provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2, aggregates: None {{(pid=63028) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 618.742164] env[63028]: DEBUG nova.scheduler.client.report [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Refreshing trait associations for resource provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=63028) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 619.050547] env[63028]: DEBUG oslo_vmware.api [None req-9afbf1dd-406f-4fbb-ac83-9f6278345562 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Task: {'id': task-2735044, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 619.167781] env[63028]: DEBUG oslo_vmware.api [None req-c7c3e8ac-1fec-453a-8f4f-76e9aae0d91a tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Task: {'id': task-2735046, 'name': PowerOnVM_Task} progress is 88%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 619.180788] env[63028]: DEBUG nova.compute.manager [req-47ec47a2-fa57-42b0-bb5c-d0407418f635 req-f22cd1ba-e5ef-4280-8377-9385d6188c2e service nova] [instance: 4a782483-c24e-44db-b697-856c69cc4a13] Received event network-changed-a290475c-c96a-4037-9a1f-e4340a86da15 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 619.180988] env[63028]: DEBUG nova.compute.manager [req-47ec47a2-fa57-42b0-bb5c-d0407418f635 req-f22cd1ba-e5ef-4280-8377-9385d6188c2e service nova] [instance: 4a782483-c24e-44db-b697-856c69cc4a13] Refreshing instance network info cache due to event network-changed-a290475c-c96a-4037-9a1f-e4340a86da15. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 619.181222] env[63028]: DEBUG oslo_concurrency.lockutils [req-47ec47a2-fa57-42b0-bb5c-d0407418f635 req-f22cd1ba-e5ef-4280-8377-9385d6188c2e service nova] Acquiring lock "refresh_cache-4a782483-c24e-44db-b697-856c69cc4a13" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 619.181364] env[63028]: DEBUG oslo_concurrency.lockutils [req-47ec47a2-fa57-42b0-bb5c-d0407418f635 req-f22cd1ba-e5ef-4280-8377-9385d6188c2e service nova] Acquired lock "refresh_cache-4a782483-c24e-44db-b697-856c69cc4a13" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 619.181520] env[63028]: DEBUG nova.network.neutron [req-47ec47a2-fa57-42b0-bb5c-d0407418f635 req-f22cd1ba-e5ef-4280-8377-9385d6188c2e service nova] [instance: 4a782483-c24e-44db-b697-856c69cc4a13] Refreshing network info cache for port a290475c-c96a-4037-9a1f-e4340a86da15 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 619.233897] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-973e9116-1e6b-474a-8c30-616cba7c0b30 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.244200] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2490b094-8c26-4193-85c3-3e8129bc3fec {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.283802] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-696756a6-c9c0-4b42-80fb-73c96e742e48 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.297104] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a028656-9d6b-42cb-95c3-5af6d313f1b3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.311711] env[63028]: DEBUG nova.compute.provider_tree [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Updating inventory in ProviderTree for 
provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 619.401385] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a8d45a3e-08b3-4b12-b769-c8ee0aba7c03 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] Acquiring lock "8c7c8713-d5d7-490e-aba5-25d98bfbfaa0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 619.401934] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a8d45a3e-08b3-4b12-b769-c8ee0aba7c03 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] Lock "8c7c8713-d5d7-490e-aba5-25d98bfbfaa0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 619.546241] env[63028]: DEBUG oslo_vmware.api [None req-9afbf1dd-406f-4fbb-ac83-9f6278345562 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Task: {'id': task-2735044, 'name': CloneVM_Task} progress is 100%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 619.670539] env[63028]: DEBUG oslo_vmware.api [None req-c7c3e8ac-1fec-453a-8f4f-76e9aae0d91a tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Task: {'id': task-2735046, 'name': PowerOnVM_Task, 'duration_secs': 0.701399} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 619.670815] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7c3e8ac-1fec-453a-8f4f-76e9aae0d91a tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: f3277886-4498-45c6-be68-e71d8293dc00] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 619.671323] env[63028]: INFO nova.compute.manager [None req-c7c3e8ac-1fec-453a-8f4f-76e9aae0d91a tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: f3277886-4498-45c6-be68-e71d8293dc00] Took 10.70 seconds to spawn the instance on the hypervisor. 
[ 619.671509] env[63028]: DEBUG nova.compute.manager [None req-c7c3e8ac-1fec-453a-8f4f-76e9aae0d91a tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: f3277886-4498-45c6-be68-e71d8293dc00] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 619.672681] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46c14e52-8818-447a-8a6a-5577f9e25178 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.845987] env[63028]: ERROR nova.scheduler.client.report [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] [req-ab5c41d5-bda3-448a-95e8-12ea991ce42f] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-ab5c41d5-bda3-448a-95e8-12ea991ce42f"}]} [ 619.867934] env[63028]: DEBUG nova.scheduler.client.report [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Refreshing inventories for resource provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 619.884969] env[63028]: DEBUG nova.scheduler.client.report [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Updating ProviderTree inventory for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 619.885844] env[63028]: DEBUG nova.compute.provider_tree [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Updating inventory in ProviderTree for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 619.902957] env[63028]: DEBUG nova.scheduler.client.report 
[None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Refreshing aggregate associations for resource provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2, aggregates: None {{(pid=63028) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 619.925126] env[63028]: DEBUG nova.scheduler.client.report [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Refreshing trait associations for resource provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=63028) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 619.925889] env[63028]: DEBUG nova.compute.provider_tree [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Updating resource provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 generation from 40 to 41 during operation: update_traits {{(pid=63028) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 619.941161] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a9a09600-a9d3-49d5-8805-1ca8f0f010c9 tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] Acquiring lock "44fca05f-51db-4252-bcf8-6bcad37a6147" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 619.942487] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a9a09600-a9d3-49d5-8805-1ca8f0f010c9 tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] Lock "44fca05f-51db-4252-bcf8-6bcad37a6147" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 619.944253] env[63028]: INFO nova.compute.manager [None req-a9a09600-a9d3-49d5-8805-1ca8f0f010c9 tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] [instance: 44fca05f-51db-4252-bcf8-6bcad37a6147] Rebooting instance [ 619.968046] env[63028]: DEBUG nova.network.neutron [None req-9e63bf4e-0a01-438b-b252-bf0645d18ec0 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] [instance: 9b1cd3c1-5e9a-43b6-9efb-1baf879ae0c0] Successfully updated port: 8f21ea1a-db48-4941-98ae-796e9dfcf6b1 {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 620.053423] env[63028]: DEBUG oslo_vmware.api [None req-9afbf1dd-406f-4fbb-ac83-9f6278345562 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Task: {'id': task-2735044, 'name': CloneVM_Task, 'duration_secs': 1.586488} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 620.053751] env[63028]: INFO nova.virt.vmwareapi.vmops [None req-9afbf1dd-406f-4fbb-ac83-9f6278345562 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: e20ed04f-205b-4aa9-b8b6-e352cd237412] Created linked-clone VM from snapshot [ 620.054451] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36a96749-ea32-40a1-be7b-0c39db56b8b6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.066293] env[63028]: DEBUG nova.virt.vmwareapi.images [None req-9afbf1dd-406f-4fbb-ac83-9f6278345562 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: e20ed04f-205b-4aa9-b8b6-e352cd237412] Uploading image 8e636325-3081-496c-bbf4-77d0e496843f {{(pid=63028) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 620.083344] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-9afbf1dd-406f-4fbb-ac83-9f6278345562 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: e20ed04f-205b-4aa9-b8b6-e352cd237412] Destroying the VM {{(pid=63028) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 620.083771] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-f1bb2ab3-c033-4f4e-8e04-51060fb2fa76 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.093265] env[63028]: DEBUG oslo_vmware.api [None req-9afbf1dd-406f-4fbb-ac83-9f6278345562 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Waiting for the task: (returnval){ [ 620.093265] env[63028]: value = "task-2735047" [ 620.093265] env[63028]: _type = "Task" [ 620.093265] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 620.108776] env[63028]: DEBUG oslo_vmware.api [None req-9afbf1dd-406f-4fbb-ac83-9f6278345562 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Task: {'id': task-2735047, 'name': Destroy_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 620.200983] env[63028]: INFO nova.compute.manager [None req-c7c3e8ac-1fec-453a-8f4f-76e9aae0d91a tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: f3277886-4498-45c6-be68-e71d8293dc00] Took 30.79 seconds to build instance. [ 620.206057] env[63028]: DEBUG nova.network.neutron [req-47ec47a2-fa57-42b0-bb5c-d0407418f635 req-f22cd1ba-e5ef-4280-8377-9385d6188c2e service nova] [instance: 4a782483-c24e-44db-b697-856c69cc4a13] Updated VIF entry in instance network info cache for port a290475c-c96a-4037-9a1f-e4340a86da15. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 620.206451] env[63028]: DEBUG nova.network.neutron [req-47ec47a2-fa57-42b0-bb5c-d0407418f635 req-f22cd1ba-e5ef-4280-8377-9385d6188c2e service nova] [instance: 4a782483-c24e-44db-b697-856c69cc4a13] Updating instance_info_cache with network_info: [{"id": "a290475c-c96a-4037-9a1f-e4340a86da15", "address": "fa:16:3e:0a:4f:a0", "network": {"id": "eea88aa4-8c75-4cce-b1a6-7b2e64245351", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-868430466-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.253", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3b056498f618493295359c1784b6660a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b7bf7d4-8e0c-4cee-84ba-244e73ef6379", "external-id": "nsx-vlan-transportzone-423", "segmentation_id": 423, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa290475c-c9", "ovs_interfaceid": "a290475c-c96a-4037-9a1f-e4340a86da15", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 620.384036] env[63028]: DEBUG oslo_concurrency.lockutils [None req-6749eca0-c477-497a-9500-20fbd646e0a7 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Acquiring lock "c3014718-1064-4ab9-9600-86490489ee4b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 620.384364] env[63028]: DEBUG oslo_concurrency.lockutils [None req-6749eca0-c477-497a-9500-20fbd646e0a7 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Lock "c3014718-1064-4ab9-9600-86490489ee4b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 620.414580] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63a858eb-81d3-49cb-b99f-3a0d0582ccaa {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.427272] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28312de6-47e4-4666-8457-bba447d4ba28 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.490883] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9e63bf4e-0a01-438b-b252-bf0645d18ec0 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Acquiring lock "refresh_cache-9b1cd3c1-5e9a-43b6-9efb-1baf879ae0c0" {{(pid=63028) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 620.490883] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9e63bf4e-0a01-438b-b252-bf0645d18ec0 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Acquired lock "refresh_cache-9b1cd3c1-5e9a-43b6-9efb-1baf879ae0c0" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 620.491281] env[63028]: DEBUG nova.network.neutron [None req-9e63bf4e-0a01-438b-b252-bf0645d18ec0 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] [instance: 9b1cd3c1-5e9a-43b6-9efb-1baf879ae0c0] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 620.493116] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2885f951-b311-4fcf-af04-551819954a42 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.503759] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da58a8c0-6206-461e-bc46-e64740d842be {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.523774] env[63028]: DEBUG nova.compute.provider_tree [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 620.526909] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a9a09600-a9d3-49d5-8805-1ca8f0f010c9 tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] Acquiring lock "refresh_cache-44fca05f-51db-4252-bcf8-6bcad37a6147" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 620.527273] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a9a09600-a9d3-49d5-8805-1ca8f0f010c9 tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] Acquired lock "refresh_cache-44fca05f-51db-4252-bcf8-6bcad37a6147" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 620.527437] env[63028]: DEBUG nova.network.neutron [None req-a9a09600-a9d3-49d5-8805-1ca8f0f010c9 tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] [instance: 44fca05f-51db-4252-bcf8-6bcad37a6147] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 620.611878] env[63028]: DEBUG oslo_vmware.api [None req-9afbf1dd-406f-4fbb-ac83-9f6278345562 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Task: {'id': task-2735047, 'name': Destroy_Task} progress is 33%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 620.649217] env[63028]: DEBUG oslo_concurrency.lockutils [None req-3d4a8458-0f79-4648-8e42-1868292216b3 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] Acquiring lock "c0db2b2a-9c06-409c-b48b-a0d5c127f2dc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 620.649434] env[63028]: DEBUG oslo_concurrency.lockutils [None req-3d4a8458-0f79-4648-8e42-1868292216b3 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] Lock "c0db2b2a-9c06-409c-b48b-a0d5c127f2dc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 620.680509] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d9b105ba-e8d2-4569-8e3f-130dc12b502c tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Acquiring lock "0dbafad1-ab21-439d-bc8e-e447ac33304e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 620.682423] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d9b105ba-e8d2-4569-8e3f-130dc12b502c tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Lock "0dbafad1-ab21-439d-bc8e-e447ac33304e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.002s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 620.682819] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d9b105ba-e8d2-4569-8e3f-130dc12b502c tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Acquiring lock "0dbafad1-ab21-439d-bc8e-e447ac33304e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 620.682922] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d9b105ba-e8d2-4569-8e3f-130dc12b502c tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Lock "0dbafad1-ab21-439d-bc8e-e447ac33304e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 620.684054] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d9b105ba-e8d2-4569-8e3f-130dc12b502c tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Lock "0dbafad1-ab21-439d-bc8e-e447ac33304e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 620.686231] env[63028]: INFO nova.compute.manager [None req-d9b105ba-e8d2-4569-8e3f-130dc12b502c tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] [instance: 0dbafad1-ab21-439d-bc8e-e447ac33304e] 
Terminating instance [ 620.709259] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c7c3e8ac-1fec-453a-8f4f-76e9aae0d91a tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Lock "f3277886-4498-45c6-be68-e71d8293dc00" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.314s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 620.709480] env[63028]: DEBUG oslo_concurrency.lockutils [req-47ec47a2-fa57-42b0-bb5c-d0407418f635 req-f22cd1ba-e5ef-4280-8377-9385d6188c2e service nova] Releasing lock "refresh_cache-4a782483-c24e-44db-b697-856c69cc4a13" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 621.030829] env[63028]: DEBUG nova.scheduler.client.report [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 621.041290] env[63028]: DEBUG nova.network.neutron [None req-9e63bf4e-0a01-438b-b252-bf0645d18ec0 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] [instance: 9b1cd3c1-5e9a-43b6-9efb-1baf879ae0c0] Instance cache missing network info. {{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 621.108077] env[63028]: DEBUG oslo_vmware.api [None req-9afbf1dd-406f-4fbb-ac83-9f6278345562 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Task: {'id': task-2735047, 'name': Destroy_Task, 'duration_secs': 0.58355} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 621.108377] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-9afbf1dd-406f-4fbb-ac83-9f6278345562 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: e20ed04f-205b-4aa9-b8b6-e352cd237412] Destroyed the VM [ 621.108623] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-9afbf1dd-406f-4fbb-ac83-9f6278345562 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: e20ed04f-205b-4aa9-b8b6-e352cd237412] Deleting Snapshot of the VM instance {{(pid=63028) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 621.111523] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-c548f58b-ba73-45cd-8a4f-cc23ebbd3bdc {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.117454] env[63028]: DEBUG oslo_vmware.api [None req-9afbf1dd-406f-4fbb-ac83-9f6278345562 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Waiting for the task: (returnval){ [ 621.117454] env[63028]: value = "task-2735048" [ 621.117454] env[63028]: _type = "Task" [ 621.117454] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 621.134198] env[63028]: DEBUG oslo_vmware.api [None req-9afbf1dd-406f-4fbb-ac83-9f6278345562 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Task: {'id': task-2735048, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 621.190674] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d9b105ba-e8d2-4569-8e3f-130dc12b502c tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Acquiring lock "refresh_cache-0dbafad1-ab21-439d-bc8e-e447ac33304e" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 621.190959] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d9b105ba-e8d2-4569-8e3f-130dc12b502c tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Acquired lock "refresh_cache-0dbafad1-ab21-439d-bc8e-e447ac33304e" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 621.191274] env[63028]: DEBUG nova.network.neutron [None req-d9b105ba-e8d2-4569-8e3f-130dc12b502c tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] [instance: 0dbafad1-ab21-439d-bc8e-e447ac33304e] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 621.211964] env[63028]: DEBUG nova.compute.manager [None req-38942d5b-be69-4e95-8160-07bf85f3e0d2 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: b9d9fe4e-438c-4f68-b011-9eb9e10a381c] Starting instance... 
{{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 621.482460] env[63028]: DEBUG nova.network.neutron [None req-9e63bf4e-0a01-438b-b252-bf0645d18ec0 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] [instance: 9b1cd3c1-5e9a-43b6-9efb-1baf879ae0c0] Updating instance_info_cache with network_info: [{"id": "8f21ea1a-db48-4941-98ae-796e9dfcf6b1", "address": "fa:16:3e:5e:61:04", "network": {"id": "bd3f74f8-d12b-4d2e-9aa9-ca1c9e766bf4", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1073484567-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "178b95ba550d453db2b9868e72a8c93f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc1e16db-ad3b-4b7f-ab64-4609c87abac0", "external-id": "nsx-vlan-transportzone-500", "segmentation_id": 500, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8f21ea1a-db", "ovs_interfaceid": "8f21ea1a-db48-4941-98ae-796e9dfcf6b1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 621.538265] env[63028]: DEBUG oslo_concurrency.lockutils [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 5.138s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 621.538854] env[63028]: DEBUG nova.compute.manager [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] [instance: ddf20137-4d63-4c7a-b519-445719265e1d] Start building networks asynchronously for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 621.541853] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ce8dbf78-89da-47a1-a8ca-68ee45531f5f tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.299s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 621.543546] env[63028]: INFO nova.compute.claims [None req-ce8dbf78-89da-47a1-a8ca-68ee45531f5f tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] [instance: 2ae111f7-4eaa-46c2-ab97-907daa913834] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 621.632599] env[63028]: DEBUG oslo_vmware.api [None req-9afbf1dd-406f-4fbb-ac83-9f6278345562 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Task: {'id': task-2735048, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 621.742017] env[63028]: DEBUG oslo_concurrency.lockutils [None req-38942d5b-be69-4e95-8160-07bf85f3e0d2 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 621.750276] env[63028]: DEBUG nova.network.neutron [None req-d9b105ba-e8d2-4569-8e3f-130dc12b502c tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] [instance: 0dbafad1-ab21-439d-bc8e-e447ac33304e] Instance cache missing network info. {{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 621.911475] env[63028]: DEBUG nova.network.neutron [None req-d9b105ba-e8d2-4569-8e3f-130dc12b502c tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] [instance: 0dbafad1-ab21-439d-bc8e-e447ac33304e] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 621.921653] env[63028]: DEBUG nova.network.neutron [None req-a9a09600-a9d3-49d5-8805-1ca8f0f010c9 tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] [instance: 44fca05f-51db-4252-bcf8-6bcad37a6147] Updating instance_info_cache with network_info: [{"id": "4fb8a759-76bd-4b37-a810-2665ea4a32b3", "address": "fa:16:3e:6b:a2:3a", "network": {"id": "3a62b3ba-178e-4e4a-b8ce-a36e2a779888", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1226406934-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "824cdcbbc0e1478b9d900cde707bc67b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37fb1918-d178-4e12-93e6-316381e78be4", "external-id": "nsx-vlan-transportzone-763", "segmentation_id": 763, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4fb8a759-76", "ovs_interfaceid": "4fb8a759-76bd-4b37-a810-2665ea4a32b3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 621.947801] env[63028]: DEBUG nova.compute.manager [req-a4b5738d-a884-4e29-bfce-1ec071a6c8a7 req-c285527d-6379-4c08-9eac-a5b92da21743 service nova] [instance: 413f7fea-452b-463f-b396-cdd29e8ffa91] Received event network-changed-892c8e3d-851e-4ad1-bbab-938e49f4cba1 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 621.947801] env[63028]: DEBUG nova.compute.manager [req-a4b5738d-a884-4e29-bfce-1ec071a6c8a7 req-c285527d-6379-4c08-9eac-a5b92da21743 service nova] [instance: 413f7fea-452b-463f-b396-cdd29e8ffa91] Refreshing instance network info cache due to event network-changed-892c8e3d-851e-4ad1-bbab-938e49f4cba1. 
{{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 621.947801] env[63028]: DEBUG oslo_concurrency.lockutils [req-a4b5738d-a884-4e29-bfce-1ec071a6c8a7 req-c285527d-6379-4c08-9eac-a5b92da21743 service nova] Acquiring lock "refresh_cache-413f7fea-452b-463f-b396-cdd29e8ffa91" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 621.948108] env[63028]: DEBUG oslo_concurrency.lockutils [req-a4b5738d-a884-4e29-bfce-1ec071a6c8a7 req-c285527d-6379-4c08-9eac-a5b92da21743 service nova] Acquired lock "refresh_cache-413f7fea-452b-463f-b396-cdd29e8ffa91" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 621.948243] env[63028]: DEBUG nova.network.neutron [req-a4b5738d-a884-4e29-bfce-1ec071a6c8a7 req-c285527d-6379-4c08-9eac-a5b92da21743 service nova] [instance: 413f7fea-452b-463f-b396-cdd29e8ffa91] Refreshing network info cache for port 892c8e3d-851e-4ad1-bbab-938e49f4cba1 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 621.989355] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9e63bf4e-0a01-438b-b252-bf0645d18ec0 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Releasing lock "refresh_cache-9b1cd3c1-5e9a-43b6-9efb-1baf879ae0c0" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 621.989355] env[63028]: DEBUG nova.compute.manager [None req-9e63bf4e-0a01-438b-b252-bf0645d18ec0 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] [instance: 9b1cd3c1-5e9a-43b6-9efb-1baf879ae0c0] Instance network_info: |[{"id": "8f21ea1a-db48-4941-98ae-796e9dfcf6b1", "address": "fa:16:3e:5e:61:04", "network": {"id": "bd3f74f8-d12b-4d2e-9aa9-ca1c9e766bf4", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1073484567-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "178b95ba550d453db2b9868e72a8c93f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc1e16db-ad3b-4b7f-ab64-4609c87abac0", "external-id": "nsx-vlan-transportzone-500", "segmentation_id": 500, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8f21ea1a-db", "ovs_interfaceid": "8f21ea1a-db48-4941-98ae-796e9dfcf6b1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 621.989501] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-9e63bf4e-0a01-438b-b252-bf0645d18ec0 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] [instance: 9b1cd3c1-5e9a-43b6-9efb-1baf879ae0c0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5e:61:04', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'bc1e16db-ad3b-4b7f-ab64-4609c87abac0', 'network-type': 
'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8f21ea1a-db48-4941-98ae-796e9dfcf6b1', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 621.998141] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e63bf4e-0a01-438b-b252-bf0645d18ec0 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Creating folder: Project (178b95ba550d453db2b9868e72a8c93f). Parent ref: group-v550570. {{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 621.998860] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b9762754-82ea-4037-9bd2-1679fafcdfc1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.019499] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-9e63bf4e-0a01-438b-b252-bf0645d18ec0 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Created folder: Project (178b95ba550d453db2b9868e72a8c93f) in parent group-v550570. [ 622.021136] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e63bf4e-0a01-438b-b252-bf0645d18ec0 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Creating folder: Instances. Parent ref: group-v550638. {{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 622.022893] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b0051cbd-5c79-470d-8cf0-bd7b5de36e3c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.036079] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-9e63bf4e-0a01-438b-b252-bf0645d18ec0 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Created folder: Instances in parent group-v550638. [ 622.036373] env[63028]: DEBUG oslo.service.loopingcall [None req-9e63bf4e-0a01-438b-b252-bf0645d18ec0 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 622.036580] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9b1cd3c1-5e9a-43b6-9efb-1baf879ae0c0] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 622.036851] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-32f4c296-1b91-4357-b764-7c1b992483c2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.060446] env[63028]: DEBUG nova.compute.utils [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 622.062795] env[63028]: DEBUG nova.compute.manager [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] [instance: ddf20137-4d63-4c7a-b519-445719265e1d] Allocating IP information in the background. 
{{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 622.062795] env[63028]: DEBUG nova.network.neutron [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] [instance: ddf20137-4d63-4c7a-b519-445719265e1d] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 622.072664] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 622.072664] env[63028]: value = "task-2735051" [ 622.072664] env[63028]: _type = "Task" [ 622.072664] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 622.086181] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735051, 'name': CreateVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.139418] env[63028]: DEBUG oslo_vmware.api [None req-9afbf1dd-406f-4fbb-ac83-9f6278345562 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Task: {'id': task-2735048, 'name': RemoveSnapshot_Task} progress is 70%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.143023] env[63028]: DEBUG nova.policy [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd93aa90d0263436a8d146ece21fa56b1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5b1989df63834513aa0b192beb4609ba', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 622.321471] env[63028]: DEBUG nova.compute.manager [req-73f58e84-ea0a-479d-b537-f6accc8cd5b6 req-324c57d5-c40b-4b1c-8a58-5287cac7b7e2 service nova] [instance: c7a3f2c6-8368-49cc-9737-ea1d836f1783] Received event network-changed-dbc0e58f-b646-4c47-becf-ba9c242ca9aa {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 622.321708] env[63028]: DEBUG nova.compute.manager [req-73f58e84-ea0a-479d-b537-f6accc8cd5b6 req-324c57d5-c40b-4b1c-8a58-5287cac7b7e2 service nova] [instance: c7a3f2c6-8368-49cc-9737-ea1d836f1783] Refreshing instance network info cache due to event network-changed-dbc0e58f-b646-4c47-becf-ba9c242ca9aa. 
{{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 622.321956] env[63028]: DEBUG oslo_concurrency.lockutils [req-73f58e84-ea0a-479d-b537-f6accc8cd5b6 req-324c57d5-c40b-4b1c-8a58-5287cac7b7e2 service nova] Acquiring lock "refresh_cache-c7a3f2c6-8368-49cc-9737-ea1d836f1783" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 622.322143] env[63028]: DEBUG oslo_concurrency.lockutils [req-73f58e84-ea0a-479d-b537-f6accc8cd5b6 req-324c57d5-c40b-4b1c-8a58-5287cac7b7e2 service nova] Acquired lock "refresh_cache-c7a3f2c6-8368-49cc-9737-ea1d836f1783" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 622.322308] env[63028]: DEBUG nova.network.neutron [req-73f58e84-ea0a-479d-b537-f6accc8cd5b6 req-324c57d5-c40b-4b1c-8a58-5287cac7b7e2 service nova] [instance: c7a3f2c6-8368-49cc-9737-ea1d836f1783] Refreshing network info cache for port dbc0e58f-b646-4c47-becf-ba9c242ca9aa {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 622.414840] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d9b105ba-e8d2-4569-8e3f-130dc12b502c tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Releasing lock "refresh_cache-0dbafad1-ab21-439d-bc8e-e447ac33304e" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 622.415363] env[63028]: DEBUG nova.compute.manager [None req-d9b105ba-e8d2-4569-8e3f-130dc12b502c tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] [instance: 0dbafad1-ab21-439d-bc8e-e447ac33304e] Start destroying the instance on the hypervisor. {{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 622.415660] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-d9b105ba-e8d2-4569-8e3f-130dc12b502c tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] [instance: 0dbafad1-ab21-439d-bc8e-e447ac33304e] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 622.416634] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e54f1ed-9d05-4093-8cbf-afe59fa7dd0e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.426789] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a9a09600-a9d3-49d5-8805-1ca8f0f010c9 tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] Releasing lock "refresh_cache-44fca05f-51db-4252-bcf8-6bcad37a6147" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 622.431595] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9b105ba-e8d2-4569-8e3f-130dc12b502c tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] [instance: 0dbafad1-ab21-439d-bc8e-e447ac33304e] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 622.432727] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5db9821c-6a60-45fe-adba-4541516b78be {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.442781] env[63028]: DEBUG oslo_vmware.api [None req-d9b105ba-e8d2-4569-8e3f-130dc12b502c 
tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Waiting for the task: (returnval){ [ 622.442781] env[63028]: value = "task-2735052" [ 622.442781] env[63028]: _type = "Task" [ 622.442781] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 622.459441] env[63028]: DEBUG oslo_vmware.api [None req-d9b105ba-e8d2-4569-8e3f-130dc12b502c tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Task: {'id': task-2735052, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.570853] env[63028]: DEBUG nova.compute.manager [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] [instance: ddf20137-4d63-4c7a-b519-445719265e1d] Start building block device mappings for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 622.588038] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735051, 'name': CreateVM_Task} progress is 15%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.643836] env[63028]: DEBUG oslo_concurrency.lockutils [None req-03cc79ea-bccb-49a4-aa10-c7474e32f926 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Acquiring lock "c7a3f2c6-8368-49cc-9737-ea1d836f1783" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 622.644107] env[63028]: DEBUG oslo_concurrency.lockutils [None req-03cc79ea-bccb-49a4-aa10-c7474e32f926 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Lock "c7a3f2c6-8368-49cc-9737-ea1d836f1783" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 622.644345] env[63028]: DEBUG oslo_concurrency.lockutils [None req-03cc79ea-bccb-49a4-aa10-c7474e32f926 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Acquiring lock "c7a3f2c6-8368-49cc-9737-ea1d836f1783-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 622.644540] env[63028]: DEBUG oslo_concurrency.lockutils [None req-03cc79ea-bccb-49a4-aa10-c7474e32f926 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Lock "c7a3f2c6-8368-49cc-9737-ea1d836f1783-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 622.646137] env[63028]: DEBUG oslo_concurrency.lockutils [None req-03cc79ea-bccb-49a4-aa10-c7474e32f926 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Lock "c7a3f2c6-8368-49cc-9737-ea1d836f1783-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 622.649934] env[63028]: DEBUG oslo_vmware.api [None req-9afbf1dd-406f-4fbb-ac83-9f6278345562 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Task: {'id': task-2735048, 'name': RemoveSnapshot_Task} progress is 70%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.652806] env[63028]: INFO nova.compute.manager [None req-03cc79ea-bccb-49a4-aa10-c7474e32f926 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] [instance: c7a3f2c6-8368-49cc-9737-ea1d836f1783] Terminating instance [ 622.936474] env[63028]: DEBUG nova.compute.manager [None req-a9a09600-a9d3-49d5-8805-1ca8f0f010c9 tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] [instance: 44fca05f-51db-4252-bcf8-6bcad37a6147] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 622.937664] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e2b03a0-8fb8-47b6-b1f6-04d88215069d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.966824] env[63028]: DEBUG oslo_vmware.api [None req-d9b105ba-e8d2-4569-8e3f-130dc12b502c tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Task: {'id': task-2735052, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.092026] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735051, 'name': CreateVM_Task} progress is 15%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.114294] env[63028]: DEBUG nova.network.neutron [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] [instance: ddf20137-4d63-4c7a-b519-445719265e1d] Successfully created port: 7efed411-b25a-4981-919a-66c96dd949ee {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 623.139296] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21b64d06-527c-4512-9610-db286bb1ab93 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.143530] env[63028]: DEBUG oslo_vmware.api [None req-9afbf1dd-406f-4fbb-ac83-9f6278345562 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Task: {'id': task-2735048, 'name': RemoveSnapshot_Task} progress is 70%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.149971] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5685fc5-c8eb-468d-bf10-b66b2b96bfe2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.157185] env[63028]: DEBUG nova.compute.manager [None req-03cc79ea-bccb-49a4-aa10-c7474e32f926 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] [instance: c7a3f2c6-8368-49cc-9737-ea1d836f1783] Start destroying the instance on the hypervisor. {{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 623.157486] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-03cc79ea-bccb-49a4-aa10-c7474e32f926 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] [instance: c7a3f2c6-8368-49cc-9737-ea1d836f1783] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 623.189198] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53633063-11af-4554-a7a8-66830f77d368 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.195878] env[63028]: DEBUG nova.network.neutron [req-a4b5738d-a884-4e29-bfce-1ec071a6c8a7 req-c285527d-6379-4c08-9eac-a5b92da21743 service nova] [instance: 413f7fea-452b-463f-b396-cdd29e8ffa91] Updated VIF entry in instance network info cache for port 892c8e3d-851e-4ad1-bbab-938e49f4cba1. {{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 623.196267] env[63028]: DEBUG nova.network.neutron [req-a4b5738d-a884-4e29-bfce-1ec071a6c8a7 req-c285527d-6379-4c08-9eac-a5b92da21743 service nova] [instance: 413f7fea-452b-463f-b396-cdd29e8ffa91] Updating instance_info_cache with network_info: [{"id": "892c8e3d-851e-4ad1-bbab-938e49f4cba1", "address": "fa:16:3e:2c:e4:c1", "network": {"id": "5f8a710c-b049-40fe-8cc4-00c97e954fd9", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1979325486-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "381de553d9da4c94b923d790c12a28a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ea16b8c-ebf6-4bd4-ab67-ddb472e7b460", "external-id": "nsx-vlan-transportzone-156", "segmentation_id": 156, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap892c8e3d-85", "ovs_interfaceid": "892c8e3d-851e-4ad1-bbab-938e49f4cba1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 623.198164] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c404066-f48f-407d-b2d5-fbabb4a9f126 {{(pid=63028) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.206401] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-03cc79ea-bccb-49a4-aa10-c7474e32f926 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] [instance: c7a3f2c6-8368-49cc-9737-ea1d836f1783] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 623.209196] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e54261e3-d310-4655-8529-ea6411587954 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.211774] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9a0b32b-91a3-4d8e-bb97-f011c17ac07b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.238509] env[63028]: DEBUG nova.compute.provider_tree [None req-ce8dbf78-89da-47a1-a8ca-68ee45531f5f tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 623.242346] env[63028]: DEBUG oslo_vmware.api [None req-03cc79ea-bccb-49a4-aa10-c7474e32f926 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Waiting for the task: (returnval){ [ 623.242346] env[63028]: value = "task-2735053" [ 623.242346] env[63028]: _type = "Task" [ 623.242346] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 623.253809] env[63028]: DEBUG oslo_vmware.api [None req-03cc79ea-bccb-49a4-aa10-c7474e32f926 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Task: {'id': task-2735053, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.456321] env[63028]: DEBUG oslo_vmware.api [None req-d9b105ba-e8d2-4569-8e3f-130dc12b502c tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Task: {'id': task-2735052, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.506690] env[63028]: DEBUG nova.network.neutron [req-73f58e84-ea0a-479d-b537-f6accc8cd5b6 req-324c57d5-c40b-4b1c-8a58-5287cac7b7e2 service nova] [instance: c7a3f2c6-8368-49cc-9737-ea1d836f1783] Updated VIF entry in instance network info cache for port dbc0e58f-b646-4c47-becf-ba9c242ca9aa. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 623.507540] env[63028]: DEBUG nova.network.neutron [req-73f58e84-ea0a-479d-b537-f6accc8cd5b6 req-324c57d5-c40b-4b1c-8a58-5287cac7b7e2 service nova] [instance: c7a3f2c6-8368-49cc-9737-ea1d836f1783] Updating instance_info_cache with network_info: [{"id": "dbc0e58f-b646-4c47-becf-ba9c242ca9aa", "address": "fa:16:3e:f3:d3:54", "network": {"id": "5f8a710c-b049-40fe-8cc4-00c97e954fd9", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1979325486-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "381de553d9da4c94b923d790c12a28a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ea16b8c-ebf6-4bd4-ab67-ddb472e7b460", "external-id": "nsx-vlan-transportzone-156", "segmentation_id": 156, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdbc0e58f-b6", "ovs_interfaceid": "dbc0e58f-b646-4c47-becf-ba9c242ca9aa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 623.587344] env[63028]: DEBUG nova.compute.manager [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] [instance: ddf20137-4d63-4c7a-b519-445719265e1d] Start spawning the instance on the hypervisor. {{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 623.589428] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735051, 'name': CreateVM_Task} progress is 15%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.627664] env[63028]: DEBUG nova.virt.hardware [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 623.627975] env[63028]: DEBUG nova.virt.hardware [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 623.628117] env[63028]: DEBUG nova.virt.hardware [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 623.628816] env[63028]: DEBUG nova.virt.hardware [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 623.628816] env[63028]: DEBUG nova.virt.hardware [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 623.628816] env[63028]: DEBUG nova.virt.hardware [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 623.628816] env[63028]: DEBUG nova.virt.hardware [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 623.629110] env[63028]: DEBUG nova.virt.hardware [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 623.629110] env[63028]: 
DEBUG nova.virt.hardware [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 623.629270] env[63028]: DEBUG nova.virt.hardware [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 623.629549] env[63028]: DEBUG nova.virt.hardware [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 623.630405] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d14e4af7-a057-43fb-acb8-42e77e0b4ea4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.650700] env[63028]: DEBUG oslo_vmware.api [None req-9afbf1dd-406f-4fbb-ac83-9f6278345562 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Task: {'id': task-2735048, 'name': RemoveSnapshot_Task} progress is 70%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.652048] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-075c8fa8-6c36-4362-8d89-59811b4dcdb1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.702043] env[63028]: DEBUG oslo_concurrency.lockutils [req-a4b5738d-a884-4e29-bfce-1ec071a6c8a7 req-c285527d-6379-4c08-9eac-a5b92da21743 service nova] Releasing lock "refresh_cache-413f7fea-452b-463f-b396-cdd29e8ffa91" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 623.702401] env[63028]: DEBUG nova.compute.manager [req-a4b5738d-a884-4e29-bfce-1ec071a6c8a7 req-c285527d-6379-4c08-9eac-a5b92da21743 service nova] [instance: c7a3f2c6-8368-49cc-9737-ea1d836f1783] Received event network-changed-dbc0e58f-b646-4c47-becf-ba9c242ca9aa {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 623.702774] env[63028]: DEBUG nova.compute.manager [req-a4b5738d-a884-4e29-bfce-1ec071a6c8a7 req-c285527d-6379-4c08-9eac-a5b92da21743 service nova] [instance: c7a3f2c6-8368-49cc-9737-ea1d836f1783] Refreshing instance network info cache due to event network-changed-dbc0e58f-b646-4c47-becf-ba9c242ca9aa. 
{{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 623.703036] env[63028]: DEBUG oslo_concurrency.lockutils [req-a4b5738d-a884-4e29-bfce-1ec071a6c8a7 req-c285527d-6379-4c08-9eac-a5b92da21743 service nova] Acquiring lock "refresh_cache-c7a3f2c6-8368-49cc-9737-ea1d836f1783" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 623.743577] env[63028]: DEBUG nova.scheduler.client.report [None req-ce8dbf78-89da-47a1-a8ca-68ee45531f5f tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 623.761857] env[63028]: DEBUG oslo_vmware.api [None req-03cc79ea-bccb-49a4-aa10-c7474e32f926 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Task: {'id': task-2735053, 'name': PowerOffVM_Task, 'duration_secs': 0.21197} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 623.761857] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-03cc79ea-bccb-49a4-aa10-c7474e32f926 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] [instance: c7a3f2c6-8368-49cc-9737-ea1d836f1783] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 623.761983] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-03cc79ea-bccb-49a4-aa10-c7474e32f926 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] [instance: c7a3f2c6-8368-49cc-9737-ea1d836f1783] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 623.762296] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4b57a76a-db49-454d-b0d8-cec3666d5608 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.790275] env[63028]: DEBUG nova.compute.manager [None req-d44a3b69-c923-454a-bfd2-94e1a4dde1e3 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: f3277886-4498-45c6-be68-e71d8293dc00] Stashing vm_state: active {{(pid=63028) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 623.838789] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-03cc79ea-bccb-49a4-aa10-c7474e32f926 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] [instance: c7a3f2c6-8368-49cc-9737-ea1d836f1783] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 623.839299] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-03cc79ea-bccb-49a4-aa10-c7474e32f926 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] [instance: 
c7a3f2c6-8368-49cc-9737-ea1d836f1783] Deleting contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 623.839299] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-03cc79ea-bccb-49a4-aa10-c7474e32f926 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Deleting the datastore file [datastore2] c7a3f2c6-8368-49cc-9737-ea1d836f1783 {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 623.839476] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f914ec71-0434-4e08-ad42-e4a194886b43 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.849212] env[63028]: DEBUG oslo_vmware.api [None req-03cc79ea-bccb-49a4-aa10-c7474e32f926 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Waiting for the task: (returnval){ [ 623.849212] env[63028]: value = "task-2735055" [ 623.849212] env[63028]: _type = "Task" [ 623.849212] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 623.864453] env[63028]: DEBUG oslo_vmware.api [None req-03cc79ea-bccb-49a4-aa10-c7474e32f926 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Task: {'id': task-2735055, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.959666] env[63028]: DEBUG oslo_vmware.api [None req-d9b105ba-e8d2-4569-8e3f-130dc12b502c tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Task: {'id': task-2735052, 'name': PowerOffVM_Task} progress is 100%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.966259] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0284e045-8295-4caa-a3bc-7bbc3684aa50 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.976743] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-a9a09600-a9d3-49d5-8805-1ca8f0f010c9 tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] [instance: 44fca05f-51db-4252-bcf8-6bcad37a6147] Doing hard reboot of VM {{(pid=63028) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 623.977589] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-760f9640-9120-4d87-9948-72b1fcca6ecd {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.986147] env[63028]: DEBUG oslo_vmware.api [None req-a9a09600-a9d3-49d5-8805-1ca8f0f010c9 tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] Waiting for the task: (returnval){ [ 623.986147] env[63028]: value = "task-2735056" [ 623.986147] env[63028]: _type = "Task" [ 623.986147] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 623.996589] env[63028]: DEBUG oslo_vmware.api [None req-a9a09600-a9d3-49d5-8805-1ca8f0f010c9 tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] Task: {'id': task-2735056, 'name': ResetVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 624.010551] env[63028]: DEBUG oslo_concurrency.lockutils [req-73f58e84-ea0a-479d-b537-f6accc8cd5b6 req-324c57d5-c40b-4b1c-8a58-5287cac7b7e2 service nova] Releasing lock "refresh_cache-c7a3f2c6-8368-49cc-9737-ea1d836f1783" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 624.012111] env[63028]: DEBUG oslo_concurrency.lockutils [req-a4b5738d-a884-4e29-bfce-1ec071a6c8a7 req-c285527d-6379-4c08-9eac-a5b92da21743 service nova] Acquired lock "refresh_cache-c7a3f2c6-8368-49cc-9737-ea1d836f1783" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 624.012111] env[63028]: DEBUG nova.network.neutron [req-a4b5738d-a884-4e29-bfce-1ec071a6c8a7 req-c285527d-6379-4c08-9eac-a5b92da21743 service nova] [instance: c7a3f2c6-8368-49cc-9737-ea1d836f1783] Refreshing network info cache for port dbc0e58f-b646-4c47-becf-ba9c242ca9aa {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 624.093938] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735051, 'name': CreateVM_Task} progress is 25%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 624.143837] env[63028]: DEBUG oslo_vmware.api [None req-9afbf1dd-406f-4fbb-ac83-9f6278345562 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Task: {'id': task-2735048, 'name': RemoveSnapshot_Task} progress is 98%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 624.255034] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ce8dbf78-89da-47a1-a8ca-68ee45531f5f tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.713s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 624.256435] env[63028]: DEBUG nova.compute.manager [None req-ce8dbf78-89da-47a1-a8ca-68ee45531f5f tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] [instance: 2ae111f7-4eaa-46c2-ab97-907daa913834] Start building networks asynchronously for instance. 
{{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 624.264255] env[63028]: DEBUG oslo_concurrency.lockutils [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.064s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 624.265776] env[63028]: INFO nova.compute.claims [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] [instance: 99886410-ec47-46ad-9d07-ee3593006997] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 624.319084] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d44a3b69-c923-454a-bfd2-94e1a4dde1e3 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 624.363219] env[63028]: DEBUG oslo_vmware.api [None req-03cc79ea-bccb-49a4-aa10-c7474e32f926 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Task: {'id': task-2735055, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.472452} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 624.363546] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-03cc79ea-bccb-49a4-aa10-c7474e32f926 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 624.363758] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-03cc79ea-bccb-49a4-aa10-c7474e32f926 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] [instance: c7a3f2c6-8368-49cc-9737-ea1d836f1783] Deleted contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 624.363941] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-03cc79ea-bccb-49a4-aa10-c7474e32f926 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] [instance: c7a3f2c6-8368-49cc-9737-ea1d836f1783] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 624.364257] env[63028]: INFO nova.compute.manager [None req-03cc79ea-bccb-49a4-aa10-c7474e32f926 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] [instance: c7a3f2c6-8368-49cc-9737-ea1d836f1783] Took 1.21 seconds to destroy the instance on the hypervisor. [ 624.364548] env[63028]: DEBUG oslo.service.loopingcall [None req-03cc79ea-bccb-49a4-aa10-c7474e32f926 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 624.364746] env[63028]: DEBUG nova.compute.manager [-] [instance: c7a3f2c6-8368-49cc-9737-ea1d836f1783] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 624.365029] env[63028]: DEBUG nova.network.neutron [-] [instance: c7a3f2c6-8368-49cc-9737-ea1d836f1783] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 624.462201] env[63028]: DEBUG oslo_vmware.api [None req-d9b105ba-e8d2-4569-8e3f-130dc12b502c tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Task: {'id': task-2735052, 'name': PowerOffVM_Task, 'duration_secs': 1.770915} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 624.462201] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9b105ba-e8d2-4569-8e3f-130dc12b502c tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] [instance: 0dbafad1-ab21-439d-bc8e-e447ac33304e] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 624.462201] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-d9b105ba-e8d2-4569-8e3f-130dc12b502c tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] [instance: 0dbafad1-ab21-439d-bc8e-e447ac33304e] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 624.462201] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-59ef0138-b53d-4691-9a45-0d296ea6edd1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.498377] env[63028]: DEBUG oslo_vmware.api [None req-a9a09600-a9d3-49d5-8805-1ca8f0f010c9 tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] Task: {'id': task-2735056, 'name': ResetVM_Task, 'duration_secs': 0.139348} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 624.501178] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-a9a09600-a9d3-49d5-8805-1ca8f0f010c9 tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] [instance: 44fca05f-51db-4252-bcf8-6bcad37a6147] Did hard reboot of VM {{(pid=63028) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 624.502036] env[63028]: DEBUG nova.compute.manager [None req-a9a09600-a9d3-49d5-8805-1ca8f0f010c9 tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] [instance: 44fca05f-51db-4252-bcf8-6bcad37a6147] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 624.503729] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a263b05f-3205-4308-a355-17d5a0e3611b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.522869] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-d9b105ba-e8d2-4569-8e3f-130dc12b502c tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] [instance: 0dbafad1-ab21-439d-bc8e-e447ac33304e] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 624.523119] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-d9b105ba-e8d2-4569-8e3f-130dc12b502c tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] [instance: 0dbafad1-ab21-439d-bc8e-e447ac33304e] Deleting contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 624.523311] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-d9b105ba-e8d2-4569-8e3f-130dc12b502c tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Deleting the datastore file [datastore1] 0dbafad1-ab21-439d-bc8e-e447ac33304e {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 624.523899] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5d75c112-2d3a-4208-936b-8e57eda5fc96 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.532838] env[63028]: DEBUG oslo_vmware.api [None req-d9b105ba-e8d2-4569-8e3f-130dc12b502c tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Waiting for the task: (returnval){ [ 624.532838] env[63028]: value = "task-2735058" [ 624.532838] env[63028]: _type = "Task" [ 624.532838] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 624.546347] env[63028]: DEBUG oslo_vmware.api [None req-d9b105ba-e8d2-4569-8e3f-130dc12b502c tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Task: {'id': task-2735058, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 624.588775] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735051, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 624.646727] env[63028]: DEBUG oslo_vmware.api [None req-9afbf1dd-406f-4fbb-ac83-9f6278345562 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Task: {'id': task-2735048, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 624.771322] env[63028]: DEBUG nova.compute.utils [None req-ce8dbf78-89da-47a1-a8ca-68ee45531f5f tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 624.775964] env[63028]: DEBUG nova.compute.manager [None req-ce8dbf78-89da-47a1-a8ca-68ee45531f5f tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] [instance: 2ae111f7-4eaa-46c2-ab97-907daa913834] Not allocating networking since 'none' was specified. {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 625.031380] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a9a09600-a9d3-49d5-8805-1ca8f0f010c9 tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] Lock "44fca05f-51db-4252-bcf8-6bcad37a6147" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 5.090s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 625.795711] env[63028]: DEBUG nova.compute.manager [None req-ce8dbf78-89da-47a1-a8ca-68ee45531f5f tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] [instance: 2ae111f7-4eaa-46c2-ab97-907daa913834] Start building block device mappings for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 625.804189] env[63028]: DEBUG oslo_vmware.api [None req-d9b105ba-e8d2-4569-8e3f-130dc12b502c tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Task: {'id': task-2735058, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.198864} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 625.811165] env[63028]: DEBUG nova.network.neutron [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] [instance: ddf20137-4d63-4c7a-b519-445719265e1d] Successfully updated port: 7efed411-b25a-4981-919a-66c96dd949ee {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 625.821413] env[63028]: DEBUG nova.compute.manager [req-745f3720-b613-4f1c-9a73-18b873cc9f3e req-1baa1cec-b7e5-424a-a8dc-e86eaefb23c4 service nova] [instance: 413f7fea-452b-463f-b396-cdd29e8ffa91] Received event network-changed-892c8e3d-851e-4ad1-bbab-938e49f4cba1 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 625.821413] env[63028]: DEBUG nova.compute.manager [req-745f3720-b613-4f1c-9a73-18b873cc9f3e req-1baa1cec-b7e5-424a-a8dc-e86eaefb23c4 service nova] [instance: 413f7fea-452b-463f-b396-cdd29e8ffa91] Refreshing instance network info cache due to event network-changed-892c8e3d-851e-4ad1-bbab-938e49f4cba1. 
{{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 625.821614] env[63028]: DEBUG oslo_concurrency.lockutils [req-745f3720-b613-4f1c-9a73-18b873cc9f3e req-1baa1cec-b7e5-424a-a8dc-e86eaefb23c4 service nova] Acquiring lock "refresh_cache-413f7fea-452b-463f-b396-cdd29e8ffa91" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 625.821676] env[63028]: DEBUG oslo_concurrency.lockutils [req-745f3720-b613-4f1c-9a73-18b873cc9f3e req-1baa1cec-b7e5-424a-a8dc-e86eaefb23c4 service nova] Acquired lock "refresh_cache-413f7fea-452b-463f-b396-cdd29e8ffa91" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 625.822046] env[63028]: DEBUG nova.network.neutron [req-745f3720-b613-4f1c-9a73-18b873cc9f3e req-1baa1cec-b7e5-424a-a8dc-e86eaefb23c4 service nova] [instance: 413f7fea-452b-463f-b396-cdd29e8ffa91] Refreshing network info cache for port 892c8e3d-851e-4ad1-bbab-938e49f4cba1 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 625.824521] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-d9b105ba-e8d2-4569-8e3f-130dc12b502c tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 625.824638] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-d9b105ba-e8d2-4569-8e3f-130dc12b502c tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] [instance: 0dbafad1-ab21-439d-bc8e-e447ac33304e] Deleted contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 625.824807] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-d9b105ba-e8d2-4569-8e3f-130dc12b502c tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] [instance: 0dbafad1-ab21-439d-bc8e-e447ac33304e] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 625.824970] env[63028]: INFO nova.compute.manager [None req-d9b105ba-e8d2-4569-8e3f-130dc12b502c tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] [instance: 0dbafad1-ab21-439d-bc8e-e447ac33304e] Took 3.41 seconds to destroy the instance on the hypervisor. [ 625.825227] env[63028]: DEBUG oslo.service.loopingcall [None req-d9b105ba-e8d2-4569-8e3f-130dc12b502c tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 625.827804] env[63028]: DEBUG nova.compute.manager [-] [instance: 0dbafad1-ab21-439d-bc8e-e447ac33304e] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 625.827967] env[63028]: DEBUG nova.network.neutron [-] [instance: 0dbafad1-ab21-439d-bc8e-e447ac33304e] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 625.843422] env[63028]: DEBUG oslo_vmware.api [None req-9afbf1dd-406f-4fbb-ac83-9f6278345562 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Task: {'id': task-2735048, 'name': RemoveSnapshot_Task, 'duration_secs': 3.564115} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 625.843833] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735051, 'name': CreateVM_Task, 'duration_secs': 3.078868} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 625.848267] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-9afbf1dd-406f-4fbb-ac83-9f6278345562 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: e20ed04f-205b-4aa9-b8b6-e352cd237412] Deleted Snapshot of the VM instance {{(pid=63028) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 625.850919] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9b1cd3c1-5e9a-43b6-9efb-1baf879ae0c0] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 625.854043] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9e63bf4e-0a01-438b-b252-bf0645d18ec0 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 625.854043] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9e63bf4e-0a01-438b-b252-bf0645d18ec0 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 625.854043] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9e63bf4e-0a01-438b-b252-bf0645d18ec0 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 625.854988] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c4d687c8-7b93-48a2-a95f-f3a6f993f2e7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.864487] env[63028]: DEBUG oslo_vmware.api [None req-9e63bf4e-0a01-438b-b252-bf0645d18ec0 tempest-ServersWithSpecificFlavorTestJSON-243894015 
tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Waiting for the task: (returnval){ [ 625.864487] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52186588-28b5-0574-eb17-796109b6a23f" [ 625.864487] env[63028]: _type = "Task" [ 625.864487] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 625.879455] env[63028]: DEBUG oslo_vmware.api [None req-9e63bf4e-0a01-438b-b252-bf0645d18ec0 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52186588-28b5-0574-eb17-796109b6a23f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 625.910232] env[63028]: DEBUG nova.network.neutron [-] [instance: 0dbafad1-ab21-439d-bc8e-e447ac33304e] Instance cache missing network info. {{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 626.084277] env[63028]: DEBUG nova.network.neutron [req-a4b5738d-a884-4e29-bfce-1ec071a6c8a7 req-c285527d-6379-4c08-9eac-a5b92da21743 service nova] [instance: c7a3f2c6-8368-49cc-9737-ea1d836f1783] Updated VIF entry in instance network info cache for port dbc0e58f-b646-4c47-becf-ba9c242ca9aa. {{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 626.084639] env[63028]: DEBUG nova.network.neutron [req-a4b5738d-a884-4e29-bfce-1ec071a6c8a7 req-c285527d-6379-4c08-9eac-a5b92da21743 service nova] [instance: c7a3f2c6-8368-49cc-9737-ea1d836f1783] Updating instance_info_cache with network_info: [{"id": "dbc0e58f-b646-4c47-becf-ba9c242ca9aa", "address": "fa:16:3e:f3:d3:54", "network": {"id": "5f8a710c-b049-40fe-8cc4-00c97e954fd9", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1979325486-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "381de553d9da4c94b923d790c12a28a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ea16b8c-ebf6-4bd4-ab67-ddb472e7b460", "external-id": "nsx-vlan-transportzone-156", "segmentation_id": 156, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdbc0e58f-b6", "ovs_interfaceid": "dbc0e58f-b646-4c47-becf-ba9c242ca9aa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 626.137970] env[63028]: DEBUG nova.compute.manager [req-a51f1330-93a3-4c97-a991-f6c190e3f64e req-4f02d17a-4a6b-47d6-80a5-af4f43d8c834 service nova] [instance: c7a3f2c6-8368-49cc-9737-ea1d836f1783] Received event network-vif-deleted-dbc0e58f-b646-4c47-becf-ba9c242ca9aa {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 626.138224] env[63028]: INFO nova.compute.manager [req-a51f1330-93a3-4c97-a991-f6c190e3f64e req-4f02d17a-4a6b-47d6-80a5-af4f43d8c834 service nova] [instance: 
c7a3f2c6-8368-49cc-9737-ea1d836f1783] Neutron deleted interface dbc0e58f-b646-4c47-becf-ba9c242ca9aa; detaching it from the instance and deleting it from the info cache [ 626.138362] env[63028]: DEBUG nova.network.neutron [req-a51f1330-93a3-4c97-a991-f6c190e3f64e req-4f02d17a-4a6b-47d6-80a5-af4f43d8c834 service nova] [instance: c7a3f2c6-8368-49cc-9737-ea1d836f1783] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 626.326609] env[63028]: DEBUG nova.network.neutron [-] [instance: c7a3f2c6-8368-49cc-9737-ea1d836f1783] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 626.331876] env[63028]: DEBUG oslo_concurrency.lockutils [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Acquiring lock "refresh_cache-ddf20137-4d63-4c7a-b519-445719265e1d" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 626.331999] env[63028]: DEBUG oslo_concurrency.lockutils [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Acquired lock "refresh_cache-ddf20137-4d63-4c7a-b519-445719265e1d" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 626.332134] env[63028]: DEBUG nova.network.neutron [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] [instance: ddf20137-4d63-4c7a-b519-445719265e1d] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 626.362064] env[63028]: WARNING nova.compute.manager [None req-9afbf1dd-406f-4fbb-ac83-9f6278345562 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: e20ed04f-205b-4aa9-b8b6-e352cd237412] Image not found during snapshot: nova.exception.ImageNotFound: Image 8e636325-3081-496c-bbf4-77d0e496843f could not be found. [ 626.384386] env[63028]: DEBUG oslo_vmware.api [None req-9e63bf4e-0a01-438b-b252-bf0645d18ec0 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52186588-28b5-0574-eb17-796109b6a23f, 'name': SearchDatastore_Task, 'duration_secs': 0.026155} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 626.385920] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9e63bf4e-0a01-438b-b252-bf0645d18ec0 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 626.385920] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-9e63bf4e-0a01-438b-b252-bf0645d18ec0 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] [instance: 9b1cd3c1-5e9a-43b6-9efb-1baf879ae0c0] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 626.385920] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9e63bf4e-0a01-438b-b252-bf0645d18ec0 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 626.385920] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9e63bf4e-0a01-438b-b252-bf0645d18ec0 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 626.386173] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-9e63bf4e-0a01-438b-b252-bf0645d18ec0 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 626.386533] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-53f065ae-5b8b-4964-801d-bbd6f9ac9fda {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.400526] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-9e63bf4e-0a01-438b-b252-bf0645d18ec0 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 626.400757] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-9e63bf4e-0a01-438b-b252-bf0645d18ec0 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 626.402060] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6a927fce-6608-4f60-9ef4-80f9d0f0807f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.412568] env[63028]: DEBUG nova.network.neutron [-] [instance: 0dbafad1-ab21-439d-bc8e-e447ac33304e] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 626.413971] env[63028]: DEBUG oslo_vmware.api [None req-9e63bf4e-0a01-438b-b252-bf0645d18ec0 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Waiting for the task: (returnval){ [ 626.413971] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]523ba68b-124e-f809-a066-9fe9143ad8a4" [ 626.413971] env[63028]: _type = "Task" [ 626.413971] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 626.424267] env[63028]: DEBUG oslo_vmware.api [None req-9e63bf4e-0a01-438b-b252-bf0645d18ec0 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]523ba68b-124e-f809-a066-9fe9143ad8a4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 626.426208] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79011433-2088-4007-b00e-3e6d1331d412 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.435781] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15b19d03-9ce1-44cc-914e-ceeb33189eec {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.470521] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b911661c-9b4b-4aeb-9f74-7c07cb4e9fef {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.479961] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-459529bc-4dff-4a7c-9a37-a835c9515b57 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.495407] env[63028]: DEBUG nova.compute.provider_tree [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Updating inventory in ProviderTree for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 626.592247] env[63028]: DEBUG oslo_concurrency.lockutils 
[req-a4b5738d-a884-4e29-bfce-1ec071a6c8a7 req-c285527d-6379-4c08-9eac-a5b92da21743 service nova] Releasing lock "refresh_cache-c7a3f2c6-8368-49cc-9737-ea1d836f1783" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 626.592724] env[63028]: DEBUG nova.compute.manager [req-a4b5738d-a884-4e29-bfce-1ec071a6c8a7 req-c285527d-6379-4c08-9eac-a5b92da21743 service nova] [instance: 9b1cd3c1-5e9a-43b6-9efb-1baf879ae0c0] Received event network-vif-plugged-8f21ea1a-db48-4941-98ae-796e9dfcf6b1 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 626.592845] env[63028]: DEBUG oslo_concurrency.lockutils [req-a4b5738d-a884-4e29-bfce-1ec071a6c8a7 req-c285527d-6379-4c08-9eac-a5b92da21743 service nova] Acquiring lock "9b1cd3c1-5e9a-43b6-9efb-1baf879ae0c0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 626.593085] env[63028]: DEBUG oslo_concurrency.lockutils [req-a4b5738d-a884-4e29-bfce-1ec071a6c8a7 req-c285527d-6379-4c08-9eac-a5b92da21743 service nova] Lock "9b1cd3c1-5e9a-43b6-9efb-1baf879ae0c0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 626.593272] env[63028]: DEBUG oslo_concurrency.lockutils [req-a4b5738d-a884-4e29-bfce-1ec071a6c8a7 req-c285527d-6379-4c08-9eac-a5b92da21743 service nova] Lock "9b1cd3c1-5e9a-43b6-9efb-1baf879ae0c0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 626.593444] env[63028]: DEBUG nova.compute.manager [req-a4b5738d-a884-4e29-bfce-1ec071a6c8a7 req-c285527d-6379-4c08-9eac-a5b92da21743 service nova] [instance: 9b1cd3c1-5e9a-43b6-9efb-1baf879ae0c0] No waiting events found dispatching network-vif-plugged-8f21ea1a-db48-4941-98ae-796e9dfcf6b1 {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 626.593626] env[63028]: WARNING nova.compute.manager [req-a4b5738d-a884-4e29-bfce-1ec071a6c8a7 req-c285527d-6379-4c08-9eac-a5b92da21743 service nova] [instance: 9b1cd3c1-5e9a-43b6-9efb-1baf879ae0c0] Received unexpected event network-vif-plugged-8f21ea1a-db48-4941-98ae-796e9dfcf6b1 for instance with vm_state building and task_state spawning. [ 626.593791] env[63028]: DEBUG nova.compute.manager [req-a4b5738d-a884-4e29-bfce-1ec071a6c8a7 req-c285527d-6379-4c08-9eac-a5b92da21743 service nova] [instance: 9b1cd3c1-5e9a-43b6-9efb-1baf879ae0c0] Received event network-changed-8f21ea1a-db48-4941-98ae-796e9dfcf6b1 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 626.593960] env[63028]: DEBUG nova.compute.manager [req-a4b5738d-a884-4e29-bfce-1ec071a6c8a7 req-c285527d-6379-4c08-9eac-a5b92da21743 service nova] [instance: 9b1cd3c1-5e9a-43b6-9efb-1baf879ae0c0] Refreshing instance network info cache due to event network-changed-8f21ea1a-db48-4941-98ae-796e9dfcf6b1. 
{{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 626.594155] env[63028]: DEBUG oslo_concurrency.lockutils [req-a4b5738d-a884-4e29-bfce-1ec071a6c8a7 req-c285527d-6379-4c08-9eac-a5b92da21743 service nova] Acquiring lock "refresh_cache-9b1cd3c1-5e9a-43b6-9efb-1baf879ae0c0" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 626.594308] env[63028]: DEBUG oslo_concurrency.lockutils [req-a4b5738d-a884-4e29-bfce-1ec071a6c8a7 req-c285527d-6379-4c08-9eac-a5b92da21743 service nova] Acquired lock "refresh_cache-9b1cd3c1-5e9a-43b6-9efb-1baf879ae0c0" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 626.594472] env[63028]: DEBUG nova.network.neutron [req-a4b5738d-a884-4e29-bfce-1ec071a6c8a7 req-c285527d-6379-4c08-9eac-a5b92da21743 service nova] [instance: 9b1cd3c1-5e9a-43b6-9efb-1baf879ae0c0] Refreshing network info cache for port 8f21ea1a-db48-4941-98ae-796e9dfcf6b1 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 626.642232] env[63028]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fea4d412-dc78-4064-9777-8504dc76f023 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.656037] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3e18f0c-daa9-4608-835c-737a5767bcd5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.693731] env[63028]: DEBUG nova.compute.manager [req-a51f1330-93a3-4c97-a991-f6c190e3f64e req-4f02d17a-4a6b-47d6-80a5-af4f43d8c834 service nova] [instance: c7a3f2c6-8368-49cc-9737-ea1d836f1783] Detach interface failed, port_id=dbc0e58f-b646-4c47-becf-ba9c242ca9aa, reason: Instance c7a3f2c6-8368-49cc-9737-ea1d836f1783 could not be found. {{(pid=63028) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 626.812192] env[63028]: DEBUG nova.compute.manager [None req-ce8dbf78-89da-47a1-a8ca-68ee45531f5f tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] [instance: 2ae111f7-4eaa-46c2-ab97-907daa913834] Start spawning the instance on the hypervisor. {{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 626.815569] env[63028]: DEBUG nova.network.neutron [req-745f3720-b613-4f1c-9a73-18b873cc9f3e req-1baa1cec-b7e5-424a-a8dc-e86eaefb23c4 service nova] [instance: 413f7fea-452b-463f-b396-cdd29e8ffa91] Updated VIF entry in instance network info cache for port 892c8e3d-851e-4ad1-bbab-938e49f4cba1. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 626.815920] env[63028]: DEBUG nova.network.neutron [req-745f3720-b613-4f1c-9a73-18b873cc9f3e req-1baa1cec-b7e5-424a-a8dc-e86eaefb23c4 service nova] [instance: 413f7fea-452b-463f-b396-cdd29e8ffa91] Updating instance_info_cache with network_info: [{"id": "892c8e3d-851e-4ad1-bbab-938e49f4cba1", "address": "fa:16:3e:2c:e4:c1", "network": {"id": "5f8a710c-b049-40fe-8cc4-00c97e954fd9", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1979325486-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.149", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "381de553d9da4c94b923d790c12a28a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ea16b8c-ebf6-4bd4-ab67-ddb472e7b460", "external-id": "nsx-vlan-transportzone-156", "segmentation_id": 156, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap892c8e3d-85", "ovs_interfaceid": "892c8e3d-851e-4ad1-bbab-938e49f4cba1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 626.834881] env[63028]: INFO nova.compute.manager [-] [instance: c7a3f2c6-8368-49cc-9737-ea1d836f1783] Took 2.47 seconds to deallocate network for instance. 
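The PowerOffVM_Task, UnregisterVM, DeleteDatastoreFile_Task and CreateVM_Task entries above all follow the same wait-and-poll pattern: the driver logs "Waiting for the task: (returnval){ ... }", then repeatedly polls the task ("progress is 0%", "progress is 25%", ...) until it reports "completed successfully" with a duration_secs value. The sketch below is a minimal, hedged illustration of that polling loop only; it is not the oslo.vmware implementation referenced in the log (oslo_vmware/api.py, wait_for_task / _poll_task), and the session interface, helper names and fixed poll interval are assumptions made purely for illustration.

```python
# Illustrative sketch of the task polling seen above (PowerOffVM_Task,
# DeleteDatastoreFile_Task, CreateVM_Task, ...). Not the oslo.vmware code;
# the session interface, names and poll interval are assumed for this sketch.
import time


class TaskFailed(Exception):
    """Raised when the vCenter task ends in an error state."""


def wait_for_task_sketch(session, task_ref, poll_interval=0.5):
    """Poll a vCenter task until it reaches a terminal state.

    ``session`` is assumed to expose ``get_task_info(task_ref)`` returning
    an object with ``state`` ('queued', 'running', 'success', 'error'),
    ``progress`` and ``error`` attributes -- a stand-in for the property
    collector calls visible in the log.
    """
    while True:
        info = session.get_task_info(task_ref)
        if info.state in ('queued', 'running'):
            # Corresponds to lines such as "... PowerOffVM_Task} progress is 0%."
            print(f"Task {task_ref}: progress is {info.progress or 0}%")
            time.sleep(poll_interval)
            continue
        if info.state == 'success':
            # Corresponds to "... 'duration_secs': ...} completed successfully."
            return info
        raise TaskFailed(getattr(info, 'error', 'unknown error'))
```

In the trace above, each such wait is bracketed by a "Waiting for the task" entry and one or more progress entries, and ends with a "completed successfully" entry carrying the measured duration_secs.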
[ 626.843934] env[63028]: DEBUG nova.virt.hardware [None req-ce8dbf78-89da-47a1-a8ca-68ee45531f5f tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 626.844176] env[63028]: DEBUG nova.virt.hardware [None req-ce8dbf78-89da-47a1-a8ca-68ee45531f5f tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 626.844340] env[63028]: DEBUG nova.virt.hardware [None req-ce8dbf78-89da-47a1-a8ca-68ee45531f5f tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 626.844484] env[63028]: DEBUG nova.virt.hardware [None req-ce8dbf78-89da-47a1-a8ca-68ee45531f5f tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 626.844634] env[63028]: DEBUG nova.virt.hardware [None req-ce8dbf78-89da-47a1-a8ca-68ee45531f5f tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 626.845253] env[63028]: DEBUG nova.virt.hardware [None req-ce8dbf78-89da-47a1-a8ca-68ee45531f5f tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 626.845475] env[63028]: DEBUG nova.virt.hardware [None req-ce8dbf78-89da-47a1-a8ca-68ee45531f5f tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 626.845716] env[63028]: DEBUG nova.virt.hardware [None req-ce8dbf78-89da-47a1-a8ca-68ee45531f5f tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 626.845900] env[63028]: DEBUG nova.virt.hardware [None req-ce8dbf78-89da-47a1-a8ca-68ee45531f5f tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Got 1 possible topologies 
{{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 626.846081] env[63028]: DEBUG nova.virt.hardware [None req-ce8dbf78-89da-47a1-a8ca-68ee45531f5f tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 626.846294] env[63028]: DEBUG nova.virt.hardware [None req-ce8dbf78-89da-47a1-a8ca-68ee45531f5f tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 626.847453] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53c94fc9-c60d-4bad-bd1c-01335e429545 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.865461] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ca90edf-1294-4943-8350-2d539e7075ac {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.886502] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-ce8dbf78-89da-47a1-a8ca-68ee45531f5f tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] [instance: 2ae111f7-4eaa-46c2-ab97-907daa913834] Instance VIF info [] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 626.893016] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce8dbf78-89da-47a1-a8ca-68ee45531f5f tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Creating folder: Project (3dc183aa6e234633b7e5a8aae4eb8b19). Parent ref: group-v550570. {{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 626.894106] env[63028]: DEBUG nova.network.neutron [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] [instance: ddf20137-4d63-4c7a-b519-445719265e1d] Instance cache missing network info. {{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 626.896073] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e7f4719d-ef3c-47d2-a14d-05cf7f836527 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.910705] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-ce8dbf78-89da-47a1-a8ca-68ee45531f5f tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Created folder: Project (3dc183aa6e234633b7e5a8aae4eb8b19) in parent group-v550570. [ 626.910950] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce8dbf78-89da-47a1-a8ca-68ee45531f5f tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Creating folder: Instances. Parent ref: group-v550641. 
{{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 626.911180] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-35b77c4e-6dc7-477b-9652-0d7238f2a79b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.914912] env[63028]: INFO nova.compute.manager [-] [instance: 0dbafad1-ab21-439d-bc8e-e447ac33304e] Took 1.09 seconds to deallocate network for instance. [ 626.928024] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-ce8dbf78-89da-47a1-a8ca-68ee45531f5f tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Created folder: Instances in parent group-v550641. [ 626.928286] env[63028]: DEBUG oslo.service.loopingcall [None req-ce8dbf78-89da-47a1-a8ca-68ee45531f5f tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 626.930879] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2ae111f7-4eaa-46c2-ab97-907daa913834] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 626.941019] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0d3544ea-d110-4c75-8ad8-9c04c71f2e79 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.954430] env[63028]: DEBUG oslo_vmware.api [None req-9e63bf4e-0a01-438b-b252-bf0645d18ec0 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]523ba68b-124e-f809-a066-9fe9143ad8a4, 'name': SearchDatastore_Task, 'duration_secs': 0.032445} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 626.955701] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2b8dca3d-88dc-4426-9d01-f078c33e0ff3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.961881] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 626.961881] env[63028]: value = "task-2735061" [ 626.961881] env[63028]: _type = "Task" [ 626.961881] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 626.963564] env[63028]: DEBUG oslo_vmware.api [None req-9e63bf4e-0a01-438b-b252-bf0645d18ec0 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Waiting for the task: (returnval){ [ 626.963564] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]522cb1c8-6704-ea7e-5dac-6b747a32bdf0" [ 626.963564] env[63028]: _type = "Task" [ 626.963564] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 626.978025] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735061, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 626.980692] env[63028]: DEBUG oslo_vmware.api [None req-9e63bf4e-0a01-438b-b252-bf0645d18ec0 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]522cb1c8-6704-ea7e-5dac-6b747a32bdf0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 627.024806] env[63028]: ERROR nova.scheduler.client.report [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] [req-4d9517b6-c5e7-44df-bcbf-91fe577493db] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-4d9517b6-c5e7-44df-bcbf-91fe577493db"}]} [ 627.048699] env[63028]: DEBUG nova.scheduler.client.report [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Refreshing inventories for resource provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 627.064760] env[63028]: DEBUG nova.scheduler.client.report [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Updating ProviderTree inventory for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 627.065376] env[63028]: DEBUG nova.compute.provider_tree [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Updating inventory in ProviderTree for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 627.080731] env[63028]: DEBUG nova.scheduler.client.report [None req-e5ece637-2108-49f8-8679-958eee5e45ec 
tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Refreshing aggregate associations for resource provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2, aggregates: None {{(pid=63028) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 627.087354] env[63028]: DEBUG nova.network.neutron [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] [instance: ddf20137-4d63-4c7a-b519-445719265e1d] Updating instance_info_cache with network_info: [{"id": "7efed411-b25a-4981-919a-66c96dd949ee", "address": "fa:16:3e:f7:4a:16", "network": {"id": "863c7a3c-a546-44b9-b96f-a1782de90239", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-2034296608-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5b1989df63834513aa0b192beb4609ba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7cd4cea-788c-4e6d-9df8-5d83838e2e2a", "external-id": "nsx-vlan-transportzone-361", "segmentation_id": 361, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7efed411-b2", "ovs_interfaceid": "7efed411-b25a-4981-919a-66c96dd949ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 627.100779] env[63028]: DEBUG nova.scheduler.client.report [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Refreshing trait associations for resource provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=63028) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 627.228893] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7b8ec20a-6c04-49c4-9391-64bfcf0e97f9 tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] Acquiring lock "44fca05f-51db-4252-bcf8-6bcad37a6147" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 627.228893] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7b8ec20a-6c04-49c4-9391-64bfcf0e97f9 tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] Lock "44fca05f-51db-4252-bcf8-6bcad37a6147" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 627.228893] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7b8ec20a-6c04-49c4-9391-64bfcf0e97f9 tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] Acquiring lock 
"44fca05f-51db-4252-bcf8-6bcad37a6147-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 627.229060] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7b8ec20a-6c04-49c4-9391-64bfcf0e97f9 tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] Lock "44fca05f-51db-4252-bcf8-6bcad37a6147-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 627.230225] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7b8ec20a-6c04-49c4-9391-64bfcf0e97f9 tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] Lock "44fca05f-51db-4252-bcf8-6bcad37a6147-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 627.232452] env[63028]: INFO nova.compute.manager [None req-7b8ec20a-6c04-49c4-9391-64bfcf0e97f9 tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] [instance: 44fca05f-51db-4252-bcf8-6bcad37a6147] Terminating instance [ 627.318234] env[63028]: DEBUG oslo_concurrency.lockutils [req-745f3720-b613-4f1c-9a73-18b873cc9f3e req-1baa1cec-b7e5-424a-a8dc-e86eaefb23c4 service nova] Releasing lock "refresh_cache-413f7fea-452b-463f-b396-cdd29e8ffa91" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 627.361256] env[63028]: DEBUG oslo_concurrency.lockutils [None req-03cc79ea-bccb-49a4-aa10-c7474e32f926 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 627.377642] env[63028]: DEBUG nova.network.neutron [req-a4b5738d-a884-4e29-bfce-1ec071a6c8a7 req-c285527d-6379-4c08-9eac-a5b92da21743 service nova] [instance: 9b1cd3c1-5e9a-43b6-9efb-1baf879ae0c0] Updated VIF entry in instance network info cache for port 8f21ea1a-db48-4941-98ae-796e9dfcf6b1. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 627.378024] env[63028]: DEBUG nova.network.neutron [req-a4b5738d-a884-4e29-bfce-1ec071a6c8a7 req-c285527d-6379-4c08-9eac-a5b92da21743 service nova] [instance: 9b1cd3c1-5e9a-43b6-9efb-1baf879ae0c0] Updating instance_info_cache with network_info: [{"id": "8f21ea1a-db48-4941-98ae-796e9dfcf6b1", "address": "fa:16:3e:5e:61:04", "network": {"id": "bd3f74f8-d12b-4d2e-9aa9-ca1c9e766bf4", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1073484567-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "178b95ba550d453db2b9868e72a8c93f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc1e16db-ad3b-4b7f-ab64-4609c87abac0", "external-id": "nsx-vlan-transportzone-500", "segmentation_id": 500, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8f21ea1a-db", "ovs_interfaceid": "8f21ea1a-db48-4941-98ae-796e9dfcf6b1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 627.401476] env[63028]: DEBUG oslo_vmware.rw_handles [None req-53602f48-db0a-4b2c-b359-fb6e89b1101f tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52659d9a-1c15-d407-7c5d-017202ed6fa4/disk-0.vmdk. {{(pid=63028) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 627.403399] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbb5af80-20df-4906-9179-340355cd80bf {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.410656] env[63028]: DEBUG oslo_vmware.rw_handles [None req-53602f48-db0a-4b2c-b359-fb6e89b1101f tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52659d9a-1c15-d407-7c5d-017202ed6fa4/disk-0.vmdk is in state: ready. {{(pid=63028) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 627.410656] env[63028]: ERROR oslo_vmware.rw_handles [None req-53602f48-db0a-4b2c-b359-fb6e89b1101f tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52659d9a-1c15-d407-7c5d-017202ed6fa4/disk-0.vmdk due to incomplete transfer. 
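The rw_handles entries just above record the close-out path for the image upload: the lease state is fetched, and because the transfer did not finish, the lease is aborted rather than completed. A minimal sketch of that decision follows; get_lease_state, complete_lease and abort_lease are hypothetical callables standing in for the vSphere HttpNfcLease operations (they are not oslo_vmware APIs), and the URL is a placeholder.

def release_lease(lease_url, transferred_bytes, expected_bytes,
                  get_lease_state, complete_lease, abort_lease):
    """Finish an NFC lease: complete it on a full transfer, abort it otherwise.

    Only the control flow mirrors the log; the three callables are
    hypothetical hooks for the actual vSphere lease calls.
    """
    state = get_lease_state(lease_url)
    if state != "ready":
        # Nothing to release if the lease already expired or errored out.
        return
    if transferred_bytes == expected_bytes:
        complete_lease(lease_url)
    else:
        # Incomplete transfer, as in the "Aborting lease" entry above.
        abort_lease(lease_url)


# Example invocation with stub callables; prints "abort https://esx.example/nfc/disk-0.vmdk".
release_lease(
    "https://esx.example/nfc/disk-0.vmdk",
    transferred_bytes=10,
    expected_bytes=20,
    get_lease_state=lambda url: "ready",
    complete_lease=lambda url: print("complete", url),
    abort_lease=lambda url: print("abort", url),
)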
[ 627.410807] env[63028]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-e8de75db-08e1-4082-becf-977c3d0c93a8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.419811] env[63028]: DEBUG oslo_vmware.rw_handles [None req-53602f48-db0a-4b2c-b359-fb6e89b1101f tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52659d9a-1c15-d407-7c5d-017202ed6fa4/disk-0.vmdk. {{(pid=63028) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 627.420080] env[63028]: DEBUG nova.virt.vmwareapi.images [None req-53602f48-db0a-4b2c-b359-fb6e89b1101f tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] [instance: c9cc1ac7-06c6-415b-86ce-daf4849bfc05] Uploaded image f9287d9b-d080-4cd4-9e72-70de6b64cc6d to the Glance image server {{(pid=63028) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 627.422525] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-53602f48-db0a-4b2c-b359-fb6e89b1101f tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] [instance: c9cc1ac7-06c6-415b-86ce-daf4849bfc05] Destroying the VM {{(pid=63028) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 627.422951] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-932a902b-1772-4be5-a902-b1d3f326a7d6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.430531] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d9b105ba-e8d2-4569-8e3f-130dc12b502c tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 627.432045] env[63028]: DEBUG oslo_vmware.api [None req-53602f48-db0a-4b2c-b359-fb6e89b1101f tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Waiting for the task: (returnval){ [ 627.432045] env[63028]: value = "task-2735062" [ 627.432045] env[63028]: _type = "Task" [ 627.432045] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 627.448736] env[63028]: DEBUG oslo_vmware.api [None req-53602f48-db0a-4b2c-b359-fb6e89b1101f tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Task: {'id': task-2735062, 'name': Destroy_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 627.482598] env[63028]: DEBUG oslo_vmware.api [None req-9e63bf4e-0a01-438b-b252-bf0645d18ec0 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]522cb1c8-6704-ea7e-5dac-6b747a32bdf0, 'name': SearchDatastore_Task, 'duration_secs': 0.029156} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 627.486796] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9e63bf4e-0a01-438b-b252-bf0645d18ec0 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 627.487124] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e63bf4e-0a01-438b-b252-bf0645d18ec0 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] 9b1cd3c1-5e9a-43b6-9efb-1baf879ae0c0/9b1cd3c1-5e9a-43b6-9efb-1baf879ae0c0.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 627.487370] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735061, 'name': CreateVM_Task, 'duration_secs': 0.380883} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 627.487755] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6736effd-1e22-4180-804b-ffc03e416ed4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.489695] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2ae111f7-4eaa-46c2-ab97-907daa913834] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 627.490154] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ce8dbf78-89da-47a1-a8ca-68ee45531f5f tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 627.490322] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ce8dbf78-89da-47a1-a8ca-68ee45531f5f tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 627.491192] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ce8dbf78-89da-47a1-a8ca-68ee45531f5f tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 627.492060] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0332613d-20c5-40d8-9508-0f9fc6a7f36c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.497366] env[63028]: DEBUG oslo_vmware.api [None req-9e63bf4e-0a01-438b-b252-bf0645d18ec0 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Waiting for 
the task: (returnval){ [ 627.497366] env[63028]: value = "task-2735063" [ 627.497366] env[63028]: _type = "Task" [ 627.497366] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 627.503746] env[63028]: DEBUG oslo_vmware.api [None req-ce8dbf78-89da-47a1-a8ca-68ee45531f5f tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Waiting for the task: (returnval){ [ 627.503746] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5238c27f-cb3d-78b2-3244-d93f34ef848c" [ 627.503746] env[63028]: _type = "Task" [ 627.503746] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 627.512867] env[63028]: DEBUG oslo_vmware.api [None req-9e63bf4e-0a01-438b-b252-bf0645d18ec0 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Task: {'id': task-2735063, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 627.521031] env[63028]: DEBUG oslo_vmware.api [None req-ce8dbf78-89da-47a1-a8ca-68ee45531f5f tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5238c27f-cb3d-78b2-3244-d93f34ef848c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 627.591286] env[63028]: DEBUG oslo_concurrency.lockutils [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Releasing lock "refresh_cache-ddf20137-4d63-4c7a-b519-445719265e1d" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 627.591928] env[63028]: DEBUG nova.compute.manager [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] [instance: ddf20137-4d63-4c7a-b519-445719265e1d] Instance network_info: |[{"id": "7efed411-b25a-4981-919a-66c96dd949ee", "address": "fa:16:3e:f7:4a:16", "network": {"id": "863c7a3c-a546-44b9-b96f-a1782de90239", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-2034296608-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5b1989df63834513aa0b192beb4609ba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7cd4cea-788c-4e6d-9df8-5d83838e2e2a", "external-id": "nsx-vlan-transportzone-361", "segmentation_id": 361, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7efed411-b2", "ovs_interfaceid": "7efed411-b25a-4981-919a-66c96dd949ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 627.592457] env[63028]: DEBUG 
nova.virt.vmwareapi.vmops [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] [instance: ddf20137-4d63-4c7a-b519-445719265e1d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f7:4a:16', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a7cd4cea-788c-4e6d-9df8-5d83838e2e2a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7efed411-b25a-4981-919a-66c96dd949ee', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 627.602706] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Creating folder: Project (5b1989df63834513aa0b192beb4609ba). Parent ref: group-v550570. {{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 627.603249] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d62ffe0a-7bea-440c-b858-d5ef041a4904 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.616975] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Created folder: Project (5b1989df63834513aa0b192beb4609ba) in parent group-v550570. [ 627.617258] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Creating folder: Instances. Parent ref: group-v550644. {{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 627.617690] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4147e47e-0512-4dd6-a4f6-18bedb88dbbb {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.631711] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Created folder: Instances in parent group-v550644. [ 627.632028] env[63028]: DEBUG oslo.service.loopingcall [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 627.632221] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ddf20137-4d63-4c7a-b519-445719265e1d] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 627.632467] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-799e7192-5ada-4485-9d19-ac39c7e4f881 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.657789] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 627.657789] env[63028]: value = "task-2735066" [ 627.657789] env[63028]: _type = "Task" [ 627.657789] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 627.667406] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735066, 'name': CreateVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 627.671364] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7df728a3-d1de-49ab-b909-6eed884854fe {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.680061] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01191ca1-3e0d-493e-a767-610b4ca461df {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.715809] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1b1bc90-6218-459c-b2c4-d128c0a55ec8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.726835] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-365e41ee-e205-4824-8bcb-af23265b59e3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.742105] env[63028]: DEBUG nova.compute.manager [None req-7b8ec20a-6c04-49c4-9391-64bfcf0e97f9 tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] [instance: 44fca05f-51db-4252-bcf8-6bcad37a6147] Start destroying the instance on the hypervisor. {{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 627.742317] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-7b8ec20a-6c04-49c4-9391-64bfcf0e97f9 tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] [instance: 44fca05f-51db-4252-bcf8-6bcad37a6147] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 627.742858] env[63028]: DEBUG nova.compute.provider_tree [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Updating inventory in ProviderTree for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 627.744710] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79f83a7f-0721-477d-a1b2-4f03ee32c2e6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.753512] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b8ec20a-6c04-49c4-9391-64bfcf0e97f9 tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] [instance: 44fca05f-51db-4252-bcf8-6bcad37a6147] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 627.754387] env[63028]: DEBUG 
oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2c9e9db7-d08a-4207-afc2-529df4c17bd9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.761956] env[63028]: DEBUG oslo_vmware.api [None req-7b8ec20a-6c04-49c4-9391-64bfcf0e97f9 tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] Waiting for the task: (returnval){ [ 627.761956] env[63028]: value = "task-2735067" [ 627.761956] env[63028]: _type = "Task" [ 627.761956] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 627.772769] env[63028]: DEBUG oslo_vmware.api [None req-7b8ec20a-6c04-49c4-9391-64bfcf0e97f9 tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] Task: {'id': task-2735067, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 627.884417] env[63028]: DEBUG oslo_concurrency.lockutils [req-a4b5738d-a884-4e29-bfce-1ec071a6c8a7 req-c285527d-6379-4c08-9eac-a5b92da21743 service nova] Releasing lock "refresh_cache-9b1cd3c1-5e9a-43b6-9efb-1baf879ae0c0" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 627.952050] env[63028]: DEBUG oslo_vmware.api [None req-53602f48-db0a-4b2c-b359-fb6e89b1101f tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Task: {'id': task-2735062, 'name': Destroy_Task} progress is 33%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.015359] env[63028]: DEBUG oslo_vmware.api [None req-9e63bf4e-0a01-438b-b252-bf0645d18ec0 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Task: {'id': task-2735063, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.019859] env[63028]: DEBUG oslo_vmware.api [None req-ce8dbf78-89da-47a1-a8ca-68ee45531f5f tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5238c27f-cb3d-78b2-3244-d93f34ef848c, 'name': SearchDatastore_Task, 'duration_secs': 0.020265} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 628.020296] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ce8dbf78-89da-47a1-a8ca-68ee45531f5f tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 628.020578] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-ce8dbf78-89da-47a1-a8ca-68ee45531f5f tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] [instance: 2ae111f7-4eaa-46c2-ab97-907daa913834] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 628.020754] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ce8dbf78-89da-47a1-a8ca-68ee45531f5f tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 628.021199] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ce8dbf78-89da-47a1-a8ca-68ee45531f5f tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 628.021199] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-ce8dbf78-89da-47a1-a8ca-68ee45531f5f tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 628.021399] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-49581ccc-0821-4860-8e1c-2f474c839c5e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.035065] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-ce8dbf78-89da-47a1-a8ca-68ee45531f5f tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 628.035065] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-ce8dbf78-89da-47a1-a8ca-68ee45531f5f tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 628.035423] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9f8c5d9c-91e7-4479-8889-488aa97238f0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.044208] env[63028]: DEBUG oslo_vmware.api [None req-ce8dbf78-89da-47a1-a8ca-68ee45531f5f tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Waiting for the task: (returnval){ [ 628.044208] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]521afdbf-b311-2f68-c85d-acf75a633c65" [ 628.044208] env[63028]: _type = "Task" [ 628.044208] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 628.058136] env[63028]: DEBUG oslo_vmware.api [None req-ce8dbf78-89da-47a1-a8ca-68ee45531f5f tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]521afdbf-b311-2f68-c85d-acf75a633c65, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.173480] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735066, 'name': CreateVM_Task} progress is 25%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.271108] env[63028]: ERROR nova.scheduler.client.report [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] [req-e58c42ba-cfdf-4f2c-b73f-42283d215b78] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-e58c42ba-cfdf-4f2c-b73f-42283d215b78"}]} [ 628.277230] env[63028]: DEBUG oslo_vmware.api [None req-7b8ec20a-6c04-49c4-9391-64bfcf0e97f9 tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] Task: {'id': task-2735067, 'name': PowerOffVM_Task, 'duration_secs': 0.505988} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 628.277785] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b8ec20a-6c04-49c4-9391-64bfcf0e97f9 tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] [instance: 44fca05f-51db-4252-bcf8-6bcad37a6147] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 628.277785] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-7b8ec20a-6c04-49c4-9391-64bfcf0e97f9 tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] [instance: 44fca05f-51db-4252-bcf8-6bcad37a6147] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 628.278236] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5e59e32a-89eb-474d-8328-d35e487ee935 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.292032] env[63028]: DEBUG nova.scheduler.client.report [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Refreshing inventories for resource provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 628.306095] env[63028]: DEBUG nova.scheduler.client.report [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Updating ProviderTree inventory for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 628.306095] env[63028]: DEBUG nova.compute.provider_tree [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Updating inventory in ProviderTree for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 628.320979] env[63028]: DEBUG nova.scheduler.client.report [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Refreshing aggregate associations for resource provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2, aggregates: None {{(pid=63028) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 628.336213] env[63028]: DEBUG nova.compute.manager [req-90414c49-054b-4d69-b390-5fca153c919d req-bfee1ace-0c23-4399-8055-90cd07a53aba service nova] [instance: 
413f7fea-452b-463f-b396-cdd29e8ffa91] Received event network-changed-892c8e3d-851e-4ad1-bbab-938e49f4cba1 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 628.338316] env[63028]: DEBUG nova.compute.manager [req-90414c49-054b-4d69-b390-5fca153c919d req-bfee1ace-0c23-4399-8055-90cd07a53aba service nova] [instance: 413f7fea-452b-463f-b396-cdd29e8ffa91] Refreshing instance network info cache due to event network-changed-892c8e3d-851e-4ad1-bbab-938e49f4cba1. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 628.338465] env[63028]: DEBUG oslo_concurrency.lockutils [req-90414c49-054b-4d69-b390-5fca153c919d req-bfee1ace-0c23-4399-8055-90cd07a53aba service nova] Acquiring lock "refresh_cache-413f7fea-452b-463f-b396-cdd29e8ffa91" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 628.338615] env[63028]: DEBUG oslo_concurrency.lockutils [req-90414c49-054b-4d69-b390-5fca153c919d req-bfee1ace-0c23-4399-8055-90cd07a53aba service nova] Acquired lock "refresh_cache-413f7fea-452b-463f-b396-cdd29e8ffa91" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 628.338781] env[63028]: DEBUG nova.network.neutron [req-90414c49-054b-4d69-b390-5fca153c919d req-bfee1ace-0c23-4399-8055-90cd07a53aba service nova] [instance: 413f7fea-452b-463f-b396-cdd29e8ffa91] Refreshing network info cache for port 892c8e3d-851e-4ad1-bbab-938e49f4cba1 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 628.344610] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-7b8ec20a-6c04-49c4-9391-64bfcf0e97f9 tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] [instance: 44fca05f-51db-4252-bcf8-6bcad37a6147] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 628.344610] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-7b8ec20a-6c04-49c4-9391-64bfcf0e97f9 tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] [instance: 44fca05f-51db-4252-bcf8-6bcad37a6147] Deleting contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 628.344610] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-7b8ec20a-6c04-49c4-9391-64bfcf0e97f9 tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] Deleting the datastore file [datastore2] 44fca05f-51db-4252-bcf8-6bcad37a6147 {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 628.345337] env[63028]: DEBUG nova.scheduler.client.report [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Refreshing trait associations for resource provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=63028) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 628.347601] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b0d81b2c-e65e-41cd-a7e0-3658ffbf3450 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.356284] env[63028]: DEBUG 
oslo_vmware.api [None req-7b8ec20a-6c04-49c4-9391-64bfcf0e97f9 tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] Waiting for the task: (returnval){ [ 628.356284] env[63028]: value = "task-2735069" [ 628.356284] env[63028]: _type = "Task" [ 628.356284] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 628.366812] env[63028]: DEBUG oslo_vmware.api [None req-7b8ec20a-6c04-49c4-9391-64bfcf0e97f9 tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] Task: {'id': task-2735069, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.450496] env[63028]: DEBUG oslo_vmware.api [None req-53602f48-db0a-4b2c-b359-fb6e89b1101f tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Task: {'id': task-2735062, 'name': Destroy_Task, 'duration_secs': 0.674679} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 628.453735] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-53602f48-db0a-4b2c-b359-fb6e89b1101f tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] [instance: c9cc1ac7-06c6-415b-86ce-daf4849bfc05] Destroyed the VM [ 628.453735] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-53602f48-db0a-4b2c-b359-fb6e89b1101f tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] [instance: c9cc1ac7-06c6-415b-86ce-daf4849bfc05] Deleting Snapshot of the VM instance {{(pid=63028) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 628.454133] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-182d01ea-e2b8-4423-9867-ae23a5b6dd6f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.462837] env[63028]: DEBUG oslo_vmware.api [None req-53602f48-db0a-4b2c-b359-fb6e89b1101f tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Waiting for the task: (returnval){ [ 628.462837] env[63028]: value = "task-2735070" [ 628.462837] env[63028]: _type = "Task" [ 628.462837] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 628.475140] env[63028]: DEBUG oslo_vmware.api [None req-53602f48-db0a-4b2c-b359-fb6e89b1101f tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Task: {'id': task-2735070, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.513239] env[63028]: DEBUG oslo_vmware.api [None req-9e63bf4e-0a01-438b-b252-bf0645d18ec0 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Task: {'id': task-2735063, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.852977} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 628.513625] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e63bf4e-0a01-438b-b252-bf0645d18ec0 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] 9b1cd3c1-5e9a-43b6-9efb-1baf879ae0c0/9b1cd3c1-5e9a-43b6-9efb-1baf879ae0c0.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 628.514161] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-9e63bf4e-0a01-438b-b252-bf0645d18ec0 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] [instance: 9b1cd3c1-5e9a-43b6-9efb-1baf879ae0c0] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 628.514161] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2a8086e6-16bb-4f82-8a5d-f066af63d92b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.528248] env[63028]: DEBUG oslo_vmware.api [None req-9e63bf4e-0a01-438b-b252-bf0645d18ec0 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Waiting for the task: (returnval){ [ 628.528248] env[63028]: value = "task-2735071" [ 628.528248] env[63028]: _type = "Task" [ 628.528248] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 628.544115] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5fa19bdb-228b-42ec-83d1-3d3179548869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Acquiring lock "d6137c80-0c09-4655-b264-472753b4fa9c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 628.544446] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5fa19bdb-228b-42ec-83d1-3d3179548869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Lock "d6137c80-0c09-4655-b264-472753b4fa9c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 628.544698] env[63028]: DEBUG oslo_vmware.api [None req-9e63bf4e-0a01-438b-b252-bf0645d18ec0 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Task: {'id': task-2735071, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.561722] env[63028]: DEBUG oslo_vmware.api [None req-ce8dbf78-89da-47a1-a8ca-68ee45531f5f tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]521afdbf-b311-2f68-c85d-acf75a633c65, 'name': SearchDatastore_Task, 'duration_secs': 0.024171} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 628.563896] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-60be7bb0-00ac-446e-8dbf-f358531a0940 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.570682] env[63028]: DEBUG oslo_vmware.api [None req-ce8dbf78-89da-47a1-a8ca-68ee45531f5f tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Waiting for the task: (returnval){ [ 628.570682] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52615eed-e0af-d92c-1e4a-3be83929e25c" [ 628.570682] env[63028]: _type = "Task" [ 628.570682] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 628.584305] env[63028]: DEBUG oslo_vmware.api [None req-ce8dbf78-89da-47a1-a8ca-68ee45531f5f tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52615eed-e0af-d92c-1e4a-3be83929e25c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.614892] env[63028]: DEBUG nova.compute.manager [req-0dddc1e5-b053-44fd-b98a-af848950108c req-24929790-4aed-4b98-89be-1136615abc6a service nova] [instance: ddf20137-4d63-4c7a-b519-445719265e1d] Received event network-vif-plugged-7efed411-b25a-4981-919a-66c96dd949ee {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 628.615209] env[63028]: DEBUG oslo_concurrency.lockutils [req-0dddc1e5-b053-44fd-b98a-af848950108c req-24929790-4aed-4b98-89be-1136615abc6a service nova] Acquiring lock "ddf20137-4d63-4c7a-b519-445719265e1d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 628.615444] env[63028]: DEBUG oslo_concurrency.lockutils [req-0dddc1e5-b053-44fd-b98a-af848950108c req-24929790-4aed-4b98-89be-1136615abc6a service nova] Lock "ddf20137-4d63-4c7a-b519-445719265e1d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 628.615651] env[63028]: DEBUG oslo_concurrency.lockutils [req-0dddc1e5-b053-44fd-b98a-af848950108c req-24929790-4aed-4b98-89be-1136615abc6a service nova] Lock "ddf20137-4d63-4c7a-b519-445719265e1d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 628.615847] env[63028]: DEBUG nova.compute.manager [req-0dddc1e5-b053-44fd-b98a-af848950108c req-24929790-4aed-4b98-89be-1136615abc6a service nova] [instance: ddf20137-4d63-4c7a-b519-445719265e1d] No waiting events found dispatching network-vif-plugged-7efed411-b25a-4981-919a-66c96dd949ee {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 628.616033] env[63028]: WARNING nova.compute.manager [req-0dddc1e5-b053-44fd-b98a-af848950108c req-24929790-4aed-4b98-89be-1136615abc6a service nova] [instance: ddf20137-4d63-4c7a-b519-445719265e1d] Received unexpected event network-vif-plugged-7efed411-b25a-4981-919a-66c96dd949ee 
for instance with vm_state building and task_state spawning. [ 628.616231] env[63028]: DEBUG nova.compute.manager [req-0dddc1e5-b053-44fd-b98a-af848950108c req-24929790-4aed-4b98-89be-1136615abc6a service nova] [instance: ddf20137-4d63-4c7a-b519-445719265e1d] Received event network-changed-7efed411-b25a-4981-919a-66c96dd949ee {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 628.616413] env[63028]: DEBUG nova.compute.manager [req-0dddc1e5-b053-44fd-b98a-af848950108c req-24929790-4aed-4b98-89be-1136615abc6a service nova] [instance: ddf20137-4d63-4c7a-b519-445719265e1d] Refreshing instance network info cache due to event network-changed-7efed411-b25a-4981-919a-66c96dd949ee. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 628.616647] env[63028]: DEBUG oslo_concurrency.lockutils [req-0dddc1e5-b053-44fd-b98a-af848950108c req-24929790-4aed-4b98-89be-1136615abc6a service nova] Acquiring lock "refresh_cache-ddf20137-4d63-4c7a-b519-445719265e1d" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 628.616759] env[63028]: DEBUG oslo_concurrency.lockutils [req-0dddc1e5-b053-44fd-b98a-af848950108c req-24929790-4aed-4b98-89be-1136615abc6a service nova] Acquired lock "refresh_cache-ddf20137-4d63-4c7a-b519-445719265e1d" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 628.616930] env[63028]: DEBUG nova.network.neutron [req-0dddc1e5-b053-44fd-b98a-af848950108c req-24929790-4aed-4b98-89be-1136615abc6a service nova] [instance: ddf20137-4d63-4c7a-b519-445719265e1d] Refreshing network info cache for port 7efed411-b25a-4981-919a-66c96dd949ee {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 628.668960] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735066, 'name': CreateVM_Task, 'duration_secs': 0.981945} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 628.671577] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ddf20137-4d63-4c7a-b519-445719265e1d] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 628.673048] env[63028]: DEBUG oslo_concurrency.lockutils [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 628.673048] env[63028]: DEBUG oslo_concurrency.lockutils [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 628.673281] env[63028]: DEBUG oslo_concurrency.lockutils [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 628.673385] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9c0ae361-9d12-44ed-a637-8e41b569ee41 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.678385] env[63028]: DEBUG oslo_vmware.api [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Waiting for the task: (returnval){ [ 628.678385] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52e7d2f3-1cb3-45e5-a8cb-b1f19f9d39f0" [ 628.678385] env[63028]: _type = "Task" [ 628.678385] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 628.692830] env[63028]: DEBUG oslo_vmware.api [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52e7d2f3-1cb3-45e5-a8cb-b1f19f9d39f0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.698971] env[63028]: DEBUG oslo_concurrency.lockutils [None req-1053660c-ddf1-48d6-a5c4-3c3460088539 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Acquiring lock "c7a3f2c6-8368-49cc-9737-ea1d836f1783" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 628.840610] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-661cb318-2fa8-4b6a-b4d6-57c0d5b6648c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.851500] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c19517ea-1de5-41f8-8286-0cc501b3b503 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.898597] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79e6cf24-05e3-4ad8-b786-4e190bfa65b3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.917382] env[63028]: DEBUG oslo_vmware.api [None req-7b8ec20a-6c04-49c4-9391-64bfcf0e97f9 tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] Task: {'id': task-2735069, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.310289} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 628.920585] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2154be98-cddf-420d-bcaf-1b50951be8e6 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Acquiring lock "e20ed04f-205b-4aa9-b8b6-e352cd237412" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 628.920858] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2154be98-cddf-420d-bcaf-1b50951be8e6 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Lock "e20ed04f-205b-4aa9-b8b6-e352cd237412" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 628.921101] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2154be98-cddf-420d-bcaf-1b50951be8e6 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Acquiring lock "e20ed04f-205b-4aa9-b8b6-e352cd237412-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 628.921301] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2154be98-cddf-420d-bcaf-1b50951be8e6 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Lock "e20ed04f-205b-4aa9-b8b6-e352cd237412-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 628.921515] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2154be98-cddf-420d-bcaf-1b50951be8e6 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Lock "e20ed04f-205b-4aa9-b8b6-e352cd237412-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 628.923595] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-7b8ec20a-6c04-49c4-9391-64bfcf0e97f9 tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 628.923789] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-7b8ec20a-6c04-49c4-9391-64bfcf0e97f9 tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] [instance: 44fca05f-51db-4252-bcf8-6bcad37a6147] Deleted contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 628.923964] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-7b8ec20a-6c04-49c4-9391-64bfcf0e97f9 tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] [instance: 44fca05f-51db-4252-bcf8-6bcad37a6147] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 628.924174] env[63028]: INFO nova.compute.manager [None req-7b8ec20a-6c04-49c4-9391-64bfcf0e97f9 tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] [instance: 44fca05f-51db-4252-bcf8-6bcad37a6147] Took 1.18 seconds to destroy the instance on the hypervisor. [ 628.924431] env[63028]: DEBUG oslo.service.loopingcall [None req-7b8ec20a-6c04-49c4-9391-64bfcf0e97f9 tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 628.927070] env[63028]: INFO nova.compute.manager [None req-2154be98-cddf-420d-bcaf-1b50951be8e6 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: e20ed04f-205b-4aa9-b8b6-e352cd237412] Terminating instance [ 628.928495] env[63028]: DEBUG nova.compute.manager [-] [instance: 44fca05f-51db-4252-bcf8-6bcad37a6147] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 628.932331] env[63028]: DEBUG nova.network.neutron [-] [instance: 44fca05f-51db-4252-bcf8-6bcad37a6147] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 628.939312] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c47a8a8-a0fa-42a0-bfe6-1192b2263754 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.955047] env[63028]: DEBUG nova.compute.provider_tree [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Updating inventory in ProviderTree for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 628.973033] env[63028]: DEBUG oslo_vmware.api [None req-53602f48-db0a-4b2c-b359-fb6e89b1101f tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Task: {'id': task-2735070, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.037028] env[63028]: DEBUG oslo_vmware.api [None req-9e63bf4e-0a01-438b-b252-bf0645d18ec0 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Task: {'id': task-2735071, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078409} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 629.037344] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-9e63bf4e-0a01-438b-b252-bf0645d18ec0 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] [instance: 9b1cd3c1-5e9a-43b6-9efb-1baf879ae0c0] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 629.038223] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b8180e5-6e00-4037-ba4d-f459eab56441 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.062545] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-9e63bf4e-0a01-438b-b252-bf0645d18ec0 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] [instance: 9b1cd3c1-5e9a-43b6-9efb-1baf879ae0c0] Reconfiguring VM instance instance-00000014 to attach disk [datastore1] 9b1cd3c1-5e9a-43b6-9efb-1baf879ae0c0/9b1cd3c1-5e9a-43b6-9efb-1baf879ae0c0.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 629.062877] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-75857635-3f57-4bf8-bbfa-7c34851dc474 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.092047] env[63028]: DEBUG oslo_vmware.api [None req-ce8dbf78-89da-47a1-a8ca-68ee45531f5f tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52615eed-e0af-d92c-1e4a-3be83929e25c, 'name': SearchDatastore_Task, 'duration_secs': 0.021436} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 629.093484] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ce8dbf78-89da-47a1-a8ca-68ee45531f5f tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 629.093770] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce8dbf78-89da-47a1-a8ca-68ee45531f5f tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] 2ae111f7-4eaa-46c2-ab97-907daa913834/2ae111f7-4eaa-46c2-ab97-907daa913834.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 629.094102] env[63028]: DEBUG oslo_vmware.api [None req-9e63bf4e-0a01-438b-b252-bf0645d18ec0 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Waiting for the task: (returnval){ [ 629.094102] env[63028]: value = "task-2735072" [ 629.094102] env[63028]: _type = "Task" [ 629.094102] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 629.094337] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d3295bf5-55a6-44e4-8551-70c75938b57e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.107249] env[63028]: DEBUG oslo_vmware.api [None req-9e63bf4e-0a01-438b-b252-bf0645d18ec0 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Task: {'id': task-2735072, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.108687] env[63028]: DEBUG oslo_vmware.api [None req-ce8dbf78-89da-47a1-a8ca-68ee45531f5f tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Waiting for the task: (returnval){ [ 629.108687] env[63028]: value = "task-2735073" [ 629.108687] env[63028]: _type = "Task" [ 629.108687] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 629.119099] env[63028]: DEBUG oslo_vmware.api [None req-ce8dbf78-89da-47a1-a8ca-68ee45531f5f tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Task: {'id': task-2735073, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.197043] env[63028]: DEBUG oslo_vmware.api [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52e7d2f3-1cb3-45e5-a8cb-b1f19f9d39f0, 'name': SearchDatastore_Task, 'duration_secs': 0.045423} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 629.197043] env[63028]: DEBUG oslo_concurrency.lockutils [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 629.197043] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] [instance: ddf20137-4d63-4c7a-b519-445719265e1d] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 629.197043] env[63028]: DEBUG oslo_concurrency.lockutils [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 629.197476] env[63028]: DEBUG oslo_concurrency.lockutils [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 629.197476] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 629.197648] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bca25b82-6507-4ddd-a335-d4b4e606b11c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.211073] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 629.211338] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 629.212131] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4a66bb12-50f0-4c1c-b084-5843b1014f24 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.218538] env[63028]: DEBUG oslo_vmware.api [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Waiting for the task: (returnval){ [ 629.218538] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]521ebec5-d286-dab5-9c7b-f3414d42721b" [ 629.218538] env[63028]: _type = "Task" [ 629.218538] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 629.227433] env[63028]: DEBUG oslo_vmware.api [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]521ebec5-d286-dab5-9c7b-f3414d42721b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.445246] env[63028]: DEBUG nova.compute.manager [None req-2154be98-cddf-420d-bcaf-1b50951be8e6 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: e20ed04f-205b-4aa9-b8b6-e352cd237412] Start destroying the instance on the hypervisor. {{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 629.445492] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-2154be98-cddf-420d-bcaf-1b50951be8e6 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: e20ed04f-205b-4aa9-b8b6-e352cd237412] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 629.447322] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-879aa671-3fba-46e1-8f67-883d524fa892 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.458123] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-2154be98-cddf-420d-bcaf-1b50951be8e6 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: e20ed04f-205b-4aa9-b8b6-e352cd237412] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 629.458992] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 629.459272] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fc9f6120-960d-49ea-89df-4abb02190d92 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.463561] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63028) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 629.471765] env[63028]: DEBUG oslo_vmware.api [None req-2154be98-cddf-420d-bcaf-1b50951be8e6 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Waiting for the task: (returnval){ [ 629.471765] env[63028]: value = "task-2735074" [ 629.471765] env[63028]: _type = "Task" [ 629.471765] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 629.476210] env[63028]: DEBUG oslo_vmware.api [None req-53602f48-db0a-4b2c-b359-fb6e89b1101f tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Task: {'id': task-2735070, 'name': RemoveSnapshot_Task} progress is 97%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.490185] env[63028]: DEBUG oslo_vmware.api [None req-2154be98-cddf-420d-bcaf-1b50951be8e6 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Task: {'id': task-2735074, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.509728] env[63028]: DEBUG nova.scheduler.client.report [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Updated inventory for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 with generation 45 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 629.510150] env[63028]: DEBUG nova.compute.provider_tree [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Updating resource provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 generation from 45 to 46 during operation: update_inventory {{(pid=63028) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 629.510480] env[63028]: DEBUG nova.compute.provider_tree [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Updating inventory in ProviderTree for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 629.587182] env[63028]: DEBUG nova.network.neutron [req-90414c49-054b-4d69-b390-5fca153c919d req-bfee1ace-0c23-4399-8055-90cd07a53aba service nova] [instance: 413f7fea-452b-463f-b396-cdd29e8ffa91] Updated VIF entry in instance network info cache for port 
892c8e3d-851e-4ad1-bbab-938e49f4cba1. {{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 629.588127] env[63028]: DEBUG nova.network.neutron [req-90414c49-054b-4d69-b390-5fca153c919d req-bfee1ace-0c23-4399-8055-90cd07a53aba service nova] [instance: 413f7fea-452b-463f-b396-cdd29e8ffa91] Updating instance_info_cache with network_info: [{"id": "892c8e3d-851e-4ad1-bbab-938e49f4cba1", "address": "fa:16:3e:2c:e4:c1", "network": {"id": "5f8a710c-b049-40fe-8cc4-00c97e954fd9", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1979325486-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "381de553d9da4c94b923d790c12a28a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ea16b8c-ebf6-4bd4-ab67-ddb472e7b460", "external-id": "nsx-vlan-transportzone-156", "segmentation_id": 156, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap892c8e3d-85", "ovs_interfaceid": "892c8e3d-851e-4ad1-bbab-938e49f4cba1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 629.609284] env[63028]: DEBUG oslo_vmware.api [None req-9e63bf4e-0a01-438b-b252-bf0645d18ec0 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Task: {'id': task-2735072, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.622699] env[63028]: DEBUG oslo_vmware.api [None req-ce8dbf78-89da-47a1-a8ca-68ee45531f5f tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Task: {'id': task-2735073, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.732608] env[63028]: DEBUG oslo_vmware.api [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]521ebec5-d286-dab5-9c7b-f3414d42721b, 'name': SearchDatastore_Task, 'duration_secs': 0.030991} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 629.733601] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3610eeeb-ccb0-4290-96c8-f4174f2e4127 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.746797] env[63028]: DEBUG oslo_vmware.api [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Waiting for the task: (returnval){ [ 629.746797] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52d740b1-53bf-0ab7-1d51-dee96a8808c6" [ 629.746797] env[63028]: _type = "Task" [ 629.746797] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 629.763160] env[63028]: DEBUG oslo_vmware.api [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52d740b1-53bf-0ab7-1d51-dee96a8808c6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.819133] env[63028]: DEBUG nova.network.neutron [req-0dddc1e5-b053-44fd-b98a-af848950108c req-24929790-4aed-4b98-89be-1136615abc6a service nova] [instance: ddf20137-4d63-4c7a-b519-445719265e1d] Updated VIF entry in instance network info cache for port 7efed411-b25a-4981-919a-66c96dd949ee. {{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 629.819133] env[63028]: DEBUG nova.network.neutron [req-0dddc1e5-b053-44fd-b98a-af848950108c req-24929790-4aed-4b98-89be-1136615abc6a service nova] [instance: ddf20137-4d63-4c7a-b519-445719265e1d] Updating instance_info_cache with network_info: [{"id": "7efed411-b25a-4981-919a-66c96dd949ee", "address": "fa:16:3e:f7:4a:16", "network": {"id": "863c7a3c-a546-44b9-b96f-a1782de90239", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-2034296608-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5b1989df63834513aa0b192beb4609ba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7cd4cea-788c-4e6d-9df8-5d83838e2e2a", "external-id": "nsx-vlan-transportzone-361", "segmentation_id": 361, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7efed411-b2", "ovs_interfaceid": "7efed411-b25a-4981-919a-66c96dd949ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 629.975020] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63028) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 629.975020] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 629.975020] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 629.975020] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 629.975020] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 629.975020] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 629.975580] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63028) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}} [ 629.975580] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 629.980158] env[63028]: DEBUG oslo_vmware.api [None req-53602f48-db0a-4b2c-b359-fb6e89b1101f tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Task: {'id': task-2735070, 'name': RemoveSnapshot_Task} progress is 97%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.989197] env[63028]: DEBUG oslo_vmware.api [None req-2154be98-cddf-420d-bcaf-1b50951be8e6 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Task: {'id': task-2735074, 'name': PowerOffVM_Task, 'duration_secs': 0.22362} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 629.989531] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-2154be98-cddf-420d-bcaf-1b50951be8e6 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: e20ed04f-205b-4aa9-b8b6-e352cd237412] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 629.989752] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-2154be98-cddf-420d-bcaf-1b50951be8e6 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: e20ed04f-205b-4aa9-b8b6-e352cd237412] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 629.990019] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-32b122b6-f8fe-4c82-bfbb-189ab9269092 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.020500] env[63028]: DEBUG oslo_concurrency.lockutils [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 5.757s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 630.021206] env[63028]: DEBUG nova.compute.manager [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] [instance: 99886410-ec47-46ad-9d07-ee3593006997] Start building networks asynchronously for instance. 
{{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 630.024140] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a93d865e-c2f1-4997-9b78-d5c953b30008 tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.006s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 630.026085] env[63028]: INFO nova.compute.claims [None req-a93d865e-c2f1-4997-9b78-d5c953b30008 tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] [instance: 8f6beda6-0fc6-4d85-9f27-f4248adda8f3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 630.062025] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-2154be98-cddf-420d-bcaf-1b50951be8e6 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: e20ed04f-205b-4aa9-b8b6-e352cd237412] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 630.062025] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-2154be98-cddf-420d-bcaf-1b50951be8e6 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: e20ed04f-205b-4aa9-b8b6-e352cd237412] Deleting contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 630.062025] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-2154be98-cddf-420d-bcaf-1b50951be8e6 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Deleting the datastore file [datastore1] e20ed04f-205b-4aa9-b8b6-e352cd237412 {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 630.062025] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-73589068-72d3-44b6-a785-0826a2aacf08 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.070470] env[63028]: DEBUG oslo_vmware.api [None req-2154be98-cddf-420d-bcaf-1b50951be8e6 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Waiting for the task: (returnval){ [ 630.070470] env[63028]: value = "task-2735076" [ 630.070470] env[63028]: _type = "Task" [ 630.070470] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 630.085744] env[63028]: DEBUG oslo_vmware.api [None req-2154be98-cddf-420d-bcaf-1b50951be8e6 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Task: {'id': task-2735076, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 630.091296] env[63028]: DEBUG oslo_concurrency.lockutils [req-90414c49-054b-4d69-b390-5fca153c919d req-bfee1ace-0c23-4399-8055-90cd07a53aba service nova] Releasing lock "refresh_cache-413f7fea-452b-463f-b396-cdd29e8ffa91" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 630.108638] env[63028]: DEBUG oslo_vmware.api [None req-9e63bf4e-0a01-438b-b252-bf0645d18ec0 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Task: {'id': task-2735072, 'name': ReconfigVM_Task, 'duration_secs': 0.631263} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 630.108946] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-9e63bf4e-0a01-438b-b252-bf0645d18ec0 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] [instance: 9b1cd3c1-5e9a-43b6-9efb-1baf879ae0c0] Reconfigured VM instance instance-00000014 to attach disk [datastore1] 9b1cd3c1-5e9a-43b6-9efb-1baf879ae0c0/9b1cd3c1-5e9a-43b6-9efb-1baf879ae0c0.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 630.109731] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3838d715-3c3d-4510-af8d-7739587de197 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.122598] env[63028]: DEBUG oslo_vmware.api [None req-ce8dbf78-89da-47a1-a8ca-68ee45531f5f tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Task: {'id': task-2735073, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.782639} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 630.122863] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce8dbf78-89da-47a1-a8ca-68ee45531f5f tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] 2ae111f7-4eaa-46c2-ab97-907daa913834/2ae111f7-4eaa-46c2-ab97-907daa913834.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 630.122970] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-ce8dbf78-89da-47a1-a8ca-68ee45531f5f tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] [instance: 2ae111f7-4eaa-46c2-ab97-907daa913834] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 630.123207] env[63028]: DEBUG oslo_vmware.api [None req-9e63bf4e-0a01-438b-b252-bf0645d18ec0 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Waiting for the task: (returnval){ [ 630.123207] env[63028]: value = "task-2735077" [ 630.123207] env[63028]: _type = "Task" [ 630.123207] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 630.123390] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e6101fd0-ceb6-4ec2-923c-ede9864ebd13 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.137281] env[63028]: DEBUG oslo_vmware.api [None req-9e63bf4e-0a01-438b-b252-bf0645d18ec0 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Task: {'id': task-2735077, 'name': Rename_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 630.137933] env[63028]: DEBUG oslo_vmware.api [None req-ce8dbf78-89da-47a1-a8ca-68ee45531f5f tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Waiting for the task: (returnval){ [ 630.137933] env[63028]: value = "task-2735078" [ 630.137933] env[63028]: _type = "Task" [ 630.137933] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 630.150721] env[63028]: DEBUG oslo_vmware.api [None req-ce8dbf78-89da-47a1-a8ca-68ee45531f5f tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Task: {'id': task-2735078, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 630.248731] env[63028]: DEBUG nova.network.neutron [-] [instance: 44fca05f-51db-4252-bcf8-6bcad37a6147] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 630.263685] env[63028]: DEBUG oslo_vmware.api [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52d740b1-53bf-0ab7-1d51-dee96a8808c6, 'name': SearchDatastore_Task, 'duration_secs': 0.022981} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 630.267746] env[63028]: DEBUG oslo_concurrency.lockutils [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 630.267746] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] ddf20137-4d63-4c7a-b519-445719265e1d/ddf20137-4d63-4c7a-b519-445719265e1d.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 630.267746] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0f6f9928-921e-4e85-ae2e-300dca464c10 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.280540] env[63028]: DEBUG oslo_vmware.api [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Waiting for the task: (returnval){ [ 630.280540] env[63028]: value = "task-2735079" [ 630.280540] env[63028]: _type = "Task" [ 630.280540] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 630.290605] env[63028]: DEBUG oslo_vmware.api [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Task: {'id': task-2735079, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 630.322368] env[63028]: DEBUG oslo_concurrency.lockutils [req-0dddc1e5-b053-44fd-b98a-af848950108c req-24929790-4aed-4b98-89be-1136615abc6a service nova] Releasing lock "refresh_cache-ddf20137-4d63-4c7a-b519-445719265e1d" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 630.476833] env[63028]: DEBUG oslo_vmware.api [None req-53602f48-db0a-4b2c-b359-fb6e89b1101f tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Task: {'id': task-2735070, 'name': RemoveSnapshot_Task, 'duration_secs': 1.95824} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 630.477180] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-53602f48-db0a-4b2c-b359-fb6e89b1101f tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] [instance: c9cc1ac7-06c6-415b-86ce-daf4849bfc05] Deleted Snapshot of the VM instance {{(pid=63028) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 630.477467] env[63028]: INFO nova.compute.manager [None req-53602f48-db0a-4b2c-b359-fb6e89b1101f tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] [instance: c9cc1ac7-06c6-415b-86ce-daf4849bfc05] Took 17.95 seconds to snapshot the instance on the hypervisor. [ 630.481167] env[63028]: DEBUG oslo_concurrency.lockutils [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 630.531044] env[63028]: DEBUG nova.compute.utils [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 630.534301] env[63028]: DEBUG nova.compute.manager [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] [instance: 99886410-ec47-46ad-9d07-ee3593006997] Allocating IP information in the background. {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 630.534512] env[63028]: DEBUG nova.network.neutron [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] [instance: 99886410-ec47-46ad-9d07-ee3593006997] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 630.585151] env[63028]: DEBUG oslo_vmware.api [None req-2154be98-cddf-420d-bcaf-1b50951be8e6 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Task: {'id': task-2735076, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.285218} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 630.585502] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-2154be98-cddf-420d-bcaf-1b50951be8e6 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 630.585752] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-2154be98-cddf-420d-bcaf-1b50951be8e6 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: e20ed04f-205b-4aa9-b8b6-e352cd237412] Deleted contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 630.585995] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-2154be98-cddf-420d-bcaf-1b50951be8e6 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: e20ed04f-205b-4aa9-b8b6-e352cd237412] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 630.586201] env[63028]: INFO nova.compute.manager [None req-2154be98-cddf-420d-bcaf-1b50951be8e6 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: e20ed04f-205b-4aa9-b8b6-e352cd237412] Took 1.14 seconds to destroy the instance on the hypervisor. [ 630.586451] env[63028]: DEBUG oslo.service.loopingcall [None req-2154be98-cddf-420d-bcaf-1b50951be8e6 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 630.586649] env[63028]: DEBUG nova.compute.manager [-] [instance: e20ed04f-205b-4aa9-b8b6-e352cd237412] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 630.586744] env[63028]: DEBUG nova.network.neutron [-] [instance: e20ed04f-205b-4aa9-b8b6-e352cd237412] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 630.635083] env[63028]: DEBUG nova.policy [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6f325ea113f9433b95bfeb3607ffb8a1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1cea592da5614af38b29744934f142fb', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 630.647148] env[63028]: DEBUG oslo_vmware.api [None req-9e63bf4e-0a01-438b-b252-bf0645d18ec0 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Task: {'id': task-2735077, 'name': Rename_Task, 'duration_secs': 0.313211} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 630.651155] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e63bf4e-0a01-438b-b252-bf0645d18ec0 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] [instance: 9b1cd3c1-5e9a-43b6-9efb-1baf879ae0c0] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 630.652764] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7dc4b10d-8aa9-4fbd-9518-565f47df97bc {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.661718] env[63028]: DEBUG oslo_vmware.api [None req-ce8dbf78-89da-47a1-a8ca-68ee45531f5f tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Task: {'id': task-2735078, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079558} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 630.663530] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-ce8dbf78-89da-47a1-a8ca-68ee45531f5f tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] [instance: 2ae111f7-4eaa-46c2-ab97-907daa913834] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 630.664563] env[63028]: DEBUG oslo_vmware.api [None req-9e63bf4e-0a01-438b-b252-bf0645d18ec0 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Waiting for the task: (returnval){ [ 630.664563] env[63028]: value = "task-2735080" [ 630.664563] env[63028]: _type = "Task" [ 630.664563] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 630.665621] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb36afd9-cf09-48b8-81e0-8b3d95c6470d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.683271] env[63028]: DEBUG oslo_vmware.api [None req-9e63bf4e-0a01-438b-b252-bf0645d18ec0 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Task: {'id': task-2735080, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 630.705819] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-ce8dbf78-89da-47a1-a8ca-68ee45531f5f tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] [instance: 2ae111f7-4eaa-46c2-ab97-907daa913834] Reconfiguring VM instance instance-00000016 to attach disk [datastore2] 2ae111f7-4eaa-46c2-ab97-907daa913834/2ae111f7-4eaa-46c2-ab97-907daa913834.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 630.705819] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b01e8c0a-4040-4e2f-b9fe-e233e74b5995 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.734707] env[63028]: DEBUG oslo_vmware.api [None req-ce8dbf78-89da-47a1-a8ca-68ee45531f5f tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Waiting for the task: (returnval){ [ 630.734707] env[63028]: value = "task-2735081" [ 630.734707] env[63028]: _type = "Task" [ 630.734707] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 630.748608] env[63028]: DEBUG oslo_vmware.api [None req-ce8dbf78-89da-47a1-a8ca-68ee45531f5f tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Task: {'id': task-2735081, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 630.751407] env[63028]: INFO nova.compute.manager [-] [instance: 44fca05f-51db-4252-bcf8-6bcad37a6147] Took 1.82 seconds to deallocate network for instance. [ 630.795739] env[63028]: DEBUG oslo_vmware.api [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Task: {'id': task-2735079, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 631.026069] env[63028]: DEBUG nova.compute.manager [req-fe5f9778-64e4-46b1-9d3e-5b6ef9c25aa4 req-8b979c17-ab55-4ab3-9d57-2b0fee073712 service nova] [instance: 44fca05f-51db-4252-bcf8-6bcad37a6147] Received event network-vif-deleted-4fb8a759-76bd-4b37-a810-2665ea4a32b3 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 631.041188] env[63028]: DEBUG nova.compute.manager [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] [instance: 99886410-ec47-46ad-9d07-ee3593006997] Start building block device mappings for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 631.180688] env[63028]: DEBUG oslo_vmware.api [None req-9e63bf4e-0a01-438b-b252-bf0645d18ec0 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Task: {'id': task-2735080, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 631.250174] env[63028]: DEBUG oslo_vmware.api [None req-ce8dbf78-89da-47a1-a8ca-68ee45531f5f tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Task: {'id': task-2735081, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 631.263086] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7b8ec20a-6c04-49c4-9391-64bfcf0e97f9 tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 631.298376] env[63028]: DEBUG oslo_vmware.api [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Task: {'id': task-2735079, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.83208} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 631.299392] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] ddf20137-4d63-4c7a-b519-445719265e1d/ddf20137-4d63-4c7a-b519-445719265e1d.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 631.299392] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] [instance: ddf20137-4d63-4c7a-b519-445719265e1d] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 631.299392] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-462fcaae-65dd-47c0-a409-926571e02de1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.312734] env[63028]: DEBUG oslo_vmware.api [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Waiting for the task: (returnval){ [ 631.312734] env[63028]: value = "task-2735082" [ 631.312734] env[63028]: _type = "Task" [ 631.312734] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 631.322953] env[63028]: DEBUG oslo_vmware.api [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Task: {'id': task-2735082, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 631.631022] env[63028]: DEBUG nova.network.neutron [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] [instance: 99886410-ec47-46ad-9d07-ee3593006997] Successfully created port: 480124ee-5fda-4fb0-ab85-6641aaa08fac {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 631.676923] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b63e8c4a-0283-4b5e-ad3d-976e6d01abaa {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.687591] env[63028]: DEBUG oslo_vmware.api [None req-9e63bf4e-0a01-438b-b252-bf0645d18ec0 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Task: {'id': task-2735080, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 631.693936] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a1dbbc2-d5bb-47a3-84d3-1b7063f43a97 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.742655] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b60bc73-690e-4ff3-bed2-915b77e03e43 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.757308] env[63028]: DEBUG oslo_vmware.api [None req-ce8dbf78-89da-47a1-a8ca-68ee45531f5f tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Task: {'id': task-2735081, 'name': ReconfigVM_Task, 'duration_secs': 0.722675} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 631.758660] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-004615a8-37bc-4942-a22a-89e389072b5a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.764578] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-ce8dbf78-89da-47a1-a8ca-68ee45531f5f tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] [instance: 2ae111f7-4eaa-46c2-ab97-907daa913834] Reconfigured VM instance instance-00000016 to attach disk [datastore2] 2ae111f7-4eaa-46c2-ab97-907daa913834/2ae111f7-4eaa-46c2-ab97-907daa913834.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 631.765592] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-057994bf-f6a4-4274-b257-c31185dfaed2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.780463] env[63028]: DEBUG nova.compute.provider_tree [None req-a93d865e-c2f1-4997-9b78-d5c953b30008 tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 631.784331] env[63028]: DEBUG oslo_vmware.api [None req-ce8dbf78-89da-47a1-a8ca-68ee45531f5f tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Waiting for the task: (returnval){ [ 631.784331] env[63028]: value = "task-2735083" [ 631.784331] env[63028]: _type = "Task" [ 631.784331] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 631.797550] env[63028]: DEBUG oslo_vmware.api [None req-ce8dbf78-89da-47a1-a8ca-68ee45531f5f tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Task: {'id': task-2735083, 'name': Rename_Task} progress is 6%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 631.822063] env[63028]: DEBUG oslo_vmware.api [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Task: {'id': task-2735082, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.17606} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 631.822380] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] [instance: ddf20137-4d63-4c7a-b519-445719265e1d] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 631.823637] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-302de008-364e-4c9a-a6fd-c4b9da4b2fdf {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.850250] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] [instance: ddf20137-4d63-4c7a-b519-445719265e1d] Reconfiguring VM instance instance-00000015 to attach disk [datastore1] ddf20137-4d63-4c7a-b519-445719265e1d/ddf20137-4d63-4c7a-b519-445719265e1d.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 631.850805] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-da6757ac-dab1-48bb-8aa4-31f2b966b940 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.871078] env[63028]: DEBUG oslo_vmware.api [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Waiting for the task: (returnval){ [ 631.871078] env[63028]: value = "task-2735084" [ 631.871078] env[63028]: _type = "Task" [ 631.871078] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 631.882446] env[63028]: DEBUG oslo_vmware.api [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Task: {'id': task-2735084, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 631.956291] env[63028]: DEBUG nova.network.neutron [-] [instance: e20ed04f-205b-4aa9-b8b6-e352cd237412] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 632.062961] env[63028]: DEBUG nova.compute.manager [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] [instance: 99886410-ec47-46ad-9d07-ee3593006997] Start spawning the instance on the hypervisor. 
{{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 632.105518] env[63028]: DEBUG nova.virt.hardware [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=<?>,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-02-20T17:52:17Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 632.105810] env[63028]: DEBUG nova.virt.hardware [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 632.105895] env[63028]: DEBUG nova.virt.hardware [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 632.106099] env[63028]: DEBUG nova.virt.hardware [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 632.106296] env[63028]: DEBUG nova.virt.hardware [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 632.106427] env[63028]: DEBUG nova.virt.hardware [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 632.106632] env[63028]: DEBUG nova.virt.hardware [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 632.106785] env[63028]: DEBUG nova.virt.hardware [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 632.107064] env[63028]: DEBUG nova.virt.hardware [None 
req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 632.107229] env[63028]: DEBUG nova.virt.hardware [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 632.107407] env[63028]: DEBUG nova.virt.hardware [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 632.109107] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ac66b4e-d4ed-40da-9b10-552c9544e39e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.120913] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-298c2962-01b0-47d2-9ffc-e973a619e88a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.184242] env[63028]: DEBUG oslo_vmware.api [None req-9e63bf4e-0a01-438b-b252-bf0645d18ec0 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Task: {'id': task-2735080, 'name': PowerOnVM_Task, 'duration_secs': 1.063599} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 632.184242] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e63bf4e-0a01-438b-b252-bf0645d18ec0 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] [instance: 9b1cd3c1-5e9a-43b6-9efb-1baf879ae0c0] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 632.184242] env[63028]: INFO nova.compute.manager [None req-9e63bf4e-0a01-438b-b252-bf0645d18ec0 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] [instance: 9b1cd3c1-5e9a-43b6-9efb-1baf879ae0c0] Took 13.74 seconds to spawn the instance on the hypervisor. 
[ 632.184242] env[63028]: DEBUG nova.compute.manager [None req-9e63bf4e-0a01-438b-b252-bf0645d18ec0 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] [instance: 9b1cd3c1-5e9a-43b6-9efb-1baf879ae0c0] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 632.184832] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8fcd6e4-5f5a-440a-af07-6aaa523b4cdc {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.284027] env[63028]: DEBUG nova.scheduler.client.report [None req-a93d865e-c2f1-4997-9b78-d5c953b30008 tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 632.307529] env[63028]: DEBUG oslo_vmware.api [None req-ce8dbf78-89da-47a1-a8ca-68ee45531f5f tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Task: {'id': task-2735083, 'name': Rename_Task, 'duration_secs': 0.231738} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 632.307834] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce8dbf78-89da-47a1-a8ca-68ee45531f5f tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] [instance: 2ae111f7-4eaa-46c2-ab97-907daa913834] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 632.308210] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0c542807-af7b-4fd3-845a-f57e3d3a2f09 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.317924] env[63028]: DEBUG oslo_vmware.api [None req-ce8dbf78-89da-47a1-a8ca-68ee45531f5f tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Waiting for the task: (returnval){ [ 632.317924] env[63028]: value = "task-2735085" [ 632.317924] env[63028]: _type = "Task" [ 632.317924] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 632.329195] env[63028]: DEBUG oslo_vmware.api [None req-ce8dbf78-89da-47a1-a8ca-68ee45531f5f tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Task: {'id': task-2735085, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 632.392943] env[63028]: DEBUG oslo_vmware.api [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Task: {'id': task-2735084, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 632.460389] env[63028]: INFO nova.compute.manager [-] [instance: e20ed04f-205b-4aa9-b8b6-e352cd237412] Took 1.87 seconds to deallocate network for instance. [ 632.714626] env[63028]: INFO nova.compute.manager [None req-9e63bf4e-0a01-438b-b252-bf0645d18ec0 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] [instance: 9b1cd3c1-5e9a-43b6-9efb-1baf879ae0c0] Took 35.83 seconds to build instance. [ 632.794228] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a93d865e-c2f1-4997-9b78-d5c953b30008 tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.769s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 632.794228] env[63028]: DEBUG nova.compute.manager [None req-a93d865e-c2f1-4997-9b78-d5c953b30008 tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] [instance: 8f6beda6-0fc6-4d85-9f27-f4248adda8f3] Start building networks asynchronously for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 632.797507] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5282f999-0e6a-4d73-89f0-a01dea2945b9 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.864s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 632.799491] env[63028]: INFO nova.compute.claims [None req-5282f999-0e6a-4d73-89f0-a01dea2945b9 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] [instance: 0e07a6cd-8c99-408d-95ba-63f7839c327f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 632.831277] env[63028]: DEBUG oslo_vmware.api [None req-ce8dbf78-89da-47a1-a8ca-68ee45531f5f tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Task: {'id': task-2735085, 'name': PowerOnVM_Task} progress is 88%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 632.885367] env[63028]: DEBUG oslo_vmware.api [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Task: {'id': task-2735084, 'name': ReconfigVM_Task, 'duration_secs': 0.643003} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 632.892328] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] [instance: ddf20137-4d63-4c7a-b519-445719265e1d] Reconfigured VM instance instance-00000015 to attach disk [datastore1] ddf20137-4d63-4c7a-b519-445719265e1d/ddf20137-4d63-4c7a-b519-445719265e1d.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 632.892328] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-03634df6-9dc4-42d0-b431-74d9f08e5703 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.903026] env[63028]: DEBUG oslo_vmware.api [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Waiting for the task: (returnval){ [ 632.903026] env[63028]: value = "task-2735086" [ 632.903026] env[63028]: _type = "Task" [ 632.903026] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 632.913471] env[63028]: DEBUG oslo_vmware.api [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Task: {'id': task-2735086, 'name': Rename_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 632.982129] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2154be98-cddf-420d-bcaf-1b50951be8e6 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 633.214989] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9e63bf4e-0a01-438b-b252-bf0645d18ec0 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Lock "9b1cd3c1-5e9a-43b6-9efb-1baf879ae0c0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 37.358s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 633.308634] env[63028]: DEBUG nova.compute.utils [None req-a93d865e-c2f1-4997-9b78-d5c953b30008 tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 633.311707] env[63028]: DEBUG nova.compute.manager [None req-a93d865e-c2f1-4997-9b78-d5c953b30008 tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] [instance: 8f6beda6-0fc6-4d85-9f27-f4248adda8f3] Allocating IP information in the background. 
{{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 633.313150] env[63028]: DEBUG nova.network.neutron [None req-a93d865e-c2f1-4997-9b78-d5c953b30008 tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] [instance: 8f6beda6-0fc6-4d85-9f27-f4248adda8f3] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 633.338362] env[63028]: DEBUG oslo_vmware.api [None req-ce8dbf78-89da-47a1-a8ca-68ee45531f5f tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Task: {'id': task-2735085, 'name': PowerOnVM_Task, 'duration_secs': 0.758224} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 633.338909] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce8dbf78-89da-47a1-a8ca-68ee45531f5f tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] [instance: 2ae111f7-4eaa-46c2-ab97-907daa913834] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 633.339727] env[63028]: INFO nova.compute.manager [None req-ce8dbf78-89da-47a1-a8ca-68ee45531f5f tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] [instance: 2ae111f7-4eaa-46c2-ab97-907daa913834] Took 6.53 seconds to spawn the instance on the hypervisor. [ 633.341019] env[63028]: DEBUG nova.compute.manager [None req-ce8dbf78-89da-47a1-a8ca-68ee45531f5f tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] [instance: 2ae111f7-4eaa-46c2-ab97-907daa913834] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 633.341019] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-924788b0-0f85-4206-bb66-725cf9bf448d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.417227] env[63028]: DEBUG oslo_vmware.api [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Task: {'id': task-2735086, 'name': Rename_Task, 'duration_secs': 0.24909} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 633.417227] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] [instance: ddf20137-4d63-4c7a-b519-445719265e1d] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 633.417518] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6a40ad8a-7ff8-4e8d-a510-3af6d88bc465 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.424138] env[63028]: DEBUG nova.compute.manager [req-ae947ca4-17c7-4893-a398-b21dcd3ccccd req-a9dc11f8-690b-49e7-8525-fa5044757e24 service nova] [instance: e20ed04f-205b-4aa9-b8b6-e352cd237412] Received event network-vif-deleted-735ece36-635c-498c-ad00-02b6ef32bba7 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 633.426674] env[63028]: DEBUG oslo_vmware.api [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Waiting for the task: (returnval){ [ 633.426674] env[63028]: value = "task-2735087" [ 633.426674] env[63028]: _type = "Task" [ 633.426674] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 633.431394] env[63028]: DEBUG nova.policy [None req-a93d865e-c2f1-4997-9b78-d5c953b30008 tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e2b4bc1a209945db974bce8a6164ef2b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'de9847436cdb4e679067d13380cd1187', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 633.439253] env[63028]: DEBUG oslo_vmware.api [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Task: {'id': task-2735087, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 633.718698] env[63028]: DEBUG nova.compute.manager [None req-c62dbb04-bda2-4f0b-9942-c38a36376853 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: b9db75ba-6832-45e8-8faf-d1cdaa7dabdd] Starting instance... {{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 633.813054] env[63028]: DEBUG nova.compute.manager [None req-a93d865e-c2f1-4997-9b78-d5c953b30008 tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] [instance: 8f6beda6-0fc6-4d85-9f27-f4248adda8f3] Start building block device mappings for instance. 
{{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 633.865024] env[63028]: INFO nova.compute.manager [None req-ce8dbf78-89da-47a1-a8ca-68ee45531f5f tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] [instance: 2ae111f7-4eaa-46c2-ab97-907daa913834] Took 30.66 seconds to build instance. [ 633.941733] env[63028]: DEBUG oslo_vmware.api [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Task: {'id': task-2735087, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 634.242760] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c62dbb04-bda2-4f0b-9942-c38a36376853 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 634.366686] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ce8dbf78-89da-47a1-a8ca-68ee45531f5f tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Lock "2ae111f7-4eaa-46c2-ab97-907daa913834" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 32.169s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 634.367630] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27353b13-aabf-47bf-9504-e75ad3d81a50 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.385166] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01b7cecd-3c16-42f3-b47b-e356a35c9004 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.423039] env[63028]: DEBUG nova.compute.manager [None req-56470efe-5faa-4735-b857-928b7027d902 tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] [instance: c9cc1ac7-06c6-415b-86ce-daf4849bfc05] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 634.424328] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2da6a6c5-f27b-4b20-99cb-978108b0ae24 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.428220] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27eeeedf-a7e4-4f23-b397-83c827914162 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.442143] env[63028]: DEBUG oslo_vmware.api [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Task: {'id': task-2735087, 'name': PowerOnVM_Task, 'duration_secs': 0.99873} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 634.446068] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] [instance: ddf20137-4d63-4c7a-b519-445719265e1d] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 634.446309] env[63028]: INFO nova.compute.manager [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] [instance: ddf20137-4d63-4c7a-b519-445719265e1d] Took 10.86 seconds to spawn the instance on the hypervisor. [ 634.446491] env[63028]: DEBUG nova.compute.manager [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] [instance: ddf20137-4d63-4c7a-b519-445719265e1d] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 634.451727] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f62da8a3-71ac-41fb-beff-4238d106fe91 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.454479] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aca06d96-db62-4ac2-8179-a87cf36c7aa5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.477131] env[63028]: DEBUG nova.compute.provider_tree [None req-5282f999-0e6a-4d73-89f0-a01dea2945b9 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 634.828033] env[63028]: DEBUG nova.compute.manager [None req-a93d865e-c2f1-4997-9b78-d5c953b30008 tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] [instance: 8f6beda6-0fc6-4d85-9f27-f4248adda8f3] Start spawning the instance on the hypervisor. 
{{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 634.858099] env[63028]: DEBUG nova.virt.hardware [None req-a93d865e-c2f1-4997-9b78-d5c953b30008 tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=<?>,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-02-20T17:52:17Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 634.858375] env[63028]: DEBUG nova.virt.hardware [None req-a93d865e-c2f1-4997-9b78-d5c953b30008 tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 634.858511] env[63028]: DEBUG nova.virt.hardware [None req-a93d865e-c2f1-4997-9b78-d5c953b30008 tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 634.858687] env[63028]: DEBUG nova.virt.hardware [None req-a93d865e-c2f1-4997-9b78-d5c953b30008 tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 634.858851] env[63028]: DEBUG nova.virt.hardware [None req-a93d865e-c2f1-4997-9b78-d5c953b30008 tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 634.859593] env[63028]: DEBUG nova.virt.hardware [None req-a93d865e-c2f1-4997-9b78-d5c953b30008 tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 634.859984] env[63028]: DEBUG nova.virt.hardware [None req-a93d865e-c2f1-4997-9b78-d5c953b30008 tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 634.860236] env[63028]: DEBUG nova.virt.hardware [None req-a93d865e-c2f1-4997-9b78-d5c953b30008 tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 634.860559] env[63028]: DEBUG nova.virt.hardware [None 
req-a93d865e-c2f1-4997-9b78-d5c953b30008 tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 634.860763] env[63028]: DEBUG nova.virt.hardware [None req-a93d865e-c2f1-4997-9b78-d5c953b30008 tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 634.861069] env[63028]: DEBUG nova.virt.hardware [None req-a93d865e-c2f1-4997-9b78-d5c953b30008 tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 634.862665] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85c871dd-c674-4b8a-b544-6082b2c5e558 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.876546] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1c17944-984e-4d18-b8a8-1c764c5a70b1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.883801] env[63028]: DEBUG nova.network.neutron [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] [instance: 99886410-ec47-46ad-9d07-ee3593006997] Successfully updated port: 480124ee-5fda-4fb0-ab85-6641aaa08fac {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 634.885921] env[63028]: DEBUG nova.compute.manager [None req-cb9a34be-b0fe-4e26-a404-5da53e8a940a tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] [instance: 8a09beac-4b54-4fbb-9bac-3dcfe4c21fb3] Starting instance... 
{{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 634.910557] env[63028]: DEBUG nova.network.neutron [None req-a93d865e-c2f1-4997-9b78-d5c953b30008 tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] [instance: 8f6beda6-0fc6-4d85-9f27-f4248adda8f3] Successfully created port: 4ab9f841-7392-47cd-afac-be9ddd19b6bf {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 634.960461] env[63028]: INFO nova.compute.manager [None req-56470efe-5faa-4735-b857-928b7027d902 tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] [instance: c9cc1ac7-06c6-415b-86ce-daf4849bfc05] instance snapshotting [ 634.963874] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d301da6c-19cc-4ed3-b7f7-529c7017c9a7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.996654] env[63028]: DEBUG nova.scheduler.client.report [None req-5282f999-0e6a-4d73-89f0-a01dea2945b9 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 635.004062] env[63028]: INFO nova.compute.manager [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] [instance: ddf20137-4d63-4c7a-b519-445719265e1d] Took 33.40 seconds to build instance. 
[ 635.004062] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d679333f-8b38-4b2c-991d-e62cfcd9b51c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.391093] env[63028]: DEBUG oslo_concurrency.lockutils [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Acquiring lock "refresh_cache-99886410-ec47-46ad-9d07-ee3593006997" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 635.391361] env[63028]: DEBUG oslo_concurrency.lockutils [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Acquired lock "refresh_cache-99886410-ec47-46ad-9d07-ee3593006997" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 635.391760] env[63028]: DEBUG nova.network.neutron [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] [instance: 99886410-ec47-46ad-9d07-ee3593006997] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 635.432414] env[63028]: DEBUG oslo_concurrency.lockutils [None req-cb9a34be-b0fe-4e26-a404-5da53e8a940a tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 635.504405] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5282f999-0e6a-4d73-89f0-a01dea2945b9 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.705s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 635.504405] env[63028]: DEBUG nova.compute.manager [None req-5282f999-0e6a-4d73-89f0-a01dea2945b9 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] [instance: 0e07a6cd-8c99-408d-95ba-63f7839c327f] Start building networks asynchronously for instance. 
{{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 635.509467] env[63028]: DEBUG oslo_concurrency.lockutils [None req-e8cfdc3e-60ac-4489-8889-136c30d59549 tempest-ServersAdmin275Test-1735254146 tempest-ServersAdmin275Test-1735254146-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 26.351s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 635.509858] env[63028]: DEBUG nova.objects.instance [None req-e8cfdc3e-60ac-4489-8889-136c30d59549 tempest-ServersAdmin275Test-1735254146 tempest-ServersAdmin275Test-1735254146-project-admin] [instance: 94b1bf30-0f9b-4197-99ff-6631a13ab2d1] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63028) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 635.514214] env[63028]: DEBUG oslo_concurrency.lockutils [None req-29224f45-9b45-4283-a5ed-1190e65e4262 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Lock "ddf20137-4d63-4c7a-b519-445719265e1d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 34.920s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 635.516487] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-56470efe-5faa-4735-b857-928b7027d902 tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] [instance: c9cc1ac7-06c6-415b-86ce-daf4849bfc05] Creating Snapshot of the VM instance {{(pid=63028) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 635.516869] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-dc73acd0-55a3-4ac4-901a-3d62122f0c4e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.529463] env[63028]: DEBUG oslo_vmware.api [None req-56470efe-5faa-4735-b857-928b7027d902 tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Waiting for the task: (returnval){ [ 635.529463] env[63028]: value = "task-2735088" [ 635.529463] env[63028]: _type = "Task" [ 635.529463] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 635.555999] env[63028]: DEBUG oslo_vmware.api [None req-56470efe-5faa-4735-b857-928b7027d902 tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Task: {'id': task-2735088, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 635.910173] env[63028]: INFO nova.compute.manager [None req-30e7e053-37a2-402d-a298-8045eae5748c tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] [instance: 2ae111f7-4eaa-46c2-ab97-907daa913834] Rebuilding instance [ 635.973443] env[63028]: DEBUG nova.network.neutron [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] [instance: 99886410-ec47-46ad-9d07-ee3593006997] Instance cache missing network info. 
{{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 635.976441] env[63028]: DEBUG nova.compute.manager [None req-30e7e053-37a2-402d-a298-8045eae5748c tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] [instance: 2ae111f7-4eaa-46c2-ab97-907daa913834] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 635.977053] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5db4a99-1159-4ac7-ac11-0c5b275bb396 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.020019] env[63028]: DEBUG nova.compute.utils [None req-5282f999-0e6a-4d73-89f0-a01dea2945b9 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 636.022054] env[63028]: DEBUG nova.compute.manager [None req-a8d45a3e-08b3-4b12-b769-c8ee0aba7c03 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] [instance: 8c7c8713-d5d7-490e-aba5-25d98bfbfaa0] Starting instance... {{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 636.024807] env[63028]: DEBUG nova.compute.manager [None req-5282f999-0e6a-4d73-89f0-a01dea2945b9 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] [instance: 0e07a6cd-8c99-408d-95ba-63f7839c327f] Allocating IP information in the background. {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 636.024807] env[63028]: DEBUG nova.network.neutron [None req-5282f999-0e6a-4d73-89f0-a01dea2945b9 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] [instance: 0e07a6cd-8c99-408d-95ba-63f7839c327f] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 636.042355] env[63028]: DEBUG oslo_vmware.api [None req-56470efe-5faa-4735-b857-928b7027d902 tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Task: {'id': task-2735088, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 636.128104] env[63028]: DEBUG nova.policy [None req-5282f999-0e6a-4d73-89f0-a01dea2945b9 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '00ffdb3e07e94dd2bf7a104002093bda', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7ed58d3e63604c2fac29e5744fd7f0bc', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 636.317744] env[63028]: DEBUG nova.network.neutron [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] [instance: 99886410-ec47-46ad-9d07-ee3593006997] Updating instance_info_cache with network_info: [{"id": "480124ee-5fda-4fb0-ab85-6641aaa08fac", "address": "fa:16:3e:95:7f:78", "network": {"id": "1d675112-a3cf-4be5-b1eb-fc06409428b9", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-292835398-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1cea592da5614af38b29744934f142fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca83c3bc-f3ec-42ab-85b3-192512f766f3", "external-id": "nsx-vlan-transportzone-879", "segmentation_id": 879, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap480124ee-5f", "ovs_interfaceid": "480124ee-5fda-4fb0-ab85-6641aaa08fac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 636.419356] env[63028]: DEBUG oslo_concurrency.lockutils [None req-1045d1cd-c3b2-4583-b165-f139578add45 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Acquiring lock "interface-ddf20137-4d63-4c7a-b519-445719265e1d-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 636.419682] env[63028]: DEBUG oslo_concurrency.lockutils [None req-1045d1cd-c3b2-4583-b165-f139578add45 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Lock "interface-ddf20137-4d63-4c7a-b519-445719265e1d-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 636.420016] env[63028]: DEBUG nova.objects.instance [None req-1045d1cd-c3b2-4583-b165-f139578add45 tempest-AttachInterfacesV270Test-253249065 
tempest-AttachInterfacesV270Test-253249065-project-member] Lazy-loading 'flavor' on Instance uuid ddf20137-4d63-4c7a-b519-445719265e1d {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 636.528051] env[63028]: DEBUG nova.compute.manager [None req-5282f999-0e6a-4d73-89f0-a01dea2945b9 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] [instance: 0e07a6cd-8c99-408d-95ba-63f7839c327f] Start building block device mappings for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 636.535211] env[63028]: DEBUG oslo_concurrency.lockutils [None req-e8cfdc3e-60ac-4489-8889-136c30d59549 tempest-ServersAdmin275Test-1735254146 tempest-ServersAdmin275Test-1735254146-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.028s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 636.538139] env[63028]: DEBUG oslo_concurrency.lockutils [None req-fc2e2e77-fc06-435b-8552-1bfab521033e tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.977s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 636.538712] env[63028]: DEBUG nova.objects.instance [None req-fc2e2e77-fc06-435b-8552-1bfab521033e tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] Lazy-loading 'resources' on Instance uuid 1eeb96d1-6e03-4192-a9db-955444519fd7 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 636.553370] env[63028]: DEBUG oslo_vmware.api [None req-56470efe-5faa-4735-b857-928b7027d902 tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Task: {'id': task-2735088, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 636.562250] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a8d45a3e-08b3-4b12-b769-c8ee0aba7c03 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 636.818994] env[63028]: DEBUG nova.compute.manager [req-57048a2b-9d12-43d1-b392-c4d1b27b00a6 req-7f60c349-1ee2-4a7d-bbe5-a2bc97c500d3 service nova] [instance: 99886410-ec47-46ad-9d07-ee3593006997] Received event network-vif-plugged-480124ee-5fda-4fb0-ab85-6641aaa08fac {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 636.819258] env[63028]: DEBUG oslo_concurrency.lockutils [req-57048a2b-9d12-43d1-b392-c4d1b27b00a6 req-7f60c349-1ee2-4a7d-bbe5-a2bc97c500d3 service nova] Acquiring lock "99886410-ec47-46ad-9d07-ee3593006997-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 636.819485] env[63028]: DEBUG oslo_concurrency.lockutils [req-57048a2b-9d12-43d1-b392-c4d1b27b00a6 req-7f60c349-1ee2-4a7d-bbe5-a2bc97c500d3 service nova] Lock "99886410-ec47-46ad-9d07-ee3593006997-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 636.819775] env[63028]: DEBUG oslo_concurrency.lockutils [req-57048a2b-9d12-43d1-b392-c4d1b27b00a6 req-7f60c349-1ee2-4a7d-bbe5-a2bc97c500d3 service nova] Lock "99886410-ec47-46ad-9d07-ee3593006997-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 636.819867] env[63028]: DEBUG nova.compute.manager [req-57048a2b-9d12-43d1-b392-c4d1b27b00a6 req-7f60c349-1ee2-4a7d-bbe5-a2bc97c500d3 service nova] [instance: 99886410-ec47-46ad-9d07-ee3593006997] No waiting events found dispatching network-vif-plugged-480124ee-5fda-4fb0-ab85-6641aaa08fac {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 636.821329] env[63028]: WARNING nova.compute.manager [req-57048a2b-9d12-43d1-b392-c4d1b27b00a6 req-7f60c349-1ee2-4a7d-bbe5-a2bc97c500d3 service nova] [instance: 99886410-ec47-46ad-9d07-ee3593006997] Received unexpected event network-vif-plugged-480124ee-5fda-4fb0-ab85-6641aaa08fac for instance with vm_state building and task_state spawning. [ 636.821444] env[63028]: DEBUG nova.compute.manager [req-57048a2b-9d12-43d1-b392-c4d1b27b00a6 req-7f60c349-1ee2-4a7d-bbe5-a2bc97c500d3 service nova] [instance: 99886410-ec47-46ad-9d07-ee3593006997] Received event network-changed-480124ee-5fda-4fb0-ab85-6641aaa08fac {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 636.821615] env[63028]: DEBUG nova.compute.manager [req-57048a2b-9d12-43d1-b392-c4d1b27b00a6 req-7f60c349-1ee2-4a7d-bbe5-a2bc97c500d3 service nova] [instance: 99886410-ec47-46ad-9d07-ee3593006997] Refreshing instance network info cache due to event network-changed-480124ee-5fda-4fb0-ab85-6641aaa08fac. 
{{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 636.821790] env[63028]: DEBUG oslo_concurrency.lockutils [req-57048a2b-9d12-43d1-b392-c4d1b27b00a6 req-7f60c349-1ee2-4a7d-bbe5-a2bc97c500d3 service nova] Acquiring lock "refresh_cache-99886410-ec47-46ad-9d07-ee3593006997" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 636.824194] env[63028]: DEBUG oslo_concurrency.lockutils [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Releasing lock "refresh_cache-99886410-ec47-46ad-9d07-ee3593006997" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 636.824490] env[63028]: DEBUG nova.compute.manager [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] [instance: 99886410-ec47-46ad-9d07-ee3593006997] Instance network_info: |[{"id": "480124ee-5fda-4fb0-ab85-6641aaa08fac", "address": "fa:16:3e:95:7f:78", "network": {"id": "1d675112-a3cf-4be5-b1eb-fc06409428b9", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-292835398-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1cea592da5614af38b29744934f142fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca83c3bc-f3ec-42ab-85b3-192512f766f3", "external-id": "nsx-vlan-transportzone-879", "segmentation_id": 879, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap480124ee-5f", "ovs_interfaceid": "480124ee-5fda-4fb0-ab85-6641aaa08fac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 636.825023] env[63028]: DEBUG oslo_concurrency.lockutils [req-57048a2b-9d12-43d1-b392-c4d1b27b00a6 req-7f60c349-1ee2-4a7d-bbe5-a2bc97c500d3 service nova] Acquired lock "refresh_cache-99886410-ec47-46ad-9d07-ee3593006997" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 636.826032] env[63028]: DEBUG nova.network.neutron [req-57048a2b-9d12-43d1-b392-c4d1b27b00a6 req-7f60c349-1ee2-4a7d-bbe5-a2bc97c500d3 service nova] [instance: 99886410-ec47-46ad-9d07-ee3593006997] Refreshing network info cache for port 480124ee-5fda-4fb0-ab85-6641aaa08fac {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 636.827668] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] [instance: 99886410-ec47-46ad-9d07-ee3593006997] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:95:7f:78', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ca83c3bc-f3ec-42ab-85b3-192512f766f3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'480124ee-5fda-4fb0-ab85-6641aaa08fac', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 636.835348] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Creating folder: Project (1cea592da5614af38b29744934f142fb). Parent ref: group-v550570. {{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 636.836512] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d8839e11-5ed1-4566-a831-fe93e86b6b32 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.843686] env[63028]: DEBUG nova.network.neutron [None req-5282f999-0e6a-4d73-89f0-a01dea2945b9 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] [instance: 0e07a6cd-8c99-408d-95ba-63f7839c327f] Successfully created port: cc6af35d-7e46-40e6-bc97-40efda1ab807 {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 636.856515] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Created folder: Project (1cea592da5614af38b29744934f142fb) in parent group-v550570. [ 636.856756] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Creating folder: Instances. Parent ref: group-v550648. {{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 636.857866] env[63028]: DEBUG oslo_concurrency.lockutils [None req-404d9a50-0510-4bb1-afa5-ba09e7496ecd tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Acquiring lock "6e0959ac-8fca-47eb-b501-b50a3e9f025a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 636.857866] env[63028]: DEBUG oslo_concurrency.lockutils [None req-404d9a50-0510-4bb1-afa5-ba09e7496ecd tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Lock "6e0959ac-8fca-47eb-b501-b50a3e9f025a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 636.857866] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5c0cee0f-bbea-4f8e-b8c8-abf42e1f3fe5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.873098] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Created folder: Instances in parent group-v550648. 
[ 636.873379] env[63028]: DEBUG oslo.service.loopingcall [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 636.873672] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 99886410-ec47-46ad-9d07-ee3593006997] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 636.873838] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-610e1806-df79-4ad7-a8b2-32ce43ed04b4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.906700] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 636.906700] env[63028]: value = "task-2735091" [ 636.906700] env[63028]: _type = "Task" [ 636.906700] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 636.917063] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735091, 'name': CreateVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 636.926176] env[63028]: DEBUG nova.objects.instance [None req-1045d1cd-c3b2-4583-b165-f139578add45 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Lazy-loading 'pci_requests' on Instance uuid ddf20137-4d63-4c7a-b519-445719265e1d {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 636.993960] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-30e7e053-37a2-402d-a298-8045eae5748c tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] [instance: 2ae111f7-4eaa-46c2-ab97-907daa913834] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 636.993960] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-315ac3d0-19c4-4aa2-9a93-5e9047716015 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.007269] env[63028]: DEBUG oslo_vmware.api [None req-30e7e053-37a2-402d-a298-8045eae5748c tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Waiting for the task: (returnval){ [ 637.007269] env[63028]: value = "task-2735092" [ 637.007269] env[63028]: _type = "Task" [ 637.007269] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 637.025997] env[63028]: DEBUG oslo_vmware.api [None req-30e7e053-37a2-402d-a298-8045eae5748c tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Task: {'id': task-2735092, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 637.057807] env[63028]: DEBUG oslo_vmware.api [None req-56470efe-5faa-4735-b857-928b7027d902 tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Task: {'id': task-2735088, 'name': CreateSnapshot_Task, 'duration_secs': 1.056441} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 637.057807] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-56470efe-5faa-4735-b857-928b7027d902 tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] [instance: c9cc1ac7-06c6-415b-86ce-daf4849bfc05] Created Snapshot of the VM instance {{(pid=63028) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 637.058126] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1a707e4-b526-47d2-905b-7b9bbeb0fe6e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.126911] env[63028]: DEBUG nova.network.neutron [None req-a93d865e-c2f1-4997-9b78-d5c953b30008 tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] [instance: 8f6beda6-0fc6-4d85-9f27-f4248adda8f3] Successfully updated port: 4ab9f841-7392-47cd-afac-be9ddd19b6bf {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 637.418795] env[63028]: DEBUG nova.compute.manager [req-314b671a-3a80-4bee-9659-6845ad0c09ec req-9afb036e-4bbe-4a5f-86ad-c22bd8ec24fc service nova] [instance: 9b1cd3c1-5e9a-43b6-9efb-1baf879ae0c0] Received event network-changed-8f21ea1a-db48-4941-98ae-796e9dfcf6b1 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 637.418986] env[63028]: DEBUG nova.compute.manager [req-314b671a-3a80-4bee-9659-6845ad0c09ec req-9afb036e-4bbe-4a5f-86ad-c22bd8ec24fc service nova] [instance: 9b1cd3c1-5e9a-43b6-9efb-1baf879ae0c0] Refreshing instance network info cache due to event network-changed-8f21ea1a-db48-4941-98ae-796e9dfcf6b1. 
{{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 637.419213] env[63028]: DEBUG oslo_concurrency.lockutils [req-314b671a-3a80-4bee-9659-6845ad0c09ec req-9afb036e-4bbe-4a5f-86ad-c22bd8ec24fc service nova] Acquiring lock "refresh_cache-9b1cd3c1-5e9a-43b6-9efb-1baf879ae0c0" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 637.419375] env[63028]: DEBUG oslo_concurrency.lockutils [req-314b671a-3a80-4bee-9659-6845ad0c09ec req-9afb036e-4bbe-4a5f-86ad-c22bd8ec24fc service nova] Acquired lock "refresh_cache-9b1cd3c1-5e9a-43b6-9efb-1baf879ae0c0" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 637.419615] env[63028]: DEBUG nova.network.neutron [req-314b671a-3a80-4bee-9659-6845ad0c09ec req-9afb036e-4bbe-4a5f-86ad-c22bd8ec24fc service nova] [instance: 9b1cd3c1-5e9a-43b6-9efb-1baf879ae0c0] Refreshing network info cache for port 8f21ea1a-db48-4941-98ae-796e9dfcf6b1 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 637.433758] env[63028]: DEBUG nova.objects.base [None req-1045d1cd-c3b2-4583-b165-f139578add45 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=63028) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 637.433954] env[63028]: DEBUG nova.network.neutron [None req-1045d1cd-c3b2-4583-b165-f139578add45 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] [instance: ddf20137-4d63-4c7a-b519-445719265e1d] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 637.435798] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735091, 'name': CreateVM_Task} progress is 99%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 637.523947] env[63028]: DEBUG oslo_vmware.api [None req-30e7e053-37a2-402d-a298-8045eae5748c tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Task: {'id': task-2735092, 'name': PowerOffVM_Task, 'duration_secs': 0.392537} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 637.524433] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-30e7e053-37a2-402d-a298-8045eae5748c tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] [instance: 2ae111f7-4eaa-46c2-ab97-907daa913834] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 637.524711] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-30e7e053-37a2-402d-a298-8045eae5748c tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] [instance: 2ae111f7-4eaa-46c2-ab97-907daa913834] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 637.525546] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f6528a7-927a-456f-89a2-683919325224 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.538890] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-30e7e053-37a2-402d-a298-8045eae5748c tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] [instance: 2ae111f7-4eaa-46c2-ab97-907daa913834] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 637.538890] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-94ba62e4-d96d-4fae-8156-8637d7897544 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.547872] env[63028]: DEBUG nova.compute.manager [None req-5282f999-0e6a-4d73-89f0-a01dea2945b9 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] [instance: 0e07a6cd-8c99-408d-95ba-63f7839c327f] Start spawning the instance on the hypervisor. 
{{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 637.585767] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-56470efe-5faa-4735-b857-928b7027d902 tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] [instance: c9cc1ac7-06c6-415b-86ce-daf4849bfc05] Creating linked-clone VM from snapshot {{(pid=63028) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 637.590283] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-50ad93d3-6458-4999-b2a5-a19bbfdc8465 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.593367] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-30e7e053-37a2-402d-a298-8045eae5748c tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] [instance: 2ae111f7-4eaa-46c2-ab97-907daa913834] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 637.594244] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-30e7e053-37a2-402d-a298-8045eae5748c tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] [instance: 2ae111f7-4eaa-46c2-ab97-907daa913834] Deleting contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 637.594244] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-30e7e053-37a2-402d-a298-8045eae5748c tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Deleting the datastore file [datastore2] 2ae111f7-4eaa-46c2-ab97-907daa913834 {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 637.594457] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c2829540-3617-486e-96ec-07c4ed774a75 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.601302] env[63028]: DEBUG nova.virt.hardware [None req-5282f999-0e6a-4d73-89f0-a01dea2945b9 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 637.601534] env[63028]: DEBUG nova.virt.hardware [None req-5282f999-0e6a-4d73-89f0-a01dea2945b9 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 637.601700] env[63028]: DEBUG 
nova.virt.hardware [None req-5282f999-0e6a-4d73-89f0-a01dea2945b9 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 637.601887] env[63028]: DEBUG nova.virt.hardware [None req-5282f999-0e6a-4d73-89f0-a01dea2945b9 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 637.602057] env[63028]: DEBUG nova.virt.hardware [None req-5282f999-0e6a-4d73-89f0-a01dea2945b9 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 637.602261] env[63028]: DEBUG nova.virt.hardware [None req-5282f999-0e6a-4d73-89f0-a01dea2945b9 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 637.602473] env[63028]: DEBUG nova.virt.hardware [None req-5282f999-0e6a-4d73-89f0-a01dea2945b9 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 637.602632] env[63028]: DEBUG nova.virt.hardware [None req-5282f999-0e6a-4d73-89f0-a01dea2945b9 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 637.602794] env[63028]: DEBUG nova.virt.hardware [None req-5282f999-0e6a-4d73-89f0-a01dea2945b9 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 637.602966] env[63028]: DEBUG nova.virt.hardware [None req-5282f999-0e6a-4d73-89f0-a01dea2945b9 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 637.603636] env[63028]: DEBUG nova.virt.hardware [None req-5282f999-0e6a-4d73-89f0-a01dea2945b9 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 637.604406] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18090751-0cfb-4351-87d3-fb93d03d1661 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.613897] env[63028]: DEBUG oslo_vmware.api [None req-30e7e053-37a2-402d-a298-8045eae5748c tempest-ServerShowV257Test-343532561 
tempest-ServerShowV257Test-343532561-project-member] Waiting for the task: (returnval){ [ 637.613897] env[63028]: value = "task-2735094" [ 637.613897] env[63028]: _type = "Task" [ 637.613897] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 637.613897] env[63028]: DEBUG oslo_vmware.api [None req-56470efe-5faa-4735-b857-928b7027d902 tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Waiting for the task: (returnval){ [ 637.613897] env[63028]: value = "task-2735095" [ 637.613897] env[63028]: _type = "Task" [ 637.613897] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 637.623443] env[63028]: DEBUG oslo_concurrency.lockutils [None req-1045d1cd-c3b2-4583-b165-f139578add45 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Lock "interface-ddf20137-4d63-4c7a-b519-445719265e1d-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 1.201s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 637.630534] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a0f9237-8dd2-4f93-bfd7-e4e2d5fea80e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.637243] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a93d865e-c2f1-4997-9b78-d5c953b30008 tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] Acquiring lock "refresh_cache-8f6beda6-0fc6-4d85-9f27-f4248adda8f3" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 637.637588] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a93d865e-c2f1-4997-9b78-d5c953b30008 tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] Acquired lock "refresh_cache-8f6beda6-0fc6-4d85-9f27-f4248adda8f3" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 637.638638] env[63028]: DEBUG nova.network.neutron [None req-a93d865e-c2f1-4997-9b78-d5c953b30008 tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] [instance: 8f6beda6-0fc6-4d85-9f27-f4248adda8f3] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 637.640381] env[63028]: DEBUG oslo_vmware.api [None req-30e7e053-37a2-402d-a298-8045eae5748c tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Task: {'id': task-2735094, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 637.640381] env[63028]: DEBUG oslo_vmware.api [None req-56470efe-5faa-4735-b857-928b7027d902 tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Task: {'id': task-2735095, 'name': CloneVM_Task} progress is 10%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 637.811866] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8560acb-32e7-49db-834a-f849f7c7834b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.824937] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42a86bf5-122c-4de6-9e0b-6baf6e0b610f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.866930] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73e6441a-f264-461f-bf8e-a1575c4552f4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.875840] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d04fae6-f85f-44f8-82af-49cc63ad5ef9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.896678] env[63028]: DEBUG nova.compute.provider_tree [None req-fc2e2e77-fc06-435b-8552-1bfab521033e tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 637.923458] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735091, 'name': CreateVM_Task} progress is 99%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 638.131450] env[63028]: DEBUG oslo_vmware.api [None req-56470efe-5faa-4735-b857-928b7027d902 tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Task: {'id': task-2735095, 'name': CloneVM_Task} progress is 94%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 638.135311] env[63028]: DEBUG oslo_vmware.api [None req-30e7e053-37a2-402d-a298-8045eae5748c tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Task: {'id': task-2735094, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.116378} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 638.135311] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-30e7e053-37a2-402d-a298-8045eae5748c tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 638.135411] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-30e7e053-37a2-402d-a298-8045eae5748c tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] [instance: 2ae111f7-4eaa-46c2-ab97-907daa913834] Deleted contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 638.135630] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-30e7e053-37a2-402d-a298-8045eae5748c tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] [instance: 2ae111f7-4eaa-46c2-ab97-907daa913834] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 638.178288] env[63028]: DEBUG nova.network.neutron [None req-a93d865e-c2f1-4997-9b78-d5c953b30008 tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] [instance: 8f6beda6-0fc6-4d85-9f27-f4248adda8f3] Instance cache missing network info. {{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 638.228332] env[63028]: DEBUG nova.network.neutron [req-57048a2b-9d12-43d1-b392-c4d1b27b00a6 req-7f60c349-1ee2-4a7d-bbe5-a2bc97c500d3 service nova] [instance: 99886410-ec47-46ad-9d07-ee3593006997] Updated VIF entry in instance network info cache for port 480124ee-5fda-4fb0-ab85-6641aaa08fac. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 638.228816] env[63028]: DEBUG nova.network.neutron [req-57048a2b-9d12-43d1-b392-c4d1b27b00a6 req-7f60c349-1ee2-4a7d-bbe5-a2bc97c500d3 service nova] [instance: 99886410-ec47-46ad-9d07-ee3593006997] Updating instance_info_cache with network_info: [{"id": "480124ee-5fda-4fb0-ab85-6641aaa08fac", "address": "fa:16:3e:95:7f:78", "network": {"id": "1d675112-a3cf-4be5-b1eb-fc06409428b9", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-292835398-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1cea592da5614af38b29744934f142fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca83c3bc-f3ec-42ab-85b3-192512f766f3", "external-id": "nsx-vlan-transportzone-879", "segmentation_id": 879, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap480124ee-5f", "ovs_interfaceid": "480124ee-5fda-4fb0-ab85-6641aaa08fac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 638.351061] env[63028]: DEBUG nova.network.neutron [None req-a93d865e-c2f1-4997-9b78-d5c953b30008 tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] [instance: 8f6beda6-0fc6-4d85-9f27-f4248adda8f3] Updating instance_info_cache with network_info: [{"id": "4ab9f841-7392-47cd-afac-be9ddd19b6bf", "address": "fa:16:3e:85:18:66", "network": {"id": "495b5d19-156c-488d-bf52-e024e94bbf30", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-552385726-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de9847436cdb4e679067d13380cd1187", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a874c214-8cdf-4a41-a718-84262b2a28d8", "external-id": "cl2-zone-726", "segmentation_id": 726, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4ab9f841-73", "ovs_interfaceid": "4ab9f841-7392-47cd-afac-be9ddd19b6bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 638.400958] env[63028]: DEBUG nova.scheduler.client.report [None req-fc2e2e77-fc06-435b-8552-1bfab521033e tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 
'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 638.413563] env[63028]: DEBUG nova.network.neutron [req-314b671a-3a80-4bee-9659-6845ad0c09ec req-9afb036e-4bbe-4a5f-86ad-c22bd8ec24fc service nova] [instance: 9b1cd3c1-5e9a-43b6-9efb-1baf879ae0c0] Updated VIF entry in instance network info cache for port 8f21ea1a-db48-4941-98ae-796e9dfcf6b1. {{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 638.413974] env[63028]: DEBUG nova.network.neutron [req-314b671a-3a80-4bee-9659-6845ad0c09ec req-9afb036e-4bbe-4a5f-86ad-c22bd8ec24fc service nova] [instance: 9b1cd3c1-5e9a-43b6-9efb-1baf879ae0c0] Updating instance_info_cache with network_info: [{"id": "8f21ea1a-db48-4941-98ae-796e9dfcf6b1", "address": "fa:16:3e:5e:61:04", "network": {"id": "bd3f74f8-d12b-4d2e-9aa9-ca1c9e766bf4", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1073484567-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.138", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "178b95ba550d453db2b9868e72a8c93f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc1e16db-ad3b-4b7f-ab64-4609c87abac0", "external-id": "nsx-vlan-transportzone-500", "segmentation_id": 500, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8f21ea1a-db", "ovs_interfaceid": "8f21ea1a-db48-4941-98ae-796e9dfcf6b1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 638.426154] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735091, 'name': CreateVM_Task, 'duration_secs': 1.369256} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 638.426454] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 99886410-ec47-46ad-9d07-ee3593006997] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 638.427162] env[63028]: DEBUG oslo_concurrency.lockutils [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 638.427363] env[63028]: DEBUG oslo_concurrency.lockutils [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 638.428389] env[63028]: DEBUG oslo_concurrency.lockutils [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 638.428389] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-77050e53-77f9-4e10-b4fa-70b6c49aadaf {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.437744] env[63028]: DEBUG oslo_vmware.api [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Waiting for the task: (returnval){ [ 638.437744] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52a5f71f-52c4-02e5-c122-23ff46a158a8" [ 638.437744] env[63028]: _type = "Task" [ 638.437744] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 638.447249] env[63028]: DEBUG oslo_vmware.api [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52a5f71f-52c4-02e5-c122-23ff46a158a8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 638.665516] env[63028]: DEBUG oslo_vmware.api [None req-56470efe-5faa-4735-b857-928b7027d902 tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Task: {'id': task-2735095, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 638.735909] env[63028]: DEBUG oslo_concurrency.lockutils [req-57048a2b-9d12-43d1-b392-c4d1b27b00a6 req-7f60c349-1ee2-4a7d-bbe5-a2bc97c500d3 service nova] Releasing lock "refresh_cache-99886410-ec47-46ad-9d07-ee3593006997" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 638.854142] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a93d865e-c2f1-4997-9b78-d5c953b30008 tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] Releasing lock "refresh_cache-8f6beda6-0fc6-4d85-9f27-f4248adda8f3" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 638.855626] env[63028]: DEBUG nova.compute.manager [None req-a93d865e-c2f1-4997-9b78-d5c953b30008 tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] [instance: 8f6beda6-0fc6-4d85-9f27-f4248adda8f3] Instance network_info: |[{"id": "4ab9f841-7392-47cd-afac-be9ddd19b6bf", "address": "fa:16:3e:85:18:66", "network": {"id": "495b5d19-156c-488d-bf52-e024e94bbf30", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-552385726-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de9847436cdb4e679067d13380cd1187", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a874c214-8cdf-4a41-a718-84262b2a28d8", "external-id": "cl2-zone-726", "segmentation_id": 726, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4ab9f841-73", "ovs_interfaceid": "4ab9f841-7392-47cd-afac-be9ddd19b6bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 638.855791] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-a93d865e-c2f1-4997-9b78-d5c953b30008 tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] [instance: 8f6beda6-0fc6-4d85-9f27-f4248adda8f3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:85:18:66', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a874c214-8cdf-4a41-a718-84262b2a28d8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4ab9f841-7392-47cd-afac-be9ddd19b6bf', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 638.863169] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-a93d865e-c2f1-4997-9b78-d5c953b30008 tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] Creating folder: Project (de9847436cdb4e679067d13380cd1187). Parent ref: group-v550570. 
{{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 638.863205] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1017eade-1484-4553-8415-62f1d952968a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.880064] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-a93d865e-c2f1-4997-9b78-d5c953b30008 tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] Created folder: Project (de9847436cdb4e679067d13380cd1187) in parent group-v550570. [ 638.880064] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-a93d865e-c2f1-4997-9b78-d5c953b30008 tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] Creating folder: Instances. Parent ref: group-v550652. {{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 638.880064] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-eb8b6d3d-5557-4886-a028-a34ab157570f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.891030] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-a93d865e-c2f1-4997-9b78-d5c953b30008 tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] Created folder: Instances in parent group-v550652. [ 638.891139] env[63028]: DEBUG oslo.service.loopingcall [None req-a93d865e-c2f1-4997-9b78-d5c953b30008 tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 638.891393] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8f6beda6-0fc6-4d85-9f27-f4248adda8f3] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 638.891619] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c78328c8-46bb-432c-9a26-c8b971e9e38f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.908392] env[63028]: DEBUG oslo_concurrency.lockutils [None req-fc2e2e77-fc06-435b-8552-1bfab521033e tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.370s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 638.914348] env[63028]: DEBUG nova.network.neutron [None req-5282f999-0e6a-4d73-89f0-a01dea2945b9 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] [instance: 0e07a6cd-8c99-408d-95ba-63f7839c327f] Successfully updated port: cc6af35d-7e46-40e6-bc97-40efda1ab807 {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 638.914959] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a4c82103-d8b7-4845-a6b4-abab95e8e4fb tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.417s {{(pid=63028) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 638.919729] env[63028]: DEBUG nova.objects.instance [None req-a4c82103-d8b7-4845-a6b4-abab95e8e4fb tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Lazy-loading 'resources' on Instance uuid 67440140-a619-41f2-98fe-eff23e8ad8a5 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 638.923689] env[63028]: DEBUG oslo_concurrency.lockutils [None req-eabfacb7-034a-4283-9259-08f5af8f4706 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Acquiring lock "1af19279-e75b-4ec5-91f1-a0a101b229b2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 638.923689] env[63028]: DEBUG oslo_concurrency.lockutils [None req-eabfacb7-034a-4283-9259-08f5af8f4706 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Lock "1af19279-e75b-4ec5-91f1-a0a101b229b2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 638.923906] env[63028]: DEBUG oslo_concurrency.lockutils [req-314b671a-3a80-4bee-9659-6845ad0c09ec req-9afb036e-4bbe-4a5f-86ad-c22bd8ec24fc service nova] Releasing lock "refresh_cache-9b1cd3c1-5e9a-43b6-9efb-1baf879ae0c0" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 638.928547] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 638.928547] env[63028]: value = "task-2735098" [ 638.928547] env[63028]: _type = "Task" [ 638.928547] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 638.945669] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735098, 'name': CreateVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 638.960203] env[63028]: DEBUG oslo_vmware.api [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52a5f71f-52c4-02e5-c122-23ff46a158a8, 'name': SearchDatastore_Task, 'duration_secs': 0.048389} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 638.961461] env[63028]: INFO nova.scheduler.client.report [None req-fc2e2e77-fc06-435b-8552-1bfab521033e tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] Deleted allocations for instance 1eeb96d1-6e03-4192-a9db-955444519fd7 [ 638.965868] env[63028]: DEBUG oslo_concurrency.lockutils [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 638.966084] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] [instance: 99886410-ec47-46ad-9d07-ee3593006997] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 638.966699] env[63028]: DEBUG oslo_concurrency.lockutils [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 638.966873] env[63028]: DEBUG oslo_concurrency.lockutils [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 638.967102] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 638.969823] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1152377e-faf7-4e5b-9d4f-ecfef5b2159e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.980909] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 638.981133] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 638.981957] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d2f1f48c-ec38-4e8a-82eb-2028f4633938 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.988927] env[63028]: DEBUG oslo_vmware.api [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Waiting for the task: (returnval){ [ 638.988927] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52a2fa5a-a45e-156f-0290-fee27d40fd15" [ 638.988927] env[63028]: _type = "Task" [ 638.988927] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 638.998879] env[63028]: DEBUG oslo_vmware.api [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52a2fa5a-a45e-156f-0290-fee27d40fd15, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 639.131961] env[63028]: DEBUG oslo_vmware.api [None req-56470efe-5faa-4735-b857-928b7027d902 tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Task: {'id': task-2735095, 'name': CloneVM_Task} progress is 94%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 639.176639] env[63028]: DEBUG nova.virt.hardware [None req-30e7e053-37a2-402d-a298-8045eae5748c tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 639.177122] env[63028]: DEBUG nova.virt.hardware [None req-30e7e053-37a2-402d-a298-8045eae5748c tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 639.177403] env[63028]: DEBUG nova.virt.hardware [None req-30e7e053-37a2-402d-a298-8045eae5748c tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 639.177688] env[63028]: DEBUG nova.virt.hardware [None req-30e7e053-37a2-402d-a298-8045eae5748c tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] 
Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 639.177936] env[63028]: DEBUG nova.virt.hardware [None req-30e7e053-37a2-402d-a298-8045eae5748c tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 639.178268] env[63028]: DEBUG nova.virt.hardware [None req-30e7e053-37a2-402d-a298-8045eae5748c tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 639.178596] env[63028]: DEBUG nova.virt.hardware [None req-30e7e053-37a2-402d-a298-8045eae5748c tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 639.180792] env[63028]: DEBUG nova.virt.hardware [None req-30e7e053-37a2-402d-a298-8045eae5748c tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 639.180792] env[63028]: DEBUG nova.virt.hardware [None req-30e7e053-37a2-402d-a298-8045eae5748c tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 639.180792] env[63028]: DEBUG nova.virt.hardware [None req-30e7e053-37a2-402d-a298-8045eae5748c tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 639.180792] env[63028]: DEBUG nova.virt.hardware [None req-30e7e053-37a2-402d-a298-8045eae5748c tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 639.180792] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83a84f5c-5f41-4861-95a7-16ad5e43bb53 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.191042] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64c72f6e-268b-43af-8e0e-f58f0af14dcc {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.215304] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-30e7e053-37a2-402d-a298-8045eae5748c tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] [instance: 2ae111f7-4eaa-46c2-ab97-907daa913834] Instance VIF info [] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 639.218048] env[63028]: DEBUG oslo.service.loopingcall [None req-30e7e053-37a2-402d-a298-8045eae5748c tempest-ServerShowV257Test-343532561 
tempest-ServerShowV257Test-343532561-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 639.218180] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2ae111f7-4eaa-46c2-ab97-907daa913834] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 639.218408] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2004f2d0-146f-4913-a513-ea69e6b01935 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.237362] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 639.237362] env[63028]: value = "task-2735099" [ 639.237362] env[63028]: _type = "Task" [ 639.237362] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 639.246805] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735099, 'name': CreateVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 639.427293] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5282f999-0e6a-4d73-89f0-a01dea2945b9 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] Acquiring lock "refresh_cache-0e07a6cd-8c99-408d-95ba-63f7839c327f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 639.428915] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5282f999-0e6a-4d73-89f0-a01dea2945b9 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] Acquired lock "refresh_cache-0e07a6cd-8c99-408d-95ba-63f7839c327f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 639.428915] env[63028]: DEBUG nova.network.neutron [None req-5282f999-0e6a-4d73-89f0-a01dea2945b9 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] [instance: 0e07a6cd-8c99-408d-95ba-63f7839c327f] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 639.458463] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735098, 'name': CreateVM_Task, 'duration_secs': 0.390809} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 639.459119] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8f6beda6-0fc6-4d85-9f27-f4248adda8f3] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 639.460062] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a93d865e-c2f1-4997-9b78-d5c953b30008 tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 639.460164] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a93d865e-c2f1-4997-9b78-d5c953b30008 tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 639.460465] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a93d865e-c2f1-4997-9b78-d5c953b30008 tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 639.460754] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2c9a29e1-2bdc-47f8-ad0f-401a9e71f607 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.468111] env[63028]: DEBUG oslo_vmware.api [None req-a93d865e-c2f1-4997-9b78-d5c953b30008 tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] Waiting for the task: (returnval){ [ 639.468111] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5247fda5-915a-eb20-c43f-ec926690c114" [ 639.468111] env[63028]: _type = "Task" [ 639.468111] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 639.480044] env[63028]: DEBUG oslo_concurrency.lockutils [None req-fc2e2e77-fc06-435b-8552-1bfab521033e tempest-ServerDiagnosticsV248Test-1913174578 tempest-ServerDiagnosticsV248Test-1913174578-project-member] Lock "1eeb96d1-6e03-4192-a9db-955444519fd7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.346s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 639.500061] env[63028]: DEBUG oslo_vmware.api [None req-a93d865e-c2f1-4997-9b78-d5c953b30008 tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5247fda5-915a-eb20-c43f-ec926690c114, 'name': SearchDatastore_Task, 'duration_secs': 0.013798} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 639.501022] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a93d865e-c2f1-4997-9b78-d5c953b30008 tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 639.501318] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-a93d865e-c2f1-4997-9b78-d5c953b30008 tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] [instance: 8f6beda6-0fc6-4d85-9f27-f4248adda8f3] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 639.501670] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a93d865e-c2f1-4997-9b78-d5c953b30008 tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 639.507304] env[63028]: DEBUG oslo_vmware.api [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52a2fa5a-a45e-156f-0290-fee27d40fd15, 'name': SearchDatastore_Task, 'duration_secs': 0.015197} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 639.511955] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-331e65bd-daff-400c-afb5-a2ad9bfc2bb0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.520956] env[63028]: DEBUG oslo_vmware.api [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Waiting for the task: (returnval){ [ 639.520956] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52fea232-be25-aa43-ea32-c2ab47e33001" [ 639.520956] env[63028]: _type = "Task" [ 639.520956] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 639.541521] env[63028]: DEBUG oslo_vmware.api [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52fea232-be25-aa43-ea32-c2ab47e33001, 'name': SearchDatastore_Task, 'duration_secs': 0.016801} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 639.541788] env[63028]: DEBUG oslo_concurrency.lockutils [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 639.542055] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] 99886410-ec47-46ad-9d07-ee3593006997/99886410-ec47-46ad-9d07-ee3593006997.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 639.542363] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a93d865e-c2f1-4997-9b78-d5c953b30008 tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 639.542544] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-a93d865e-c2f1-4997-9b78-d5c953b30008 tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 639.542762] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-52e13552-67da-40f2-9660-d20e487b0e25 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.544837] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1cf345d4-fe0a-49a7-911a-32ab1835e40b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.555611] env[63028]: DEBUG oslo_vmware.api [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Waiting for the task: (returnval){ [ 639.555611] env[63028]: value = "task-2735100" [ 639.555611] env[63028]: _type = "Task" [ 639.555611] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 639.559021] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-a93d865e-c2f1-4997-9b78-d5c953b30008 tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 639.559021] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-a93d865e-c2f1-4997-9b78-d5c953b30008 tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 639.559775] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-830f6355-8aef-47f1-ad1c-afb5c480cacd {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.571291] env[63028]: DEBUG oslo_vmware.api [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Task: {'id': task-2735100, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 639.571291] env[63028]: DEBUG oslo_vmware.api [None req-a93d865e-c2f1-4997-9b78-d5c953b30008 tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] Waiting for the task: (returnval){ [ 639.571291] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]525b31b7-20b2-3c85-930b-fbb43767b635" [ 639.571291] env[63028]: _type = "Task" [ 639.571291] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 639.584781] env[63028]: DEBUG oslo_vmware.api [None req-a93d865e-c2f1-4997-9b78-d5c953b30008 tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]525b31b7-20b2-3c85-930b-fbb43767b635, 'name': SearchDatastore_Task, 'duration_secs': 0.011571} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 639.585782] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-45e5b147-44e5-46a2-b225-29cd8e59d5a3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.594664] env[63028]: DEBUG oslo_vmware.api [None req-a93d865e-c2f1-4997-9b78-d5c953b30008 tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] Waiting for the task: (returnval){ [ 639.594664] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5279a859-271b-8c3e-dd19-788f2c6f5506" [ 639.594664] env[63028]: _type = "Task" [ 639.594664] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 639.607022] env[63028]: DEBUG oslo_vmware.api [None req-a93d865e-c2f1-4997-9b78-d5c953b30008 tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5279a859-271b-8c3e-dd19-788f2c6f5506, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 639.640557] env[63028]: DEBUG oslo_vmware.api [None req-56470efe-5faa-4735-b857-928b7027d902 tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Task: {'id': task-2735095, 'name': CloneVM_Task, 'duration_secs': 1.97694} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 639.640949] env[63028]: INFO nova.virt.vmwareapi.vmops [None req-56470efe-5faa-4735-b857-928b7027d902 tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] [instance: c9cc1ac7-06c6-415b-86ce-daf4849bfc05] Created linked-clone VM from snapshot [ 639.641744] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbd9e4a5-ee1e-45f4-895d-9f9ecf8b4898 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.652543] env[63028]: DEBUG nova.virt.vmwareapi.images [None req-56470efe-5faa-4735-b857-928b7027d902 tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] [instance: c9cc1ac7-06c6-415b-86ce-daf4849bfc05] Uploading image f70c3182-7f65-419e-a89d-18cac4191ed9 {{(pid=63028) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 639.703224] env[63028]: DEBUG oslo_vmware.rw_handles [None req-56470efe-5faa-4735-b857-928b7027d902 tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 639.703224] env[63028]: value = "vm-550651" [ 639.703224] env[63028]: _type = "VirtualMachine" [ 639.703224] env[63028]: }. {{(pid=63028) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 639.703691] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-69d45b1e-d18e-4bd8-81ee-860db0127973 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.713555] env[63028]: DEBUG oslo_vmware.rw_handles [None req-56470efe-5faa-4735-b857-928b7027d902 tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Lease: (returnval){ [ 639.713555] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5272e623-d6ee-866d-87c3-5f494a558801" [ 639.713555] env[63028]: _type = "HttpNfcLease" [ 639.713555] env[63028]: } obtained for exporting VM: (result){ [ 639.713555] env[63028]: value = "vm-550651" [ 639.713555] env[63028]: _type = "VirtualMachine" [ 639.713555] env[63028]: }. {{(pid=63028) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 639.713836] env[63028]: DEBUG oslo_vmware.api [None req-56470efe-5faa-4735-b857-928b7027d902 tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Waiting for the lease: (returnval){ [ 639.713836] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5272e623-d6ee-866d-87c3-5f494a558801" [ 639.713836] env[63028]: _type = "HttpNfcLease" [ 639.713836] env[63028]: } to be ready. {{(pid=63028) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 639.722657] env[63028]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 639.722657] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5272e623-d6ee-866d-87c3-5f494a558801" [ 639.722657] env[63028]: _type = "HttpNfcLease" [ 639.722657] env[63028]: } is initializing. 
{{(pid=63028) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 639.758776] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735099, 'name': CreateVM_Task} progress is 99%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 639.923511] env[63028]: DEBUG oslo_concurrency.lockutils [None req-81aea495-dd34-4a7a-b7ed-552eea3a13d1 tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] Acquiring lock "22713da1-ae53-4bbe-ae55-2490440cbd87" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 639.923511] env[63028]: DEBUG oslo_concurrency.lockutils [None req-81aea495-dd34-4a7a-b7ed-552eea3a13d1 tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] Lock "22713da1-ae53-4bbe-ae55-2490440cbd87" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 640.033326] env[63028]: DEBUG nova.compute.manager [req-bd4105aa-e802-4bb5-9f67-daa29c904aab req-eae70dd5-a359-4fcf-95e9-c05755853463 service nova] [instance: 8f6beda6-0fc6-4d85-9f27-f4248adda8f3] Received event network-vif-plugged-4ab9f841-7392-47cd-afac-be9ddd19b6bf {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 640.033608] env[63028]: DEBUG oslo_concurrency.lockutils [req-bd4105aa-e802-4bb5-9f67-daa29c904aab req-eae70dd5-a359-4fcf-95e9-c05755853463 service nova] Acquiring lock "8f6beda6-0fc6-4d85-9f27-f4248adda8f3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 640.033820] env[63028]: DEBUG oslo_concurrency.lockutils [req-bd4105aa-e802-4bb5-9f67-daa29c904aab req-eae70dd5-a359-4fcf-95e9-c05755853463 service nova] Lock "8f6beda6-0fc6-4d85-9f27-f4248adda8f3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 640.033971] env[63028]: DEBUG oslo_concurrency.lockutils [req-bd4105aa-e802-4bb5-9f67-daa29c904aab req-eae70dd5-a359-4fcf-95e9-c05755853463 service nova] Lock "8f6beda6-0fc6-4d85-9f27-f4248adda8f3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 640.038539] env[63028]: DEBUG nova.compute.manager [req-bd4105aa-e802-4bb5-9f67-daa29c904aab req-eae70dd5-a359-4fcf-95e9-c05755853463 service nova] [instance: 8f6beda6-0fc6-4d85-9f27-f4248adda8f3] No waiting events found dispatching network-vif-plugged-4ab9f841-7392-47cd-afac-be9ddd19b6bf {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 640.039040] env[63028]: WARNING nova.compute.manager [req-bd4105aa-e802-4bb5-9f67-daa29c904aab req-eae70dd5-a359-4fcf-95e9-c05755853463 service nova] [instance: 8f6beda6-0fc6-4d85-9f27-f4248adda8f3] Received unexpected event network-vif-plugged-4ab9f841-7392-47cd-afac-be9ddd19b6bf for instance with vm_state 
building and task_state spawning. [ 640.039040] env[63028]: DEBUG nova.compute.manager [req-bd4105aa-e802-4bb5-9f67-daa29c904aab req-eae70dd5-a359-4fcf-95e9-c05755853463 service nova] [instance: 8f6beda6-0fc6-4d85-9f27-f4248adda8f3] Received event network-changed-4ab9f841-7392-47cd-afac-be9ddd19b6bf {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 640.039239] env[63028]: DEBUG nova.compute.manager [req-bd4105aa-e802-4bb5-9f67-daa29c904aab req-eae70dd5-a359-4fcf-95e9-c05755853463 service nova] [instance: 8f6beda6-0fc6-4d85-9f27-f4248adda8f3] Refreshing instance network info cache due to event network-changed-4ab9f841-7392-47cd-afac-be9ddd19b6bf. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 640.039328] env[63028]: DEBUG oslo_concurrency.lockutils [req-bd4105aa-e802-4bb5-9f67-daa29c904aab req-eae70dd5-a359-4fcf-95e9-c05755853463 service nova] Acquiring lock "refresh_cache-8f6beda6-0fc6-4d85-9f27-f4248adda8f3" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 640.039638] env[63028]: DEBUG oslo_concurrency.lockutils [req-bd4105aa-e802-4bb5-9f67-daa29c904aab req-eae70dd5-a359-4fcf-95e9-c05755853463 service nova] Acquired lock "refresh_cache-8f6beda6-0fc6-4d85-9f27-f4248adda8f3" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 640.039723] env[63028]: DEBUG nova.network.neutron [req-bd4105aa-e802-4bb5-9f67-daa29c904aab req-eae70dd5-a359-4fcf-95e9-c05755853463 service nova] [instance: 8f6beda6-0fc6-4d85-9f27-f4248adda8f3] Refreshing network info cache for port 4ab9f841-7392-47cd-afac-be9ddd19b6bf {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 640.046886] env[63028]: DEBUG nova.network.neutron [None req-5282f999-0e6a-4d73-89f0-a01dea2945b9 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] [instance: 0e07a6cd-8c99-408d-95ba-63f7839c327f] Instance cache missing network info. {{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 640.070081] env[63028]: DEBUG oslo_vmware.api [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Task: {'id': task-2735100, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 640.116049] env[63028]: DEBUG oslo_vmware.api [None req-a93d865e-c2f1-4997-9b78-d5c953b30008 tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5279a859-271b-8c3e-dd19-788f2c6f5506, 'name': SearchDatastore_Task, 'duration_secs': 0.012037} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 640.120304] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a93d865e-c2f1-4997-9b78-d5c953b30008 tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 640.121921] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-a93d865e-c2f1-4997-9b78-d5c953b30008 tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] 8f6beda6-0fc6-4d85-9f27-f4248adda8f3/8f6beda6-0fc6-4d85-9f27-f4248adda8f3.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 640.121921] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ba1f857d-0cef-4b77-a989-682e50ef53f9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.145217] env[63028]: DEBUG oslo_vmware.api [None req-a93d865e-c2f1-4997-9b78-d5c953b30008 tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] Waiting for the task: (returnval){ [ 640.145217] env[63028]: value = "task-2735102" [ 640.145217] env[63028]: _type = "Task" [ 640.145217] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 640.156774] env[63028]: DEBUG oslo_vmware.api [None req-a93d865e-c2f1-4997-9b78-d5c953b30008 tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] Task: {'id': task-2735102, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 640.226701] env[63028]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 640.226701] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5272e623-d6ee-866d-87c3-5f494a558801" [ 640.226701] env[63028]: _type = "HttpNfcLease" [ 640.226701] env[63028]: } is ready. {{(pid=63028) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 640.226701] env[63028]: DEBUG oslo_vmware.rw_handles [None req-56470efe-5faa-4735-b857-928b7027d902 tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 640.226701] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5272e623-d6ee-866d-87c3-5f494a558801" [ 640.226701] env[63028]: _type = "HttpNfcLease" [ 640.226701] env[63028]: }. 
{{(pid=63028) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 640.226957] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a916d06-d82b-4d26-85df-c94809e11f06 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.248160] env[63028]: DEBUG oslo_vmware.rw_handles [None req-56470efe-5faa-4735-b857-928b7027d902 tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52221a6c-7dab-405c-d717-acee72f28fc5/disk-0.vmdk from lease info. {{(pid=63028) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 640.248160] env[63028]: DEBUG oslo_vmware.rw_handles [None req-56470efe-5faa-4735-b857-928b7027d902 tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52221a6c-7dab-405c-d717-acee72f28fc5/disk-0.vmdk for reading. {{(pid=63028) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 640.257820] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735099, 'name': CreateVM_Task} progress is 99%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 640.320236] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bafe4b4-9e0c-48d2-af08-241f05caa778 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.330120] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c39c9910-b578-492f-a20b-b04a498e6915 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.367738] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f538fd70-f160-40b0-8dce-8c4eacd0640b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.380274] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce7a77ed-692f-42f5-9fe6-809b0985bf39 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.394847] env[63028]: DEBUG nova.compute.provider_tree [None req-a4c82103-d8b7-4845-a6b4-abab95e8e4fb tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Updating inventory in ProviderTree for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 640.405199] env[63028]: DEBUG nova.network.neutron [None req-5282f999-0e6a-4d73-89f0-a01dea2945b9 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] [instance: 
0e07a6cd-8c99-408d-95ba-63f7839c327f] Updating instance_info_cache with network_info: [{"id": "cc6af35d-7e46-40e6-bc97-40efda1ab807", "address": "fa:16:3e:a3:ea:21", "network": {"id": "bbda1d1e-8f2d-4594-9711-57cd1f5c1d06", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-842445812-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7ed58d3e63604c2fac29e5744fd7f0bc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0685bd0b-3dbf-4a06-951c-c6a4726dd4b0", "external-id": "nsx-vlan-transportzone-661", "segmentation_id": 661, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc6af35d-7e", "ovs_interfaceid": "cc6af35d-7e46-40e6-bc97-40efda1ab807", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 640.419950] env[63028]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-3cb6d98b-e4d3-4f17-ad00-e5a6d7640e56 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.569695] env[63028]: DEBUG oslo_vmware.api [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Task: {'id': task-2735100, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.626817} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 640.570079] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] 99886410-ec47-46ad-9d07-ee3593006997/99886410-ec47-46ad-9d07-ee3593006997.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 640.571415] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] [instance: 99886410-ec47-46ad-9d07-ee3593006997] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 640.571415] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c7481c21-b41b-4cdb-b06b-93b886cc83d1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.580655] env[63028]: DEBUG oslo_vmware.api [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Waiting for the task: (returnval){ [ 640.580655] env[63028]: value = "task-2735103" [ 640.580655] env[63028]: _type = "Task" [ 640.580655] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 640.592079] env[63028]: DEBUG oslo_vmware.api [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Task: {'id': task-2735103, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 640.609263] env[63028]: DEBUG oslo_concurrency.lockutils [None req-887cc1bb-3511-4f80-9fa0-76db2df39f89 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Acquiring lock "ddf20137-4d63-4c7a-b519-445719265e1d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 640.609263] env[63028]: DEBUG oslo_concurrency.lockutils [None req-887cc1bb-3511-4f80-9fa0-76db2df39f89 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Lock "ddf20137-4d63-4c7a-b519-445719265e1d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 640.609263] env[63028]: DEBUG oslo_concurrency.lockutils [None req-887cc1bb-3511-4f80-9fa0-76db2df39f89 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Acquiring lock "ddf20137-4d63-4c7a-b519-445719265e1d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 640.609263] env[63028]: DEBUG oslo_concurrency.lockutils [None req-887cc1bb-3511-4f80-9fa0-76db2df39f89 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Lock "ddf20137-4d63-4c7a-b519-445719265e1d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 640.609837] env[63028]: DEBUG oslo_concurrency.lockutils [None req-887cc1bb-3511-4f80-9fa0-76db2df39f89 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Lock "ddf20137-4d63-4c7a-b519-445719265e1d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 640.610357] env[63028]: INFO nova.compute.manager [None req-887cc1bb-3511-4f80-9fa0-76db2df39f89 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] [instance: ddf20137-4d63-4c7a-b519-445719265e1d] Terminating instance [ 640.663056] env[63028]: DEBUG oslo_vmware.api [None req-a93d865e-c2f1-4997-9b78-d5c953b30008 tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] Task: {'id': task-2735102, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.518153} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 640.663056] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-a93d865e-c2f1-4997-9b78-d5c953b30008 tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] 8f6beda6-0fc6-4d85-9f27-f4248adda8f3/8f6beda6-0fc6-4d85-9f27-f4248adda8f3.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 640.663056] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-a93d865e-c2f1-4997-9b78-d5c953b30008 tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] [instance: 8f6beda6-0fc6-4d85-9f27-f4248adda8f3] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 640.663056] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-df31696b-26bf-4501-b124-7fab3f24cd48 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.673251] env[63028]: DEBUG oslo_vmware.api [None req-a93d865e-c2f1-4997-9b78-d5c953b30008 tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] Waiting for the task: (returnval){ [ 640.673251] env[63028]: value = "task-2735104" [ 640.673251] env[63028]: _type = "Task" [ 640.673251] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 640.687149] env[63028]: DEBUG oslo_vmware.api [None req-a93d865e-c2f1-4997-9b78-d5c953b30008 tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] Task: {'id': task-2735104, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 640.757052] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735099, 'name': CreateVM_Task, 'duration_secs': 1.370292} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 640.758876] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2ae111f7-4eaa-46c2-ab97-907daa913834] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 640.760215] env[63028]: DEBUG oslo_concurrency.lockutils [None req-30e7e053-37a2-402d-a298-8045eae5748c tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 640.760215] env[63028]: DEBUG oslo_concurrency.lockutils [None req-30e7e053-37a2-402d-a298-8045eae5748c tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 640.760357] env[63028]: DEBUG oslo_concurrency.lockutils [None req-30e7e053-37a2-402d-a298-8045eae5748c tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 640.760632] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-00968afe-e1d5-4466-9441-022753f1abb4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.767144] env[63028]: DEBUG oslo_vmware.api [None req-30e7e053-37a2-402d-a298-8045eae5748c tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Waiting for the task: (returnval){ [ 640.767144] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52e8cdca-3362-b622-ac87-bc5b16d7b009" [ 640.767144] env[63028]: _type = "Task" [ 640.767144] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 640.781129] env[63028]: DEBUG oslo_vmware.api [None req-30e7e053-37a2-402d-a298-8045eae5748c tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52e8cdca-3362-b622-ac87-bc5b16d7b009, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 640.908780] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5282f999-0e6a-4d73-89f0-a01dea2945b9 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] Releasing lock "refresh_cache-0e07a6cd-8c99-408d-95ba-63f7839c327f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 640.909412] env[63028]: DEBUG nova.compute.manager [None req-5282f999-0e6a-4d73-89f0-a01dea2945b9 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] [instance: 0e07a6cd-8c99-408d-95ba-63f7839c327f] Instance network_info: |[{"id": "cc6af35d-7e46-40e6-bc97-40efda1ab807", "address": "fa:16:3e:a3:ea:21", "network": {"id": "bbda1d1e-8f2d-4594-9711-57cd1f5c1d06", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-842445812-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7ed58d3e63604c2fac29e5744fd7f0bc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0685bd0b-3dbf-4a06-951c-c6a4726dd4b0", "external-id": "nsx-vlan-transportzone-661", "segmentation_id": 661, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc6af35d-7e", "ovs_interfaceid": "cc6af35d-7e46-40e6-bc97-40efda1ab807", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 640.915496] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-5282f999-0e6a-4d73-89f0-a01dea2945b9 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] [instance: 0e07a6cd-8c99-408d-95ba-63f7839c327f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a3:ea:21', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0685bd0b-3dbf-4a06-951c-c6a4726dd4b0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cc6af35d-7e46-40e6-bc97-40efda1ab807', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 640.927271] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-5282f999-0e6a-4d73-89f0-a01dea2945b9 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] Creating folder: Project (7ed58d3e63604c2fac29e5744fd7f0bc). Parent ref: group-v550570. 
{{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 640.927720] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5b7f71c2-a987-4774-87ff-3d3886ee802c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.934669] env[63028]: ERROR nova.scheduler.client.report [None req-a4c82103-d8b7-4845-a6b4-abab95e8e4fb tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] [req-49c713c7-22f6-4a94-8257-92dea8427865] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-49c713c7-22f6-4a94-8257-92dea8427865"}]} [ 640.945156] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-5282f999-0e6a-4d73-89f0-a01dea2945b9 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] Created folder: Project (7ed58d3e63604c2fac29e5744fd7f0bc) in parent group-v550570. [ 640.945156] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-5282f999-0e6a-4d73-89f0-a01dea2945b9 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] Creating folder: Instances. Parent ref: group-v550656. {{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 640.945156] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fa181685-66f9-48a2-a9cd-77025183caab {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.956960] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-5282f999-0e6a-4d73-89f0-a01dea2945b9 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] Created folder: Instances in parent group-v550656. [ 640.957596] env[63028]: DEBUG oslo.service.loopingcall [None req-5282f999-0e6a-4d73-89f0-a01dea2945b9 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 640.957806] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0e07a6cd-8c99-408d-95ba-63f7839c327f] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 640.960926] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e726b5b7-8f17-4f4c-aea5-ccb21a6a3762 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.980076] env[63028]: DEBUG nova.scheduler.client.report [None req-a4c82103-d8b7-4845-a6b4-abab95e8e4fb tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Refreshing inventories for resource provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 640.988148] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ac78c9bd-9fb2-4d81-94df-f75521abb985 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Acquiring lock "0d96ba8e-b46b-48ae-957c-cdc49762c395" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 640.988148] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ac78c9bd-9fb2-4d81-94df-f75521abb985 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Lock "0d96ba8e-b46b-48ae-957c-cdc49762c395" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 640.990987] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 640.990987] env[63028]: value = "task-2735107" [ 640.990987] env[63028]: _type = "Task" [ 640.990987] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 641.007244] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735107, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.008823] env[63028]: DEBUG nova.scheduler.client.report [None req-a4c82103-d8b7-4845-a6b4-abab95e8e4fb tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Updating ProviderTree inventory for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 641.009081] env[63028]: DEBUG nova.compute.provider_tree [None req-a4c82103-d8b7-4845-a6b4-abab95e8e4fb tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Updating inventory in ProviderTree for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 641.014245] env[63028]: DEBUG nova.network.neutron [req-bd4105aa-e802-4bb5-9f67-daa29c904aab req-eae70dd5-a359-4fcf-95e9-c05755853463 service nova] [instance: 8f6beda6-0fc6-4d85-9f27-f4248adda8f3] Updated VIF entry in instance network info cache for port 4ab9f841-7392-47cd-afac-be9ddd19b6bf. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 641.014606] env[63028]: DEBUG nova.network.neutron [req-bd4105aa-e802-4bb5-9f67-daa29c904aab req-eae70dd5-a359-4fcf-95e9-c05755853463 service nova] [instance: 8f6beda6-0fc6-4d85-9f27-f4248adda8f3] Updating instance_info_cache with network_info: [{"id": "4ab9f841-7392-47cd-afac-be9ddd19b6bf", "address": "fa:16:3e:85:18:66", "network": {"id": "495b5d19-156c-488d-bf52-e024e94bbf30", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-552385726-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de9847436cdb4e679067d13380cd1187", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a874c214-8cdf-4a41-a718-84262b2a28d8", "external-id": "cl2-zone-726", "segmentation_id": 726, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4ab9f841-73", "ovs_interfaceid": "4ab9f841-7392-47cd-afac-be9ddd19b6bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 641.030216] env[63028]: DEBUG nova.scheduler.client.report [None req-a4c82103-d8b7-4845-a6b4-abab95e8e4fb tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Refreshing aggregate associations for resource provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2, aggregates: None {{(pid=63028) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 641.064699] env[63028]: DEBUG nova.scheduler.client.report [None req-a4c82103-d8b7-4845-a6b4-abab95e8e4fb tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Refreshing trait associations for resource provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=63028) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 641.099312] env[63028]: DEBUG oslo_vmware.api [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Task: {'id': task-2735103, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.115374} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 641.101814] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] [instance: 99886410-ec47-46ad-9d07-ee3593006997] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 641.101814] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03b1ce96-9c70-4667-b9ea-a758bf423191 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.127541] env[63028]: DEBUG nova.compute.manager [None req-887cc1bb-3511-4f80-9fa0-76db2df39f89 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] [instance: ddf20137-4d63-4c7a-b519-445719265e1d] Start destroying the instance on the hypervisor. {{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 641.128030] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-887cc1bb-3511-4f80-9fa0-76db2df39f89 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] [instance: ddf20137-4d63-4c7a-b519-445719265e1d] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 641.142584] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] [instance: 99886410-ec47-46ad-9d07-ee3593006997] Reconfiguring VM instance instance-00000017 to attach disk [datastore1] 99886410-ec47-46ad-9d07-ee3593006997/99886410-ec47-46ad-9d07-ee3593006997.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 641.145082] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53b1a26a-f02c-4f0b-881c-777f322ce7b6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.148654] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8e01454a-cc34-45a7-9b83-bc80ba1eeebc {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.172763] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-887cc1bb-3511-4f80-9fa0-76db2df39f89 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] [instance: ddf20137-4d63-4c7a-b519-445719265e1d] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 641.174549] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9f928b91-7adc-4cc5-be80-313545663859 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.178087] env[63028]: DEBUG oslo_vmware.api [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Waiting for the task: (returnval){ [ 641.178087] env[63028]: value = "task-2735111" [ 641.178087] env[63028]: _type = "Task" [ 641.178087] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 641.197578] env[63028]: DEBUG oslo_vmware.api [None req-887cc1bb-3511-4f80-9fa0-76db2df39f89 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Waiting for the task: (returnval){ [ 641.197578] env[63028]: value = "task-2735112" [ 641.197578] env[63028]: _type = "Task" [ 641.197578] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 641.204859] env[63028]: DEBUG oslo_vmware.api [None req-a93d865e-c2f1-4997-9b78-d5c953b30008 tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] Task: {'id': task-2735104, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.146167} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 641.209504] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-a93d865e-c2f1-4997-9b78-d5c953b30008 tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] [instance: 8f6beda6-0fc6-4d85-9f27-f4248adda8f3] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 641.210056] env[63028]: DEBUG oslo_vmware.api [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Task: {'id': task-2735111, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.214453] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51e0aa39-6f0c-4347-8f84-3ea9ba601e51 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.221530] env[63028]: DEBUG oslo_vmware.api [None req-887cc1bb-3511-4f80-9fa0-76db2df39f89 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Task: {'id': task-2735112, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.246721] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-a93d865e-c2f1-4997-9b78-d5c953b30008 tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] [instance: 8f6beda6-0fc6-4d85-9f27-f4248adda8f3] Reconfiguring VM instance instance-00000018 to attach disk [datastore1] 8f6beda6-0fc6-4d85-9f27-f4248adda8f3/8f6beda6-0fc6-4d85-9f27-f4248adda8f3.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 641.250139] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1d081df0-4b57-4577-af2e-7e57904a6bfe {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.285149] env[63028]: DEBUG oslo_vmware.api [None req-30e7e053-37a2-402d-a298-8045eae5748c tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52e8cdca-3362-b622-ac87-bc5b16d7b009, 'name': SearchDatastore_Task, 'duration_secs': 0.015066} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 641.285149] env[63028]: DEBUG oslo_vmware.api [None req-a93d865e-c2f1-4997-9b78-d5c953b30008 tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] Waiting for the task: (returnval){ [ 641.285149] env[63028]: value = "task-2735113" [ 641.285149] env[63028]: _type = "Task" [ 641.285149] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 641.285149] env[63028]: DEBUG oslo_concurrency.lockutils [None req-30e7e053-37a2-402d-a298-8045eae5748c tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 641.285149] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-30e7e053-37a2-402d-a298-8045eae5748c tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] [instance: 2ae111f7-4eaa-46c2-ab97-907daa913834] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 641.285658] env[63028]: DEBUG oslo_concurrency.lockutils [None req-30e7e053-37a2-402d-a298-8045eae5748c tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 641.285658] env[63028]: DEBUG oslo_concurrency.lockutils [None req-30e7e053-37a2-402d-a298-8045eae5748c tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 641.285658] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-30e7e053-37a2-402d-a298-8045eae5748c tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 641.286070] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9106d5b0-6f94-4493-82a6-2c201dfbf9e8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.306063] env[63028]: DEBUG oslo_vmware.api [None req-a93d865e-c2f1-4997-9b78-d5c953b30008 tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] Task: {'id': task-2735113, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.306063] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-30e7e053-37a2-402d-a298-8045eae5748c tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 641.306063] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-30e7e053-37a2-402d-a298-8045eae5748c tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 641.306063] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cd8a167c-f86a-4fa3-94fa-f3016ebcfabc {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.312452] env[63028]: DEBUG oslo_vmware.api [None req-30e7e053-37a2-402d-a298-8045eae5748c tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Waiting for the task: (returnval){ [ 641.312452] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5249d405-f2ba-5b6b-3c45-d61a4a562003" [ 641.312452] env[63028]: _type = "Task" [ 641.312452] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 641.326429] env[63028]: DEBUG oslo_vmware.api [None req-30e7e053-37a2-402d-a298-8045eae5748c tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5249d405-f2ba-5b6b-3c45-d61a4a562003, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.508635] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735107, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.518885] env[63028]: DEBUG oslo_concurrency.lockutils [req-bd4105aa-e802-4bb5-9f67-daa29c904aab req-eae70dd5-a359-4fcf-95e9-c05755853463 service nova] Releasing lock "refresh_cache-8f6beda6-0fc6-4d85-9f27-f4248adda8f3" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 641.519320] env[63028]: DEBUG nova.compute.manager [req-bd4105aa-e802-4bb5-9f67-daa29c904aab req-eae70dd5-a359-4fcf-95e9-c05755853463 service nova] [instance: 0e07a6cd-8c99-408d-95ba-63f7839c327f] Received event network-vif-plugged-cc6af35d-7e46-40e6-bc97-40efda1ab807 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 641.519615] env[63028]: DEBUG oslo_concurrency.lockutils [req-bd4105aa-e802-4bb5-9f67-daa29c904aab req-eae70dd5-a359-4fcf-95e9-c05755853463 service nova] Acquiring lock "0e07a6cd-8c99-408d-95ba-63f7839c327f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 641.519818] env[63028]: DEBUG oslo_concurrency.lockutils [req-bd4105aa-e802-4bb5-9f67-daa29c904aab req-eae70dd5-a359-4fcf-95e9-c05755853463 service nova] Lock "0e07a6cd-8c99-408d-95ba-63f7839c327f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 641.519931] env[63028]: DEBUG oslo_concurrency.lockutils [req-bd4105aa-e802-4bb5-9f67-daa29c904aab req-eae70dd5-a359-4fcf-95e9-c05755853463 service nova] Lock "0e07a6cd-8c99-408d-95ba-63f7839c327f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 641.520102] env[63028]: DEBUG nova.compute.manager [req-bd4105aa-e802-4bb5-9f67-daa29c904aab req-eae70dd5-a359-4fcf-95e9-c05755853463 service nova] [instance: 0e07a6cd-8c99-408d-95ba-63f7839c327f] No waiting events found dispatching network-vif-plugged-cc6af35d-7e46-40e6-bc97-40efda1ab807 {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 641.520897] env[63028]: WARNING nova.compute.manager [req-bd4105aa-e802-4bb5-9f67-daa29c904aab req-eae70dd5-a359-4fcf-95e9-c05755853463 service nova] [instance: 0e07a6cd-8c99-408d-95ba-63f7839c327f] Received unexpected event network-vif-plugged-cc6af35d-7e46-40e6-bc97-40efda1ab807 for instance with vm_state building and task_state spawning. [ 641.697845] env[63028]: DEBUG oslo_vmware.api [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Task: {'id': task-2735111, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.712299] env[63028]: DEBUG oslo_vmware.api [None req-887cc1bb-3511-4f80-9fa0-76db2df39f89 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Task: {'id': task-2735112, 'name': PowerOffVM_Task, 'duration_secs': 0.266734} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 641.712575] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-887cc1bb-3511-4f80-9fa0-76db2df39f89 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] [instance: ddf20137-4d63-4c7a-b519-445719265e1d] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 641.712740] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-887cc1bb-3511-4f80-9fa0-76db2df39f89 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] [instance: ddf20137-4d63-4c7a-b519-445719265e1d] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 641.712996] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cd980769-21ff-4150-aeee-675be48cf053 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.802701] env[63028]: DEBUG oslo_vmware.api [None req-a93d865e-c2f1-4997-9b78-d5c953b30008 tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] Task: {'id': task-2735113, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.804680] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-887cc1bb-3511-4f80-9fa0-76db2df39f89 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] [instance: ddf20137-4d63-4c7a-b519-445719265e1d] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 641.805142] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-887cc1bb-3511-4f80-9fa0-76db2df39f89 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] [instance: ddf20137-4d63-4c7a-b519-445719265e1d] Deleting contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 641.805442] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-887cc1bb-3511-4f80-9fa0-76db2df39f89 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Deleting the datastore file [datastore1] ddf20137-4d63-4c7a-b519-445719265e1d {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 641.807395] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f1842428-be76-4cd2-a8db-4fc1607e39c1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.822459] env[63028]: DEBUG oslo_vmware.api [None req-887cc1bb-3511-4f80-9fa0-76db2df39f89 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Waiting for the task: (returnval){ [ 641.822459] env[63028]: value = "task-2735115" [ 641.822459] env[63028]: _type = "Task" [ 641.822459] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 641.831679] env[63028]: DEBUG oslo_vmware.api [None req-30e7e053-37a2-402d-a298-8045eae5748c tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5249d405-f2ba-5b6b-3c45-d61a4a562003, 'name': SearchDatastore_Task, 'duration_secs': 0.018464} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 641.833332] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5131e4a0-c989-40ba-b47d-334fc18f5d06 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.840176] env[63028]: DEBUG oslo_vmware.api [None req-887cc1bb-3511-4f80-9fa0-76db2df39f89 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Task: {'id': task-2735115, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.844964] env[63028]: DEBUG oslo_vmware.api [None req-30e7e053-37a2-402d-a298-8045eae5748c tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Waiting for the task: (returnval){ [ 641.844964] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52354feb-aac8-d1d3-0e83-d7881de995d4" [ 641.844964] env[63028]: _type = "Task" [ 641.844964] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 641.858406] env[63028]: DEBUG oslo_vmware.api [None req-30e7e053-37a2-402d-a298-8045eae5748c tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52354feb-aac8-d1d3-0e83-d7881de995d4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.881347] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94645695-27de-4a31-a7ed-379c6026fcb3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.890621] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c07e355-1425-4957-a4a7-118b271ebb1c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.927256] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a30b66a-6139-4165-8c59-0a55cb700ad0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.934917] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b2a8ddd-0caf-463c-ba40-d6d84c8896dc {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.951439] env[63028]: DEBUG nova.compute.provider_tree [None req-a4c82103-d8b7-4845-a6b4-abab95e8e4fb tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Updating inventory in ProviderTree for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 642.002966] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735107, 'name': CreateVM_Task, 'duration_secs': 0.633317} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 642.003157] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0e07a6cd-8c99-408d-95ba-63f7839c327f] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 642.003874] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5282f999-0e6a-4d73-89f0-a01dea2945b9 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 642.004059] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5282f999-0e6a-4d73-89f0-a01dea2945b9 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 642.008230] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5282f999-0e6a-4d73-89f0-a01dea2945b9 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 642.015025] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-00a363bd-2708-4f55-a32d-7a0e19011431 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.015738] env[63028]: DEBUG oslo_vmware.api [None req-5282f999-0e6a-4d73-89f0-a01dea2945b9 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] Waiting for the task: (returnval){ [ 642.015738] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5238debc-d22c-31e7-5fe8-e83cca2efa6f" [ 642.015738] env[63028]: _type = "Task" [ 642.015738] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 642.026174] env[63028]: DEBUG oslo_vmware.api [None req-5282f999-0e6a-4d73-89f0-a01dea2945b9 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5238debc-d22c-31e7-5fe8-e83cca2efa6f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 642.196377] env[63028]: DEBUG oslo_vmware.api [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Task: {'id': task-2735111, 'name': ReconfigVM_Task, 'duration_secs': 0.693574} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 642.196764] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] [instance: 99886410-ec47-46ad-9d07-ee3593006997] Reconfigured VM instance instance-00000017 to attach disk [datastore1] 99886410-ec47-46ad-9d07-ee3593006997/99886410-ec47-46ad-9d07-ee3593006997.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 642.197975] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-90e6b958-5ee6-485e-8504-cf361fa7ad59 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.205769] env[63028]: DEBUG oslo_vmware.api [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Waiting for the task: (returnval){ [ 642.205769] env[63028]: value = "task-2735116" [ 642.205769] env[63028]: _type = "Task" [ 642.205769] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 642.215712] env[63028]: DEBUG oslo_vmware.api [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Task: {'id': task-2735116, 'name': Rename_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 642.298506] env[63028]: DEBUG oslo_vmware.api [None req-a93d865e-c2f1-4997-9b78-d5c953b30008 tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] Task: {'id': task-2735113, 'name': ReconfigVM_Task, 'duration_secs': 0.615618} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 642.298917] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-a93d865e-c2f1-4997-9b78-d5c953b30008 tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] [instance: 8f6beda6-0fc6-4d85-9f27-f4248adda8f3] Reconfigured VM instance instance-00000018 to attach disk [datastore1] 8f6beda6-0fc6-4d85-9f27-f4248adda8f3/8f6beda6-0fc6-4d85-9f27-f4248adda8f3.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 642.299664] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2c4dbe60-d221-4aaf-b95a-2f25e266a199 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.311550] env[63028]: DEBUG oslo_vmware.api [None req-a93d865e-c2f1-4997-9b78-d5c953b30008 tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] Waiting for the task: (returnval){ [ 642.311550] env[63028]: value = "task-2735117" [ 642.311550] env[63028]: _type = "Task" [ 642.311550] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 642.327150] env[63028]: DEBUG oslo_vmware.api [None req-a93d865e-c2f1-4997-9b78-d5c953b30008 tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] Task: {'id': task-2735117, 'name': Rename_Task} progress is 6%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 642.338961] env[63028]: DEBUG oslo_vmware.api [None req-887cc1bb-3511-4f80-9fa0-76db2df39f89 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Task: {'id': task-2735115, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.453681} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 642.339194] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-887cc1bb-3511-4f80-9fa0-76db2df39f89 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 642.339457] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-887cc1bb-3511-4f80-9fa0-76db2df39f89 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] [instance: ddf20137-4d63-4c7a-b519-445719265e1d] Deleted contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 642.339714] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-887cc1bb-3511-4f80-9fa0-76db2df39f89 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] [instance: ddf20137-4d63-4c7a-b519-445719265e1d] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 642.340037] env[63028]: INFO nova.compute.manager [None req-887cc1bb-3511-4f80-9fa0-76db2df39f89 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] [instance: ddf20137-4d63-4c7a-b519-445719265e1d] Took 1.21 seconds to destroy the instance on the hypervisor. [ 642.340339] env[63028]: DEBUG oslo.service.loopingcall [None req-887cc1bb-3511-4f80-9fa0-76db2df39f89 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 642.340590] env[63028]: DEBUG nova.compute.manager [-] [instance: ddf20137-4d63-4c7a-b519-445719265e1d] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 642.341153] env[63028]: DEBUG nova.network.neutron [-] [instance: ddf20137-4d63-4c7a-b519-445719265e1d] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 642.357571] env[63028]: DEBUG oslo_vmware.api [None req-30e7e053-37a2-402d-a298-8045eae5748c tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52354feb-aac8-d1d3-0e83-d7881de995d4, 'name': SearchDatastore_Task, 'duration_secs': 0.018328} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 642.360137] env[63028]: DEBUG oslo_concurrency.lockutils [None req-30e7e053-37a2-402d-a298-8045eae5748c tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 642.360137] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-30e7e053-37a2-402d-a298-8045eae5748c tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] 2ae111f7-4eaa-46c2-ab97-907daa913834/2ae111f7-4eaa-46c2-ab97-907daa913834.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 642.360137] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4001d03c-f9d0-492c-9554-b20702d46d08 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.367899] env[63028]: DEBUG oslo_vmware.api [None req-30e7e053-37a2-402d-a298-8045eae5748c tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Waiting for the task: (returnval){ [ 642.367899] env[63028]: value = "task-2735118" [ 642.367899] env[63028]: _type = "Task" [ 642.367899] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 642.382145] env[63028]: DEBUG oslo_vmware.api [None req-30e7e053-37a2-402d-a298-8045eae5748c tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Task: {'id': task-2735118, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 642.476420] env[63028]: ERROR nova.scheduler.client.report [None req-a4c82103-d8b7-4845-a6b4-abab95e8e4fb tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] [req-0ddb1832-689e-47e0-9f85-ccbead7f2f85] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-0ddb1832-689e-47e0-9f85-ccbead7f2f85"}]} [ 642.506101] env[63028]: DEBUG nova.scheduler.client.report [None req-a4c82103-d8b7-4845-a6b4-abab95e8e4fb tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Refreshing inventories for resource provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 642.524208] env[63028]: DEBUG nova.scheduler.client.report [None req-a4c82103-d8b7-4845-a6b4-abab95e8e4fb tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Updating ProviderTree inventory for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 642.524466] env[63028]: DEBUG nova.compute.provider_tree [None req-a4c82103-d8b7-4845-a6b4-abab95e8e4fb tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Updating inventory in ProviderTree for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 642.531221] env[63028]: DEBUG oslo_vmware.api [None req-5282f999-0e6a-4d73-89f0-a01dea2945b9 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5238debc-d22c-31e7-5fe8-e83cca2efa6f, 'name': SearchDatastore_Task, 'duration_secs': 0.018241} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 642.531689] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5282f999-0e6a-4d73-89f0-a01dea2945b9 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 642.532031] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-5282f999-0e6a-4d73-89f0-a01dea2945b9 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] [instance: 0e07a6cd-8c99-408d-95ba-63f7839c327f] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 642.532721] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5282f999-0e6a-4d73-89f0-a01dea2945b9 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 642.532950] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5282f999-0e6a-4d73-89f0-a01dea2945b9 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 642.533254] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-5282f999-0e6a-4d73-89f0-a01dea2945b9 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 642.533603] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1d71766f-77cd-41b7-bc70-6191b748ba18 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.547423] env[63028]: DEBUG oslo_concurrency.lockutils [None req-3ea37b67-73e0-4219-b828-a126fbc237e2 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Acquiring lock "e2d39c43-6666-4fda-b8e2-485399c59e46" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 642.547798] env[63028]: DEBUG oslo_concurrency.lockutils [None req-3ea37b67-73e0-4219-b828-a126fbc237e2 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Lock "e2d39c43-6666-4fda-b8e2-485399c59e46" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 642.548162] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-5282f999-0e6a-4d73-89f0-a01dea2945b9 tempest-AttachInterfacesUnderV243Test-174505349 
tempest-AttachInterfacesUnderV243Test-174505349-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 642.548394] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-5282f999-0e6a-4d73-89f0-a01dea2945b9 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 642.550043] env[63028]: DEBUG nova.scheduler.client.report [None req-a4c82103-d8b7-4845-a6b4-abab95e8e4fb tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Refreshing aggregate associations for resource provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2, aggregates: None {{(pid=63028) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 642.553837] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1070bf00-5669-4c55-a1b6-d959e506c75f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.561145] env[63028]: DEBUG oslo_vmware.api [None req-5282f999-0e6a-4d73-89f0-a01dea2945b9 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] Waiting for the task: (returnval){ [ 642.561145] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52277df3-f802-5c7b-a9dc-50ba6e361319" [ 642.561145] env[63028]: _type = "Task" [ 642.561145] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 642.570751] env[63028]: DEBUG oslo_vmware.api [None req-5282f999-0e6a-4d73-89f0-a01dea2945b9 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52277df3-f802-5c7b-a9dc-50ba6e361319, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 642.576764] env[63028]: DEBUG nova.scheduler.client.report [None req-a4c82103-d8b7-4845-a6b4-abab95e8e4fb tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Refreshing trait associations for resource provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=63028) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 642.706782] env[63028]: DEBUG nova.compute.manager [req-20d4a470-a225-4afb-b293-b8626f65fcbb req-034ef669-9864-4959-ae4e-73fda17bf45a service nova] [instance: 0e07a6cd-8c99-408d-95ba-63f7839c327f] Received event network-changed-cc6af35d-7e46-40e6-bc97-40efda1ab807 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 642.707126] env[63028]: DEBUG nova.compute.manager [req-20d4a470-a225-4afb-b293-b8626f65fcbb req-034ef669-9864-4959-ae4e-73fda17bf45a service nova] [instance: 0e07a6cd-8c99-408d-95ba-63f7839c327f] Refreshing instance network info cache due to event network-changed-cc6af35d-7e46-40e6-bc97-40efda1ab807. 
{{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 642.707380] env[63028]: DEBUG oslo_concurrency.lockutils [req-20d4a470-a225-4afb-b293-b8626f65fcbb req-034ef669-9864-4959-ae4e-73fda17bf45a service nova] Acquiring lock "refresh_cache-0e07a6cd-8c99-408d-95ba-63f7839c327f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 642.707541] env[63028]: DEBUG oslo_concurrency.lockutils [req-20d4a470-a225-4afb-b293-b8626f65fcbb req-034ef669-9864-4959-ae4e-73fda17bf45a service nova] Acquired lock "refresh_cache-0e07a6cd-8c99-408d-95ba-63f7839c327f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 642.707700] env[63028]: DEBUG nova.network.neutron [req-20d4a470-a225-4afb-b293-b8626f65fcbb req-034ef669-9864-4959-ae4e-73fda17bf45a service nova] [instance: 0e07a6cd-8c99-408d-95ba-63f7839c327f] Refreshing network info cache for port cc6af35d-7e46-40e6-bc97-40efda1ab807 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 642.728755] env[63028]: DEBUG oslo_vmware.api [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Task: {'id': task-2735116, 'name': Rename_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 642.824640] env[63028]: DEBUG oslo_vmware.api [None req-a93d865e-c2f1-4997-9b78-d5c953b30008 tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] Task: {'id': task-2735117, 'name': Rename_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 642.888085] env[63028]: DEBUG oslo_vmware.api [None req-30e7e053-37a2-402d-a298-8045eae5748c tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Task: {'id': task-2735118, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 643.074353] env[63028]: DEBUG oslo_vmware.api [None req-5282f999-0e6a-4d73-89f0-a01dea2945b9 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52277df3-f802-5c7b-a9dc-50ba6e361319, 'name': SearchDatastore_Task, 'duration_secs': 0.016442} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 643.078935] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7f68c558-18ce-40e6-836f-c17a44f2f6f1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.099405] env[63028]: DEBUG oslo_vmware.api [None req-5282f999-0e6a-4d73-89f0-a01dea2945b9 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] Waiting for the task: (returnval){ [ 643.099405] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]525f1027-178a-269e-395d-56fd64734cfe" [ 643.099405] env[63028]: _type = "Task" [ 643.099405] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 643.111504] env[63028]: DEBUG oslo_vmware.api [None req-5282f999-0e6a-4d73-89f0-a01dea2945b9 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]525f1027-178a-269e-395d-56fd64734cfe, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 643.230501] env[63028]: DEBUG oslo_vmware.api [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Task: {'id': task-2735116, 'name': Rename_Task, 'duration_secs': 0.845701} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 643.233438] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] [instance: 99886410-ec47-46ad-9d07-ee3593006997] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 643.233438] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-83a21899-6353-4835-ba49-afcaf756e111 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.242097] env[63028]: DEBUG oslo_vmware.api [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Waiting for the task: (returnval){ [ 643.242097] env[63028]: value = "task-2735119" [ 643.242097] env[63028]: _type = "Task" [ 643.242097] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 643.253188] env[63028]: DEBUG oslo_vmware.api [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Task: {'id': task-2735119, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 643.286170] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b12bab70-d256-4d8d-836c-5dc209d14535 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.294594] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-284a0ac8-feca-460c-afae-c20db466fadd {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.331806] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff59d113-f50c-4666-b9e1-eaf55f7f7ec7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.340489] env[63028]: DEBUG oslo_vmware.api [None req-a93d865e-c2f1-4997-9b78-d5c953b30008 tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] Task: {'id': task-2735117, 'name': Rename_Task, 'duration_secs': 0.739963} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 643.343480] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-a93d865e-c2f1-4997-9b78-d5c953b30008 tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] [instance: 8f6beda6-0fc6-4d85-9f27-f4248adda8f3] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 643.344154] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-edb28585-2bdc-4d22-8419-fc840d959e7d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.346825] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b20a51e7-f876-49db-9448-ee8a16347d70 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.365918] env[63028]: DEBUG nova.compute.provider_tree [None req-a4c82103-d8b7-4845-a6b4-abab95e8e4fb tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Updating inventory in ProviderTree for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 643.370172] env[63028]: DEBUG oslo_vmware.api [None req-a93d865e-c2f1-4997-9b78-d5c953b30008 tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] Waiting for the task: (returnval){ [ 643.370172] env[63028]: value = "task-2735120" [ 643.370172] env[63028]: _type = "Task" [ 643.370172] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 643.389447] env[63028]: DEBUG oslo_vmware.api [None req-30e7e053-37a2-402d-a298-8045eae5748c tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Task: {'id': task-2735118, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.651765} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 643.394144] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-30e7e053-37a2-402d-a298-8045eae5748c tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] 2ae111f7-4eaa-46c2-ab97-907daa913834/2ae111f7-4eaa-46c2-ab97-907daa913834.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 643.394144] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-30e7e053-37a2-402d-a298-8045eae5748c tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] [instance: 2ae111f7-4eaa-46c2-ab97-907daa913834] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 643.397667] env[63028]: DEBUG oslo_vmware.api [None req-a93d865e-c2f1-4997-9b78-d5c953b30008 tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] Task: {'id': task-2735120, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 643.398945] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fccf9e11-2e53-4537-b86b-93d99e3985a9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.412360] env[63028]: DEBUG oslo_vmware.api [None req-30e7e053-37a2-402d-a298-8045eae5748c tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Waiting for the task: (returnval){ [ 643.412360] env[63028]: value = "task-2735121" [ 643.412360] env[63028]: _type = "Task" [ 643.412360] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 643.434259] env[63028]: DEBUG oslo_vmware.api [None req-30e7e053-37a2-402d-a298-8045eae5748c tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Task: {'id': task-2735121, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 643.615600] env[63028]: DEBUG oslo_vmware.api [None req-5282f999-0e6a-4d73-89f0-a01dea2945b9 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]525f1027-178a-269e-395d-56fd64734cfe, 'name': SearchDatastore_Task, 'duration_secs': 0.011884} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 643.615960] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5282f999-0e6a-4d73-89f0-a01dea2945b9 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 643.616811] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-5282f999-0e6a-4d73-89f0-a01dea2945b9 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] 0e07a6cd-8c99-408d-95ba-63f7839c327f/0e07a6cd-8c99-408d-95ba-63f7839c327f.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 643.616811] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-31178ca1-bb82-480c-bcae-f34ab4b96abb {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.626948] env[63028]: DEBUG oslo_vmware.api [None req-5282f999-0e6a-4d73-89f0-a01dea2945b9 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] Waiting for the task: (returnval){ [ 643.626948] env[63028]: value = "task-2735122" [ 643.626948] env[63028]: _type = "Task" [ 643.626948] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 643.640329] env[63028]: DEBUG oslo_vmware.api [None req-5282f999-0e6a-4d73-89f0-a01dea2945b9 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] Task: {'id': task-2735122, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 643.690468] env[63028]: DEBUG nova.network.neutron [-] [instance: ddf20137-4d63-4c7a-b519-445719265e1d] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 643.734351] env[63028]: DEBUG nova.network.neutron [req-20d4a470-a225-4afb-b293-b8626f65fcbb req-034ef669-9864-4959-ae4e-73fda17bf45a service nova] [instance: 0e07a6cd-8c99-408d-95ba-63f7839c327f] Updated VIF entry in instance network info cache for port cc6af35d-7e46-40e6-bc97-40efda1ab807. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 643.734351] env[63028]: DEBUG nova.network.neutron [req-20d4a470-a225-4afb-b293-b8626f65fcbb req-034ef669-9864-4959-ae4e-73fda17bf45a service nova] [instance: 0e07a6cd-8c99-408d-95ba-63f7839c327f] Updating instance_info_cache with network_info: [{"id": "cc6af35d-7e46-40e6-bc97-40efda1ab807", "address": "fa:16:3e:a3:ea:21", "network": {"id": "bbda1d1e-8f2d-4594-9711-57cd1f5c1d06", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-842445812-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7ed58d3e63604c2fac29e5744fd7f0bc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0685bd0b-3dbf-4a06-951c-c6a4726dd4b0", "external-id": "nsx-vlan-transportzone-661", "segmentation_id": 661, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc6af35d-7e", "ovs_interfaceid": "cc6af35d-7e46-40e6-bc97-40efda1ab807", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 643.755129] env[63028]: DEBUG oslo_vmware.api [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Task: {'id': task-2735119, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 643.891785] env[63028]: DEBUG oslo_vmware.api [None req-a93d865e-c2f1-4997-9b78-d5c953b30008 tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] Task: {'id': task-2735120, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 643.936747] env[63028]: DEBUG oslo_vmware.api [None req-30e7e053-37a2-402d-a298-8045eae5748c tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Task: {'id': task-2735121, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.123746} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 643.937919] env[63028]: DEBUG nova.scheduler.client.report [None req-a4c82103-d8b7-4845-a6b4-abab95e8e4fb tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Updated inventory for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 with generation 51 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 643.939316] env[63028]: DEBUG nova.compute.provider_tree [None req-a4c82103-d8b7-4845-a6b4-abab95e8e4fb tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Updating resource provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 generation from 51 to 52 during operation: update_inventory {{(pid=63028) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 643.939316] env[63028]: DEBUG nova.compute.provider_tree [None req-a4c82103-d8b7-4845-a6b4-abab95e8e4fb tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Updating inventory in ProviderTree for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 643.943453] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-30e7e053-37a2-402d-a298-8045eae5748c tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] [instance: 2ae111f7-4eaa-46c2-ab97-907daa913834] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 643.944804] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0165908-fa41-4bfd-a775-cc13576f921e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.977569] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-30e7e053-37a2-402d-a298-8045eae5748c tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] [instance: 2ae111f7-4eaa-46c2-ab97-907daa913834] Reconfiguring VM instance instance-00000016 to attach disk [datastore1] 2ae111f7-4eaa-46c2-ab97-907daa913834/2ae111f7-4eaa-46c2-ab97-907daa913834.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 643.979265] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-71293e80-f0b1-4d45-85e3-81c53d0c8be5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.009668] env[63028]: DEBUG oslo_vmware.api [None 
req-30e7e053-37a2-402d-a298-8045eae5748c tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Waiting for the task: (returnval){ [ 644.009668] env[63028]: value = "task-2735123" [ 644.009668] env[63028]: _type = "Task" [ 644.009668] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 644.024949] env[63028]: DEBUG oslo_vmware.api [None req-30e7e053-37a2-402d-a298-8045eae5748c tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Task: {'id': task-2735123, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 644.140405] env[63028]: DEBUG oslo_vmware.api [None req-5282f999-0e6a-4d73-89f0-a01dea2945b9 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] Task: {'id': task-2735122, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 644.195395] env[63028]: INFO nova.compute.manager [-] [instance: ddf20137-4d63-4c7a-b519-445719265e1d] Took 1.85 seconds to deallocate network for instance. [ 644.239925] env[63028]: DEBUG oslo_concurrency.lockutils [req-20d4a470-a225-4afb-b293-b8626f65fcbb req-034ef669-9864-4959-ae4e-73fda17bf45a service nova] Releasing lock "refresh_cache-0e07a6cd-8c99-408d-95ba-63f7839c327f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 644.259113] env[63028]: DEBUG oslo_vmware.api [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Task: {'id': task-2735119, 'name': PowerOnVM_Task, 'duration_secs': 0.707523} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 644.259527] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] [instance: 99886410-ec47-46ad-9d07-ee3593006997] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 644.260080] env[63028]: INFO nova.compute.manager [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] [instance: 99886410-ec47-46ad-9d07-ee3593006997] Took 12.20 seconds to spawn the instance on the hypervisor. 
[ 644.260080] env[63028]: DEBUG nova.compute.manager [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] [instance: 99886410-ec47-46ad-9d07-ee3593006997] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 644.261511] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4345906a-db4f-451a-8aac-510ff5e29e14 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.389792] env[63028]: DEBUG oslo_vmware.api [None req-a93d865e-c2f1-4997-9b78-d5c953b30008 tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] Task: {'id': task-2735120, 'name': PowerOnVM_Task, 'duration_secs': 0.690015} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 644.390147] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-a93d865e-c2f1-4997-9b78-d5c953b30008 tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] [instance: 8f6beda6-0fc6-4d85-9f27-f4248adda8f3] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 644.390407] env[63028]: INFO nova.compute.manager [None req-a93d865e-c2f1-4997-9b78-d5c953b30008 tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] [instance: 8f6beda6-0fc6-4d85-9f27-f4248adda8f3] Took 9.56 seconds to spawn the instance on the hypervisor. [ 644.390848] env[63028]: DEBUG nova.compute.manager [None req-a93d865e-c2f1-4997-9b78-d5c953b30008 tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] [instance: 8f6beda6-0fc6-4d85-9f27-f4248adda8f3] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 644.391761] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85a4eeb7-d693-4940-bdfc-9e828a0b71d4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.445825] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a4c82103-d8b7-4845-a6b4-abab95e8e4fb tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 5.531s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 644.449302] env[63028]: DEBUG oslo_concurrency.lockutils [None req-bbc4a681-f719-4c6e-ac1b-190ea2c48080 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.108s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 644.450566] env[63028]: INFO nova.compute.claims [None req-bbc4a681-f719-4c6e-ac1b-190ea2c48080 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: 5a340e31-678c-437e-aa4e-07d5d9f4334f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 644.483806] env[63028]: INFO nova.scheduler.client.report [None 
req-a4c82103-d8b7-4845-a6b4-abab95e8e4fb tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Deleted allocations for instance 67440140-a619-41f2-98fe-eff23e8ad8a5 [ 644.525490] env[63028]: DEBUG oslo_vmware.api [None req-30e7e053-37a2-402d-a298-8045eae5748c tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Task: {'id': task-2735123, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 644.642315] env[63028]: DEBUG oslo_vmware.api [None req-5282f999-0e6a-4d73-89f0-a01dea2945b9 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] Task: {'id': task-2735122, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.660558} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 644.642315] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-5282f999-0e6a-4d73-89f0-a01dea2945b9 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] 0e07a6cd-8c99-408d-95ba-63f7839c327f/0e07a6cd-8c99-408d-95ba-63f7839c327f.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 644.642315] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-5282f999-0e6a-4d73-89f0-a01dea2945b9 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] [instance: 0e07a6cd-8c99-408d-95ba-63f7839c327f] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 644.642315] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8488c8d4-b9b8-4f92-8c2c-58c9809d3879 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.650423] env[63028]: DEBUG oslo_vmware.api [None req-5282f999-0e6a-4d73-89f0-a01dea2945b9 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] Waiting for the task: (returnval){ [ 644.650423] env[63028]: value = "task-2735125" [ 644.650423] env[63028]: _type = "Task" [ 644.650423] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 644.667117] env[63028]: DEBUG oslo_vmware.api [None req-5282f999-0e6a-4d73-89f0-a01dea2945b9 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] Task: {'id': task-2735125, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 644.702759] env[63028]: DEBUG oslo_concurrency.lockutils [None req-887cc1bb-3511-4f80-9fa0-76db2df39f89 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 644.740503] env[63028]: DEBUG nova.compute.manager [req-12dcb85b-5757-497b-a77a-7e6f1dc3cb00 req-ad73066a-d49f-4af7-ac7f-d1d509b8f4f8 service nova] [instance: ddf20137-4d63-4c7a-b519-445719265e1d] Received event network-vif-deleted-7efed411-b25a-4981-919a-66c96dd949ee {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 644.785993] env[63028]: INFO nova.compute.manager [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] [instance: 99886410-ec47-46ad-9d07-ee3593006997] Took 39.61 seconds to build instance. [ 644.912852] env[63028]: INFO nova.compute.manager [None req-a93d865e-c2f1-4997-9b78-d5c953b30008 tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] [instance: 8f6beda6-0fc6-4d85-9f27-f4248adda8f3] Took 38.94 seconds to build instance. [ 644.997556] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a4c82103-d8b7-4845-a6b4-abab95e8e4fb tempest-ServerExternalEventsTest-1563001839 tempest-ServerExternalEventsTest-1563001839-project-member] Lock "67440140-a619-41f2-98fe-eff23e8ad8a5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 39.480s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 645.023867] env[63028]: DEBUG oslo_vmware.api [None req-30e7e053-37a2-402d-a298-8045eae5748c tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Task: {'id': task-2735123, 'name': ReconfigVM_Task, 'duration_secs': 0.547499} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 645.024271] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-30e7e053-37a2-402d-a298-8045eae5748c tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] [instance: 2ae111f7-4eaa-46c2-ab97-907daa913834] Reconfigured VM instance instance-00000016 to attach disk [datastore1] 2ae111f7-4eaa-46c2-ab97-907daa913834/2ae111f7-4eaa-46c2-ab97-907daa913834.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 645.024957] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-64955d30-ca45-4e8d-8335-1124510a940a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.034027] env[63028]: DEBUG oslo_vmware.api [None req-30e7e053-37a2-402d-a298-8045eae5748c tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Waiting for the task: (returnval){ [ 645.034027] env[63028]: value = "task-2735126" [ 645.034027] env[63028]: _type = "Task" [ 645.034027] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 645.043957] env[63028]: DEBUG oslo_vmware.api [None req-30e7e053-37a2-402d-a298-8045eae5748c tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Task: {'id': task-2735126, 'name': Rename_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 645.161442] env[63028]: DEBUG oslo_vmware.api [None req-5282f999-0e6a-4d73-89f0-a01dea2945b9 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] Task: {'id': task-2735125, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.107301} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 645.161791] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-5282f999-0e6a-4d73-89f0-a01dea2945b9 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] [instance: 0e07a6cd-8c99-408d-95ba-63f7839c327f] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 645.162637] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b956d61e-f3ed-4bc2-804b-7dc1df7a8962 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.191579] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-5282f999-0e6a-4d73-89f0-a01dea2945b9 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] [instance: 0e07a6cd-8c99-408d-95ba-63f7839c327f] Reconfiguring VM instance instance-00000019 to attach disk [datastore1] 0e07a6cd-8c99-408d-95ba-63f7839c327f/0e07a6cd-8c99-408d-95ba-63f7839c327f.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 645.191986] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cedf8e7a-8d4a-476f-8818-5edcfe03a628 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.214783] env[63028]: DEBUG oslo_vmware.api [None req-5282f999-0e6a-4d73-89f0-a01dea2945b9 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] Waiting for the task: (returnval){ [ 645.214783] env[63028]: value = "task-2735127" [ 645.214783] env[63028]: _type = "Task" [ 645.214783] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 645.225530] env[63028]: DEBUG oslo_vmware.api [None req-5282f999-0e6a-4d73-89f0-a01dea2945b9 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] Task: {'id': task-2735127, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 645.289211] env[63028]: DEBUG oslo_concurrency.lockutils [None req-e5ece637-2108-49f8-8679-958eee5e45ec tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Lock "99886410-ec47-46ad-9d07-ee3593006997" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 41.118s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 645.417767] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a93d865e-c2f1-4997-9b78-d5c953b30008 tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] Lock "8f6beda6-0fc6-4d85-9f27-f4248adda8f3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 40.455s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 645.551065] env[63028]: DEBUG oslo_vmware.api [None req-30e7e053-37a2-402d-a298-8045eae5748c tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Task: {'id': task-2735126, 'name': Rename_Task, 'duration_secs': 0.259975} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 645.555599] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-30e7e053-37a2-402d-a298-8045eae5748c tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] [instance: 2ae111f7-4eaa-46c2-ab97-907daa913834] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 645.555898] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-40fd6c07-07cf-4be0-bece-fb5555179b4b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.568785] env[63028]: DEBUG oslo_vmware.api [None req-30e7e053-37a2-402d-a298-8045eae5748c tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Waiting for the task: (returnval){ [ 645.568785] env[63028]: value = "task-2735128" [ 645.568785] env[63028]: _type = "Task" [ 645.568785] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 645.594365] env[63028]: DEBUG oslo_vmware.api [None req-30e7e053-37a2-402d-a298-8045eae5748c tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Task: {'id': task-2735128, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 645.731563] env[63028]: DEBUG oslo_vmware.api [None req-5282f999-0e6a-4d73-89f0-a01dea2945b9 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] Task: {'id': task-2735127, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 645.792196] env[63028]: DEBUG nova.compute.manager [None req-6749eca0-c477-497a-9500-20fbd646e0a7 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: c3014718-1064-4ab9-9600-86490489ee4b] Starting instance... 
{{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 645.920850] env[63028]: DEBUG nova.compute.manager [None req-3d4a8458-0f79-4648-8e42-1868292216b3 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] [instance: c0db2b2a-9c06-409c-b48b-a0d5c127f2dc] Starting instance... {{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 646.078803] env[63028]: DEBUG oslo_vmware.api [None req-30e7e053-37a2-402d-a298-8045eae5748c tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Task: {'id': task-2735128, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 646.153818] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fad84614-9297-4fe5-942f-33cc694c8d2f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.164035] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7881241-e9cc-4c41-805d-f2e2f806a15a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.200219] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5111032-918f-4790-8435-9111a853533a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.212698] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ebbdb5a-dcf7-4df4-abd8-df715be8a073 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.228321] env[63028]: DEBUG nova.compute.provider_tree [None req-bbc4a681-f719-4c6e-ac1b-190ea2c48080 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Updating inventory in ProviderTree for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 646.238658] env[63028]: DEBUG oslo_vmware.api [None req-5282f999-0e6a-4d73-89f0-a01dea2945b9 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] Task: {'id': task-2735127, 'name': ReconfigVM_Task, 'duration_secs': 0.538524} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 646.238658] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-5282f999-0e6a-4d73-89f0-a01dea2945b9 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] [instance: 0e07a6cd-8c99-408d-95ba-63f7839c327f] Reconfigured VM instance instance-00000019 to attach disk [datastore1] 0e07a6cd-8c99-408d-95ba-63f7839c327f/0e07a6cd-8c99-408d-95ba-63f7839c327f.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 646.240322] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f4c85ae4-b1f3-4c85-99b2-b0e2219421a4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.247303] env[63028]: DEBUG oslo_vmware.api [None req-5282f999-0e6a-4d73-89f0-a01dea2945b9 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] Waiting for the task: (returnval){ [ 646.247303] env[63028]: value = "task-2735130" [ 646.247303] env[63028]: _type = "Task" [ 646.247303] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 646.259475] env[63028]: DEBUG oslo_vmware.api [None req-5282f999-0e6a-4d73-89f0-a01dea2945b9 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] Task: {'id': task-2735130, 'name': Rename_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 646.316755] env[63028]: DEBUG oslo_concurrency.lockutils [None req-6749eca0-c477-497a-9500-20fbd646e0a7 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 646.449358] env[63028]: DEBUG oslo_concurrency.lockutils [None req-3d4a8458-0f79-4648-8e42-1868292216b3 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 646.582914] env[63028]: DEBUG oslo_vmware.api [None req-30e7e053-37a2-402d-a298-8045eae5748c tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Task: {'id': task-2735128, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 646.763779] env[63028]: DEBUG oslo_vmware.api [None req-5282f999-0e6a-4d73-89f0-a01dea2945b9 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] Task: {'id': task-2735130, 'name': Rename_Task, 'duration_secs': 0.245167} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 646.764279] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-5282f999-0e6a-4d73-89f0-a01dea2945b9 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] [instance: 0e07a6cd-8c99-408d-95ba-63f7839c327f] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 646.764829] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bdfc639d-acad-4952-a2ab-5e43c14ed534 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.771770] env[63028]: DEBUG nova.scheduler.client.report [None req-bbc4a681-f719-4c6e-ac1b-190ea2c48080 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Updated inventory for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 with generation 52 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 646.772167] env[63028]: DEBUG nova.compute.provider_tree [None req-bbc4a681-f719-4c6e-ac1b-190ea2c48080 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Updating resource provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 generation from 52 to 53 during operation: update_inventory {{(pid=63028) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 646.772464] env[63028]: DEBUG nova.compute.provider_tree [None req-bbc4a681-f719-4c6e-ac1b-190ea2c48080 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Updating inventory in ProviderTree for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 646.781755] env[63028]: DEBUG oslo_vmware.api [None req-5282f999-0e6a-4d73-89f0-a01dea2945b9 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] Waiting for the task: (returnval){ [ 646.781755] env[63028]: value = "task-2735131" [ 646.781755] env[63028]: _type = "Task" [ 646.781755] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 646.792689] env[63028]: DEBUG oslo_vmware.api [None req-5282f999-0e6a-4d73-89f0-a01dea2945b9 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] Task: {'id': task-2735131, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 647.085688] env[63028]: DEBUG oslo_vmware.api [None req-30e7e053-37a2-402d-a298-8045eae5748c tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Task: {'id': task-2735128, 'name': PowerOnVM_Task, 'duration_secs': 1.363093} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 647.085688] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-30e7e053-37a2-402d-a298-8045eae5748c tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] [instance: 2ae111f7-4eaa-46c2-ab97-907daa913834] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 647.085964] env[63028]: DEBUG nova.compute.manager [None req-30e7e053-37a2-402d-a298-8045eae5748c tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] [instance: 2ae111f7-4eaa-46c2-ab97-907daa913834] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 647.087189] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-380a3ed8-151b-42a6-bf82-314527eac005 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.129027] env[63028]: DEBUG nova.compute.manager [req-fd0b9d14-1627-4f17-8bb8-d02f75a39e3f req-778fb407-d157-4b64-bc72-d889d2a55b2a service nova] [instance: 8f6beda6-0fc6-4d85-9f27-f4248adda8f3] Received event network-changed-4ab9f841-7392-47cd-afac-be9ddd19b6bf {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 647.129027] env[63028]: DEBUG nova.compute.manager [req-fd0b9d14-1627-4f17-8bb8-d02f75a39e3f req-778fb407-d157-4b64-bc72-d889d2a55b2a service nova] [instance: 8f6beda6-0fc6-4d85-9f27-f4248adda8f3] Refreshing instance network info cache due to event network-changed-4ab9f841-7392-47cd-afac-be9ddd19b6bf. 
{{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 647.129027] env[63028]: DEBUG oslo_concurrency.lockutils [req-fd0b9d14-1627-4f17-8bb8-d02f75a39e3f req-778fb407-d157-4b64-bc72-d889d2a55b2a service nova] Acquiring lock "refresh_cache-8f6beda6-0fc6-4d85-9f27-f4248adda8f3" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 647.129027] env[63028]: DEBUG oslo_concurrency.lockutils [req-fd0b9d14-1627-4f17-8bb8-d02f75a39e3f req-778fb407-d157-4b64-bc72-d889d2a55b2a service nova] Acquired lock "refresh_cache-8f6beda6-0fc6-4d85-9f27-f4248adda8f3" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 647.129027] env[63028]: DEBUG nova.network.neutron [req-fd0b9d14-1627-4f17-8bb8-d02f75a39e3f req-778fb407-d157-4b64-bc72-d889d2a55b2a service nova] [instance: 8f6beda6-0fc6-4d85-9f27-f4248adda8f3] Refreshing network info cache for port 4ab9f841-7392-47cd-afac-be9ddd19b6bf {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 647.283492] env[63028]: DEBUG oslo_concurrency.lockutils [None req-bbc4a681-f719-4c6e-ac1b-190ea2c48080 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.835s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 647.283995] env[63028]: DEBUG nova.compute.manager [None req-bbc4a681-f719-4c6e-ac1b-190ea2c48080 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: 5a340e31-678c-437e-aa4e-07d5d9f4334f] Start building networks asynchronously for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 647.290051] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5a552dd6-9917-4ffe-8484-017f00b84b26 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.814s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 647.290657] env[63028]: INFO nova.compute.claims [None req-5a552dd6-9917-4ffe-8484-017f00b84b26 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] [instance: 86d5d264-7a7a-434b-a1c4-e9a004c0a034] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 647.315155] env[63028]: DEBUG oslo_vmware.api [None req-5282f999-0e6a-4d73-89f0-a01dea2945b9 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] Task: {'id': task-2735131, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 647.610406] env[63028]: DEBUG oslo_concurrency.lockutils [None req-30e7e053-37a2-402d-a298-8045eae5748c tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 647.646411] env[63028]: DEBUG oslo_concurrency.lockutils [None req-6e3a9737-fb71-42ad-adca-40f4f56c8c52 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Acquiring lock "c06813c4-472d-4bf9-84ec-0d01306bcd48" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 647.646411] env[63028]: DEBUG oslo_concurrency.lockutils [None req-6e3a9737-fb71-42ad-adca-40f4f56c8c52 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Lock "c06813c4-472d-4bf9-84ec-0d01306bcd48" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 647.806170] env[63028]: DEBUG oslo_vmware.api [None req-5282f999-0e6a-4d73-89f0-a01dea2945b9 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] Task: {'id': task-2735131, 'name': PowerOnVM_Task, 'duration_secs': 0.982962} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 647.808216] env[63028]: DEBUG nova.compute.utils [None req-bbc4a681-f719-4c6e-ac1b-190ea2c48080 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 647.811156] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-5282f999-0e6a-4d73-89f0-a01dea2945b9 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] [instance: 0e07a6cd-8c99-408d-95ba-63f7839c327f] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 647.811389] env[63028]: INFO nova.compute.manager [None req-5282f999-0e6a-4d73-89f0-a01dea2945b9 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] [instance: 0e07a6cd-8c99-408d-95ba-63f7839c327f] Took 10.26 seconds to spawn the instance on the hypervisor. [ 647.811672] env[63028]: DEBUG nova.compute.manager [None req-5282f999-0e6a-4d73-89f0-a01dea2945b9 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] [instance: 0e07a6cd-8c99-408d-95ba-63f7839c327f] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 647.815094] env[63028]: DEBUG nova.compute.manager [None req-bbc4a681-f719-4c6e-ac1b-190ea2c48080 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: 5a340e31-678c-437e-aa4e-07d5d9f4334f] Allocating IP information in the background. 
{{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 647.815290] env[63028]: DEBUG nova.network.neutron [None req-bbc4a681-f719-4c6e-ac1b-190ea2c48080 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: 5a340e31-678c-437e-aa4e-07d5d9f4334f] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 647.817918] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9908cae2-1f2d-48ab-b9db-32abb4818418 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.944255] env[63028]: DEBUG nova.policy [None req-bbc4a681-f719-4c6e-ac1b-190ea2c48080 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1b48f3f2a85945379bdb33bf153bde9c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '25a6457f62d149629c09589feb1a550c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 648.316945] env[63028]: DEBUG nova.compute.manager [None req-bbc4a681-f719-4c6e-ac1b-190ea2c48080 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: 5a340e31-678c-437e-aa4e-07d5d9f4334f] Start building block device mappings for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 648.351924] env[63028]: INFO nova.compute.manager [None req-5282f999-0e6a-4d73-89f0-a01dea2945b9 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] [instance: 0e07a6cd-8c99-408d-95ba-63f7839c327f] Took 39.44 seconds to build instance. [ 648.368028] env[63028]: DEBUG nova.network.neutron [req-fd0b9d14-1627-4f17-8bb8-d02f75a39e3f req-778fb407-d157-4b64-bc72-d889d2a55b2a service nova] [instance: 8f6beda6-0fc6-4d85-9f27-f4248adda8f3] Updated VIF entry in instance network info cache for port 4ab9f841-7392-47cd-afac-be9ddd19b6bf. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 648.368141] env[63028]: DEBUG nova.network.neutron [req-fd0b9d14-1627-4f17-8bb8-d02f75a39e3f req-778fb407-d157-4b64-bc72-d889d2a55b2a service nova] [instance: 8f6beda6-0fc6-4d85-9f27-f4248adda8f3] Updating instance_info_cache with network_info: [{"id": "4ab9f841-7392-47cd-afac-be9ddd19b6bf", "address": "fa:16:3e:85:18:66", "network": {"id": "495b5d19-156c-488d-bf52-e024e94bbf30", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-552385726-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.207", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de9847436cdb4e679067d13380cd1187", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a874c214-8cdf-4a41-a718-84262b2a28d8", "external-id": "cl2-zone-726", "segmentation_id": 726, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4ab9f841-73", "ovs_interfaceid": "4ab9f841-7392-47cd-afac-be9ddd19b6bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 648.489786] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7c131164-be90-408a-908b-02c8df7872e2 tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Acquiring lock "2ae111f7-4eaa-46c2-ab97-907daa913834" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 648.490091] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7c131164-be90-408a-908b-02c8df7872e2 tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Lock "2ae111f7-4eaa-46c2-ab97-907daa913834" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 648.490331] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7c131164-be90-408a-908b-02c8df7872e2 tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Acquiring lock "2ae111f7-4eaa-46c2-ab97-907daa913834-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 648.490526] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7c131164-be90-408a-908b-02c8df7872e2 tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Lock "2ae111f7-4eaa-46c2-ab97-907daa913834-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 648.490719] env[63028]: DEBUG oslo_concurrency.lockutils [None 
req-7c131164-be90-408a-908b-02c8df7872e2 tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Lock "2ae111f7-4eaa-46c2-ab97-907daa913834-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 648.493990] env[63028]: INFO nova.compute.manager [None req-7c131164-be90-408a-908b-02c8df7872e2 tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] [instance: 2ae111f7-4eaa-46c2-ab97-907daa913834] Terminating instance [ 648.663433] env[63028]: DEBUG nova.network.neutron [None req-bbc4a681-f719-4c6e-ac1b-190ea2c48080 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: 5a340e31-678c-437e-aa4e-07d5d9f4334f] Successfully created port: 39f160e2-809e-4b2c-9424-70448b807385 {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 648.855726] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5282f999-0e6a-4d73-89f0-a01dea2945b9 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] Lock "0e07a6cd-8c99-408d-95ba-63f7839c327f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 43.698s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 648.874022] env[63028]: DEBUG oslo_concurrency.lockutils [req-fd0b9d14-1627-4f17-8bb8-d02f75a39e3f req-778fb407-d157-4b64-bc72-d889d2a55b2a service nova] Releasing lock "refresh_cache-8f6beda6-0fc6-4d85-9f27-f4248adda8f3" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 648.968353] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-565ddd41-8a1f-47dd-adec-00ba6e077975 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.980362] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63f67486-f88a-4842-9d5f-a87d11bba8de {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.023469] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7c131164-be90-408a-908b-02c8df7872e2 tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Acquiring lock "refresh_cache-2ae111f7-4eaa-46c2-ab97-907daa913834" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 649.024098] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7c131164-be90-408a-908b-02c8df7872e2 tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Acquired lock "refresh_cache-2ae111f7-4eaa-46c2-ab97-907daa913834" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 649.024228] env[63028]: DEBUG nova.network.neutron [None req-7c131164-be90-408a-908b-02c8df7872e2 tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] [instance: 2ae111f7-4eaa-46c2-ab97-907daa913834] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 649.028330] env[63028]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8add88c1-1883-40a2-ba0d-814145bc14cf {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.042083] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d983f5bc-a6ce-44bf-b4f2-ea70b3eda64b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.068551] env[63028]: DEBUG nova.compute.provider_tree [None req-5a552dd6-9917-4ffe-8484-017f00b84b26 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 649.341211] env[63028]: DEBUG nova.compute.manager [None req-bbc4a681-f719-4c6e-ac1b-190ea2c48080 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: 5a340e31-678c-437e-aa4e-07d5d9f4334f] Start spawning the instance on the hypervisor. {{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 649.360413] env[63028]: DEBUG nova.compute.manager [None req-5fa19bdb-228b-42ec-83d1-3d3179548869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: d6137c80-0c09-4655-b264-472753b4fa9c] Starting instance... {{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 649.366009] env[63028]: DEBUG nova.virt.hardware [None req-bbc4a681-f719-4c6e-ac1b-190ea2c48080 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 649.366380] env[63028]: DEBUG nova.virt.hardware [None req-bbc4a681-f719-4c6e-ac1b-190ea2c48080 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 649.366442] env[63028]: DEBUG nova.virt.hardware [None req-bbc4a681-f719-4c6e-ac1b-190ea2c48080 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 649.366643] env[63028]: DEBUG nova.virt.hardware [None req-bbc4a681-f719-4c6e-ac1b-190ea2c48080 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] 
Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 649.366744] env[63028]: DEBUG nova.virt.hardware [None req-bbc4a681-f719-4c6e-ac1b-190ea2c48080 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 649.366862] env[63028]: DEBUG nova.virt.hardware [None req-bbc4a681-f719-4c6e-ac1b-190ea2c48080 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 649.367107] env[63028]: DEBUG nova.virt.hardware [None req-bbc4a681-f719-4c6e-ac1b-190ea2c48080 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 649.367423] env[63028]: DEBUG nova.virt.hardware [None req-bbc4a681-f719-4c6e-ac1b-190ea2c48080 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 649.367423] env[63028]: DEBUG nova.virt.hardware [None req-bbc4a681-f719-4c6e-ac1b-190ea2c48080 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 649.367567] env[63028]: DEBUG nova.virt.hardware [None req-bbc4a681-f719-4c6e-ac1b-190ea2c48080 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 649.367733] env[63028]: DEBUG nova.virt.hardware [None req-bbc4a681-f719-4c6e-ac1b-190ea2c48080 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 649.369059] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46412b06-f003-4eec-b0b9-c525b36392c6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.385368] env[63028]: DEBUG oslo_vmware.rw_handles [None req-56470efe-5faa-4735-b857-928b7027d902 tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52221a6c-7dab-405c-d717-acee72f28fc5/disk-0.vmdk. 
{{(pid=63028) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 649.387349] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-817dfbf2-ccae-40bc-b32c-0eec39ac5358 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.392819] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49658c60-cd36-4859-bd12-10d1254cba78 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.417052] env[63028]: DEBUG oslo_vmware.rw_handles [None req-56470efe-5faa-4735-b857-928b7027d902 tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52221a6c-7dab-405c-d717-acee72f28fc5/disk-0.vmdk is in state: ready. {{(pid=63028) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 649.417052] env[63028]: ERROR oslo_vmware.rw_handles [None req-56470efe-5faa-4735-b857-928b7027d902 tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52221a6c-7dab-405c-d717-acee72f28fc5/disk-0.vmdk due to incomplete transfer. [ 649.417052] env[63028]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-f6bd17eb-98a8-451e-9cbb-83fe3c957b0b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.430618] env[63028]: DEBUG oslo_vmware.rw_handles [None req-56470efe-5faa-4735-b857-928b7027d902 tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52221a6c-7dab-405c-d717-acee72f28fc5/disk-0.vmdk. {{(pid=63028) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 649.430865] env[63028]: DEBUG nova.virt.vmwareapi.images [None req-56470efe-5faa-4735-b857-928b7027d902 tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] [instance: c9cc1ac7-06c6-415b-86ce-daf4849bfc05] Uploaded image f70c3182-7f65-419e-a89d-18cac4191ed9 to the Glance image server {{(pid=63028) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 649.432849] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-56470efe-5faa-4735-b857-928b7027d902 tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] [instance: c9cc1ac7-06c6-415b-86ce-daf4849bfc05] Destroying the VM {{(pid=63028) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 649.433576] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-17c53c2a-3553-459c-a8b9-fed051b89f75 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.442862] env[63028]: DEBUG oslo_vmware.api [None req-56470efe-5faa-4735-b857-928b7027d902 tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Waiting for the task: (returnval){ [ 649.442862] env[63028]: value = "task-2735133" [ 649.442862] env[63028]: _type = "Task" [ 649.442862] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 649.452982] env[63028]: DEBUG oslo_vmware.api [None req-56470efe-5faa-4735-b857-928b7027d902 tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Task: {'id': task-2735133, 'name': Destroy_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 649.554018] env[63028]: DEBUG nova.network.neutron [None req-7c131164-be90-408a-908b-02c8df7872e2 tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] [instance: 2ae111f7-4eaa-46c2-ab97-907daa913834] Instance cache missing network info. {{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 649.573445] env[63028]: DEBUG nova.scheduler.client.report [None req-5a552dd6-9917-4ffe-8484-017f00b84b26 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 649.674272] env[63028]: DEBUG nova.network.neutron [None req-7c131164-be90-408a-908b-02c8df7872e2 tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] [instance: 2ae111f7-4eaa-46c2-ab97-907daa913834] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 649.909769] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5fa19bdb-228b-42ec-83d1-3d3179548869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 649.958039] env[63028]: DEBUG oslo_vmware.api [None req-56470efe-5faa-4735-b857-928b7027d902 tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Task: {'id': task-2735133, 'name': Destroy_Task, 'duration_secs': 0.34494} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 649.959368] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-56470efe-5faa-4735-b857-928b7027d902 tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] [instance: c9cc1ac7-06c6-415b-86ce-daf4849bfc05] Destroyed the VM [ 649.959368] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-56470efe-5faa-4735-b857-928b7027d902 tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] [instance: c9cc1ac7-06c6-415b-86ce-daf4849bfc05] Deleting Snapshot of the VM instance {{(pid=63028) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 649.959368] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-74bbd807-d3d0-4442-ba45-7a07d96547bf {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.969559] env[63028]: DEBUG oslo_vmware.api [None req-56470efe-5faa-4735-b857-928b7027d902 tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Waiting for the task: (returnval){ [ 649.969559] env[63028]: value = "task-2735134" [ 649.969559] env[63028]: _type = "Task" [ 649.969559] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 649.979350] env[63028]: DEBUG oslo_vmware.api [None req-56470efe-5faa-4735-b857-928b7027d902 tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Task: {'id': task-2735134, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 650.084391] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5a552dd6-9917-4ffe-8484-017f00b84b26 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.796s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 650.084902] env[63028]: DEBUG nova.compute.manager [None req-5a552dd6-9917-4ffe-8484-017f00b84b26 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] [instance: 86d5d264-7a7a-434b-a1c4-e9a004c0a034] Start building networks asynchronously for instance. 
{{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 650.092744] env[63028]: DEBUG oslo_concurrency.lockutils [None req-fe6cbe68-33e9-494c-908d-9d22e427dcda tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.278s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 650.092868] env[63028]: DEBUG nova.objects.instance [None req-fe6cbe68-33e9-494c-908d-9d22e427dcda tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Lazy-loading 'resources' on Instance uuid f311a533-5c48-410b-ba3b-58f0032c8816 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 650.097219] env[63028]: DEBUG nova.compute.manager [req-be057fb2-7996-4261-823f-a7f38ba3cfcf req-0e23117f-4cfe-4c57-9a13-50eed845c1ae service nova] [instance: 0e07a6cd-8c99-408d-95ba-63f7839c327f] Received event network-changed-cc6af35d-7e46-40e6-bc97-40efda1ab807 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 650.097219] env[63028]: DEBUG nova.compute.manager [req-be057fb2-7996-4261-823f-a7f38ba3cfcf req-0e23117f-4cfe-4c57-9a13-50eed845c1ae service nova] [instance: 0e07a6cd-8c99-408d-95ba-63f7839c327f] Refreshing instance network info cache due to event network-changed-cc6af35d-7e46-40e6-bc97-40efda1ab807. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 650.097219] env[63028]: DEBUG oslo_concurrency.lockutils [req-be057fb2-7996-4261-823f-a7f38ba3cfcf req-0e23117f-4cfe-4c57-9a13-50eed845c1ae service nova] Acquiring lock "refresh_cache-0e07a6cd-8c99-408d-95ba-63f7839c327f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 650.097219] env[63028]: DEBUG oslo_concurrency.lockutils [req-be057fb2-7996-4261-823f-a7f38ba3cfcf req-0e23117f-4cfe-4c57-9a13-50eed845c1ae service nova] Acquired lock "refresh_cache-0e07a6cd-8c99-408d-95ba-63f7839c327f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 650.097219] env[63028]: DEBUG nova.network.neutron [req-be057fb2-7996-4261-823f-a7f38ba3cfcf req-0e23117f-4cfe-4c57-9a13-50eed845c1ae service nova] [instance: 0e07a6cd-8c99-408d-95ba-63f7839c327f] Refreshing network info cache for port cc6af35d-7e46-40e6-bc97-40efda1ab807 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 650.178978] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7c131164-be90-408a-908b-02c8df7872e2 tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Releasing lock "refresh_cache-2ae111f7-4eaa-46c2-ab97-907daa913834" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 650.179211] env[63028]: DEBUG nova.compute.manager [None req-7c131164-be90-408a-908b-02c8df7872e2 tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] [instance: 2ae111f7-4eaa-46c2-ab97-907daa913834] Start destroying the instance on the hypervisor. 
{{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 650.179416] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-7c131164-be90-408a-908b-02c8df7872e2 tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] [instance: 2ae111f7-4eaa-46c2-ab97-907daa913834] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 650.183016] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21ca509f-8863-4402-b60a-6c790164098f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.190712] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c131164-be90-408a-908b-02c8df7872e2 tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] [instance: 2ae111f7-4eaa-46c2-ab97-907daa913834] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 650.191064] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e29d9f64-753a-488f-a973-52800401640e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.201119] env[63028]: DEBUG oslo_vmware.api [None req-7c131164-be90-408a-908b-02c8df7872e2 tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Waiting for the task: (returnval){ [ 650.201119] env[63028]: value = "task-2735135" [ 650.201119] env[63028]: _type = "Task" [ 650.201119] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 650.211753] env[63028]: DEBUG oslo_vmware.api [None req-7c131164-be90-408a-908b-02c8df7872e2 tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Task: {'id': task-2735135, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 650.484709] env[63028]: DEBUG oslo_vmware.api [None req-56470efe-5faa-4735-b857-928b7027d902 tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Task: {'id': task-2735134, 'name': RemoveSnapshot_Task} progress is 12%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 650.593494] env[63028]: DEBUG nova.compute.utils [None req-5a552dd6-9917-4ffe-8484-017f00b84b26 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 650.597393] env[63028]: DEBUG nova.compute.manager [None req-5a552dd6-9917-4ffe-8484-017f00b84b26 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] [instance: 86d5d264-7a7a-434b-a1c4-e9a004c0a034] Allocating IP information in the background. 
{{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 650.597393] env[63028]: DEBUG nova.network.neutron [None req-5a552dd6-9917-4ffe-8484-017f00b84b26 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] [instance: 86d5d264-7a7a-434b-a1c4-e9a004c0a034] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 650.714778] env[63028]: DEBUG oslo_vmware.api [None req-7c131164-be90-408a-908b-02c8df7872e2 tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Task: {'id': task-2735135, 'name': PowerOffVM_Task, 'duration_secs': 0.250103} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 650.717817] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c131164-be90-408a-908b-02c8df7872e2 tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] [instance: 2ae111f7-4eaa-46c2-ab97-907daa913834] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 650.718178] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-7c131164-be90-408a-908b-02c8df7872e2 tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] [instance: 2ae111f7-4eaa-46c2-ab97-907daa913834] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 650.719792] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d32ed37f-e137-41d7-9eee-b4663a9f0755 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.721876] env[63028]: DEBUG oslo_concurrency.lockutils [None req-41141995-e826-4e05-8236-cab93d9f99c2 tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Acquiring lock "99886410-ec47-46ad-9d07-ee3593006997" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 650.721914] env[63028]: DEBUG oslo_concurrency.lockutils [None req-41141995-e826-4e05-8236-cab93d9f99c2 tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Lock "99886410-ec47-46ad-9d07-ee3593006997" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 650.722583] env[63028]: DEBUG oslo_concurrency.lockutils [None req-41141995-e826-4e05-8236-cab93d9f99c2 tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Acquiring lock "99886410-ec47-46ad-9d07-ee3593006997-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 650.722583] env[63028]: DEBUG oslo_concurrency.lockutils [None req-41141995-e826-4e05-8236-cab93d9f99c2 tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Lock "99886410-ec47-46ad-9d07-ee3593006997-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 650.722583] env[63028]: DEBUG oslo_concurrency.lockutils [None req-41141995-e826-4e05-8236-cab93d9f99c2 tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Lock "99886410-ec47-46ad-9d07-ee3593006997-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 650.730541] env[63028]: INFO nova.compute.manager [None req-41141995-e826-4e05-8236-cab93d9f99c2 tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] [instance: 99886410-ec47-46ad-9d07-ee3593006997] Terminating instance [ 650.765236] env[63028]: DEBUG nova.policy [None req-5a552dd6-9917-4ffe-8484-017f00b84b26 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f8490a2071a04695a6e2702430c2a91a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'dfded6a4a6994d558e1c9c823d962d8d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 650.771746] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-7c131164-be90-408a-908b-02c8df7872e2 tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] [instance: 2ae111f7-4eaa-46c2-ab97-907daa913834] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 650.771961] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-7c131164-be90-408a-908b-02c8df7872e2 tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] [instance: 2ae111f7-4eaa-46c2-ab97-907daa913834] Deleting contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 650.772171] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-7c131164-be90-408a-908b-02c8df7872e2 tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Deleting the datastore file [datastore1] 2ae111f7-4eaa-46c2-ab97-907daa913834 {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 650.772699] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d7dc79c8-a3df-49ec-9742-1be76de8bb5c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.786955] env[63028]: DEBUG oslo_vmware.api [None req-7c131164-be90-408a-908b-02c8df7872e2 tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Waiting for the task: (returnval){ [ 650.786955] env[63028]: value = "task-2735138" [ 650.786955] env[63028]: _type = "Task" [ 650.786955] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 650.798307] env[63028]: DEBUG oslo_vmware.api [None req-7c131164-be90-408a-908b-02c8df7872e2 tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Task: {'id': task-2735138, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 650.965140] env[63028]: DEBUG nova.network.neutron [req-be057fb2-7996-4261-823f-a7f38ba3cfcf req-0e23117f-4cfe-4c57-9a13-50eed845c1ae service nova] [instance: 0e07a6cd-8c99-408d-95ba-63f7839c327f] Updated VIF entry in instance network info cache for port cc6af35d-7e46-40e6-bc97-40efda1ab807. {{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 650.965652] env[63028]: DEBUG nova.network.neutron [req-be057fb2-7996-4261-823f-a7f38ba3cfcf req-0e23117f-4cfe-4c57-9a13-50eed845c1ae service nova] [instance: 0e07a6cd-8c99-408d-95ba-63f7839c327f] Updating instance_info_cache with network_info: [{"id": "cc6af35d-7e46-40e6-bc97-40efda1ab807", "address": "fa:16:3e:a3:ea:21", "network": {"id": "bbda1d1e-8f2d-4594-9711-57cd1f5c1d06", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-842445812-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.185", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7ed58d3e63604c2fac29e5744fd7f0bc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0685bd0b-3dbf-4a06-951c-c6a4726dd4b0", "external-id": "nsx-vlan-transportzone-661", "segmentation_id": 661, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc6af35d-7e", "ovs_interfaceid": "cc6af35d-7e46-40e6-bc97-40efda1ab807", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 650.989834] env[63028]: DEBUG oslo_vmware.api [None req-56470efe-5faa-4735-b857-928b7027d902 tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Task: {'id': task-2735134, 'name': RemoveSnapshot_Task, 'duration_secs': 0.635445} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 650.990935] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-56470efe-5faa-4735-b857-928b7027d902 tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] [instance: c9cc1ac7-06c6-415b-86ce-daf4849bfc05] Deleted Snapshot of the VM instance {{(pid=63028) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 650.991034] env[63028]: INFO nova.compute.manager [None req-56470efe-5faa-4735-b857-928b7027d902 tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] [instance: c9cc1ac7-06c6-415b-86ce-daf4849bfc05] Took 16.03 seconds to snapshot the instance on the hypervisor. 
[ 651.031426] env[63028]: DEBUG nova.network.neutron [None req-bbc4a681-f719-4c6e-ac1b-190ea2c48080 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: 5a340e31-678c-437e-aa4e-07d5d9f4334f] Successfully updated port: 39f160e2-809e-4b2c-9424-70448b807385 {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 651.106606] env[63028]: DEBUG nova.compute.manager [None req-5a552dd6-9917-4ffe-8484-017f00b84b26 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] [instance: 86d5d264-7a7a-434b-a1c4-e9a004c0a034] Start building block device mappings for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 651.237021] env[63028]: DEBUG nova.compute.manager [None req-41141995-e826-4e05-8236-cab93d9f99c2 tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] [instance: 99886410-ec47-46ad-9d07-ee3593006997] Start destroying the instance on the hypervisor. {{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 651.237021] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-41141995-e826-4e05-8236-cab93d9f99c2 tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] [instance: 99886410-ec47-46ad-9d07-ee3593006997] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 651.237021] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5580b9c4-399b-4d56-b640-4418901a03de {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.252931] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-41141995-e826-4e05-8236-cab93d9f99c2 tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] [instance: 99886410-ec47-46ad-9d07-ee3593006997] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 651.253267] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-70752038-2f9d-424d-b8cf-e6df5fa70c4e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.258243] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5040123-335a-4970-be8d-086c6248f559 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.263059] env[63028]: DEBUG oslo_vmware.api [None req-41141995-e826-4e05-8236-cab93d9f99c2 tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Waiting for the task: (returnval){ [ 651.263059] env[63028]: value = "task-2735139" [ 651.263059] env[63028]: _type = "Task" [ 651.263059] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 651.270439] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66f9f8ec-bb44-4b97-8631-bb7a703f5bd6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.276741] env[63028]: DEBUG oslo_vmware.api [None req-41141995-e826-4e05-8236-cab93d9f99c2 tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Task: {'id': task-2735139, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 651.314710] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ae22bda-aade-4df2-940c-dae4f4e301cf {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.325108] env[63028]: DEBUG oslo_vmware.api [None req-7c131164-be90-408a-908b-02c8df7872e2 tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Task: {'id': task-2735138, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.159967} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 651.326014] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-7c131164-be90-408a-908b-02c8df7872e2 tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 651.326273] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-7c131164-be90-408a-908b-02c8df7872e2 tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] [instance: 2ae111f7-4eaa-46c2-ab97-907daa913834] Deleted contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 651.326477] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-7c131164-be90-408a-908b-02c8df7872e2 tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] [instance: 2ae111f7-4eaa-46c2-ab97-907daa913834] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 651.326681] env[63028]: INFO nova.compute.manager [None req-7c131164-be90-408a-908b-02c8df7872e2 tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] [instance: 2ae111f7-4eaa-46c2-ab97-907daa913834] Took 1.15 seconds to destroy the instance on the hypervisor. [ 651.326922] env[63028]: DEBUG oslo.service.loopingcall [None req-7c131164-be90-408a-908b-02c8df7872e2 tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 651.327250] env[63028]: DEBUG nova.compute.manager [-] [instance: 2ae111f7-4eaa-46c2-ab97-907daa913834] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 651.327362] env[63028]: DEBUG nova.network.neutron [-] [instance: 2ae111f7-4eaa-46c2-ab97-907daa913834] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 651.330224] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8b5ea33-4e8d-4c75-8dfe-f1c0aa5b34f1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.354937] env[63028]: DEBUG nova.compute.provider_tree [None req-fe6cbe68-33e9-494c-908d-9d22e427dcda tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 651.409918] env[63028]: DEBUG nova.network.neutron [-] [instance: 2ae111f7-4eaa-46c2-ab97-907daa913834] Instance cache missing network info. {{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 651.472454] env[63028]: DEBUG oslo_concurrency.lockutils [req-be057fb2-7996-4261-823f-a7f38ba3cfcf req-0e23117f-4cfe-4c57-9a13-50eed845c1ae service nova] Releasing lock "refresh_cache-0e07a6cd-8c99-408d-95ba-63f7839c327f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 651.536027] env[63028]: DEBUG oslo_concurrency.lockutils [None req-bbc4a681-f719-4c6e-ac1b-190ea2c48080 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Acquiring lock "refresh_cache-5a340e31-678c-437e-aa4e-07d5d9f4334f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 651.536027] env[63028]: DEBUG oslo_concurrency.lockutils [None req-bbc4a681-f719-4c6e-ac1b-190ea2c48080 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Acquired lock "refresh_cache-5a340e31-678c-437e-aa4e-07d5d9f4334f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 651.536027] env[63028]: DEBUG nova.network.neutron [None req-bbc4a681-f719-4c6e-ac1b-190ea2c48080 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: 5a340e31-678c-437e-aa4e-07d5d9f4334f] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 651.614164] env[63028]: DEBUG nova.network.neutron [None req-5a552dd6-9917-4ffe-8484-017f00b84b26 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] [instance: 86d5d264-7a7a-434b-a1c4-e9a004c0a034] Successfully created port: 064a77e5-92b4-4a3f-9a42-bd577ec46683 {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 651.773827] env[63028]: DEBUG oslo_vmware.api [None req-41141995-e826-4e05-8236-cab93d9f99c2 tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Task: {'id': task-2735139, 'name': PowerOffVM_Task, 'duration_secs': 0.326856} 
completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 651.774114] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-41141995-e826-4e05-8236-cab93d9f99c2 tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] [instance: 99886410-ec47-46ad-9d07-ee3593006997] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 651.776012] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-41141995-e826-4e05-8236-cab93d9f99c2 tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] [instance: 99886410-ec47-46ad-9d07-ee3593006997] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 651.776012] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-33804d07-9697-4215-9dbd-b04504842c88 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.854603] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-41141995-e826-4e05-8236-cab93d9f99c2 tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] [instance: 99886410-ec47-46ad-9d07-ee3593006997] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 651.854603] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-41141995-e826-4e05-8236-cab93d9f99c2 tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] [instance: 99886410-ec47-46ad-9d07-ee3593006997] Deleting contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 651.854603] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-41141995-e826-4e05-8236-cab93d9f99c2 tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Deleting the datastore file [datastore1] 99886410-ec47-46ad-9d07-ee3593006997 {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 651.854603] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ce26286a-62d3-41d2-b3a6-fc65b7a7f8a1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.858764] env[63028]: DEBUG nova.scheduler.client.report [None req-fe6cbe68-33e9-494c-908d-9d22e427dcda tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 651.867359] env[63028]: DEBUG oslo_vmware.api [None req-41141995-e826-4e05-8236-cab93d9f99c2 tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Waiting for the task: (returnval){ [ 651.867359] env[63028]: value = "task-2735141" [ 651.867359] env[63028]: _type = "Task" [ 
651.867359] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 651.882085] env[63028]: DEBUG oslo_vmware.api [None req-41141995-e826-4e05-8236-cab93d9f99c2 tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Task: {'id': task-2735141, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 651.910907] env[63028]: DEBUG nova.network.neutron [-] [instance: 2ae111f7-4eaa-46c2-ab97-907daa913834] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 652.109794] env[63028]: DEBUG nova.network.neutron [None req-bbc4a681-f719-4c6e-ac1b-190ea2c48080 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: 5a340e31-678c-437e-aa4e-07d5d9f4334f] Instance cache missing network info. {{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 652.124300] env[63028]: DEBUG nova.compute.manager [None req-5a552dd6-9917-4ffe-8484-017f00b84b26 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] [instance: 86d5d264-7a7a-434b-a1c4-e9a004c0a034] Start spawning the instance on the hypervisor. {{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 652.154356] env[63028]: DEBUG nova.virt.hardware [None req-5a552dd6-9917-4ffe-8484-017f00b84b26 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 652.154605] env[63028]: DEBUG nova.virt.hardware [None req-5a552dd6-9917-4ffe-8484-017f00b84b26 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 652.154760] env[63028]: DEBUG nova.virt.hardware [None req-5a552dd6-9917-4ffe-8484-017f00b84b26 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 652.154940] env[63028]: DEBUG nova.virt.hardware [None req-5a552dd6-9917-4ffe-8484-017f00b84b26 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 
652.156192] env[63028]: DEBUG nova.virt.hardware [None req-5a552dd6-9917-4ffe-8484-017f00b84b26 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 652.156391] env[63028]: DEBUG nova.virt.hardware [None req-5a552dd6-9917-4ffe-8484-017f00b84b26 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 652.156614] env[63028]: DEBUG nova.virt.hardware [None req-5a552dd6-9917-4ffe-8484-017f00b84b26 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 652.156784] env[63028]: DEBUG nova.virt.hardware [None req-5a552dd6-9917-4ffe-8484-017f00b84b26 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 652.156952] env[63028]: DEBUG nova.virt.hardware [None req-5a552dd6-9917-4ffe-8484-017f00b84b26 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 652.157167] env[63028]: DEBUG nova.virt.hardware [None req-5a552dd6-9917-4ffe-8484-017f00b84b26 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 652.157357] env[63028]: DEBUG nova.virt.hardware [None req-5a552dd6-9917-4ffe-8484-017f00b84b26 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 652.158303] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5301e4f5-9095-4bd7-a00c-c8b4b1135cc3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.171358] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d74c46c-4dab-430b-8b53-bd7db781a926 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.279054] env[63028]: DEBUG nova.compute.manager [req-8abfc8ca-a63f-47b1-bbb4-179ade912a6e req-314c06cf-d2e4-4179-872d-918841967ba4 service nova] [instance: 5a340e31-678c-437e-aa4e-07d5d9f4334f] Received event network-vif-plugged-39f160e2-809e-4b2c-9424-70448b807385 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 652.279324] env[63028]: DEBUG oslo_concurrency.lockutils [req-8abfc8ca-a63f-47b1-bbb4-179ade912a6e req-314c06cf-d2e4-4179-872d-918841967ba4 service nova] Acquiring lock 
"5a340e31-678c-437e-aa4e-07d5d9f4334f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 652.281690] env[63028]: DEBUG oslo_concurrency.lockutils [req-8abfc8ca-a63f-47b1-bbb4-179ade912a6e req-314c06cf-d2e4-4179-872d-918841967ba4 service nova] Lock "5a340e31-678c-437e-aa4e-07d5d9f4334f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 652.281900] env[63028]: DEBUG oslo_concurrency.lockutils [req-8abfc8ca-a63f-47b1-bbb4-179ade912a6e req-314c06cf-d2e4-4179-872d-918841967ba4 service nova] Lock "5a340e31-678c-437e-aa4e-07d5d9f4334f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.002s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 652.282095] env[63028]: DEBUG nova.compute.manager [req-8abfc8ca-a63f-47b1-bbb4-179ade912a6e req-314c06cf-d2e4-4179-872d-918841967ba4 service nova] [instance: 5a340e31-678c-437e-aa4e-07d5d9f4334f] No waiting events found dispatching network-vif-plugged-39f160e2-809e-4b2c-9424-70448b807385 {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 652.282269] env[63028]: WARNING nova.compute.manager [req-8abfc8ca-a63f-47b1-bbb4-179ade912a6e req-314c06cf-d2e4-4179-872d-918841967ba4 service nova] [instance: 5a340e31-678c-437e-aa4e-07d5d9f4334f] Received unexpected event network-vif-plugged-39f160e2-809e-4b2c-9424-70448b807385 for instance with vm_state building and task_state spawning. [ 652.282433] env[63028]: DEBUG nova.compute.manager [req-8abfc8ca-a63f-47b1-bbb4-179ade912a6e req-314c06cf-d2e4-4179-872d-918841967ba4 service nova] [instance: 5a340e31-678c-437e-aa4e-07d5d9f4334f] Received event network-changed-39f160e2-809e-4b2c-9424-70448b807385 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 652.282589] env[63028]: DEBUG nova.compute.manager [req-8abfc8ca-a63f-47b1-bbb4-179ade912a6e req-314c06cf-d2e4-4179-872d-918841967ba4 service nova] [instance: 5a340e31-678c-437e-aa4e-07d5d9f4334f] Refreshing instance network info cache due to event network-changed-39f160e2-809e-4b2c-9424-70448b807385. 
{{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 652.282774] env[63028]: DEBUG oslo_concurrency.lockutils [req-8abfc8ca-a63f-47b1-bbb4-179ade912a6e req-314c06cf-d2e4-4179-872d-918841967ba4 service nova] Acquiring lock "refresh_cache-5a340e31-678c-437e-aa4e-07d5d9f4334f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 652.364301] env[63028]: DEBUG oslo_concurrency.lockutils [None req-fe6cbe68-33e9-494c-908d-9d22e427dcda tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.272s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 652.367868] env[63028]: DEBUG oslo_concurrency.lockutils [None req-154bc663-e50a-4893-813f-d703ddd97a79 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 36.386s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 652.367868] env[63028]: DEBUG nova.objects.instance [None req-154bc663-e50a-4893-813f-d703ddd97a79 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Lazy-loading 'resources' on Instance uuid 94b1bf30-0f9b-4197-99ff-6631a13ab2d1 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 652.381403] env[63028]: DEBUG oslo_vmware.api [None req-41141995-e826-4e05-8236-cab93d9f99c2 tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Task: {'id': task-2735141, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.165631} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 652.381685] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-41141995-e826-4e05-8236-cab93d9f99c2 tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 652.381882] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-41141995-e826-4e05-8236-cab93d9f99c2 tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] [instance: 99886410-ec47-46ad-9d07-ee3593006997] Deleted contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 652.382066] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-41141995-e826-4e05-8236-cab93d9f99c2 tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] [instance: 99886410-ec47-46ad-9d07-ee3593006997] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 652.382263] env[63028]: INFO nova.compute.manager [None req-41141995-e826-4e05-8236-cab93d9f99c2 tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] [instance: 99886410-ec47-46ad-9d07-ee3593006997] Took 1.15 seconds to destroy the instance on the hypervisor. 
[ 652.382535] env[63028]: DEBUG oslo.service.loopingcall [None req-41141995-e826-4e05-8236-cab93d9f99c2 tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 652.382737] env[63028]: DEBUG nova.compute.manager [-] [instance: 99886410-ec47-46ad-9d07-ee3593006997] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 652.382832] env[63028]: DEBUG nova.network.neutron [-] [instance: 99886410-ec47-46ad-9d07-ee3593006997] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 652.403639] env[63028]: INFO nova.scheduler.client.report [None req-fe6cbe68-33e9-494c-908d-9d22e427dcda tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Deleted allocations for instance f311a533-5c48-410b-ba3b-58f0032c8816 [ 652.405918] env[63028]: DEBUG nova.network.neutron [None req-bbc4a681-f719-4c6e-ac1b-190ea2c48080 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: 5a340e31-678c-437e-aa4e-07d5d9f4334f] Updating instance_info_cache with network_info: [{"id": "39f160e2-809e-4b2c-9424-70448b807385", "address": "fa:16:3e:f5:5d:b4", "network": {"id": "c2f1496c-e3fd-43db-a032-12cdacdb4e46", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1287620143-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "25a6457f62d149629c09589feb1a550c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap39f160e2-80", "ovs_interfaceid": "39f160e2-809e-4b2c-9424-70448b807385", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 652.413560] env[63028]: INFO nova.compute.manager [-] [instance: 2ae111f7-4eaa-46c2-ab97-907daa913834] Took 1.09 seconds to deallocate network for instance. 
[ 652.772972] env[63028]: DEBUG oslo_concurrency.lockutils [None req-3778db47-461e-4bfc-8c37-f51d5973e5b9 tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Acquiring lock "c9cc1ac7-06c6-415b-86ce-daf4849bfc05" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 652.773221] env[63028]: DEBUG oslo_concurrency.lockutils [None req-3778db47-461e-4bfc-8c37-f51d5973e5b9 tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Lock "c9cc1ac7-06c6-415b-86ce-daf4849bfc05" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 652.773439] env[63028]: DEBUG oslo_concurrency.lockutils [None req-3778db47-461e-4bfc-8c37-f51d5973e5b9 tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Acquiring lock "c9cc1ac7-06c6-415b-86ce-daf4849bfc05-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 652.773620] env[63028]: DEBUG oslo_concurrency.lockutils [None req-3778db47-461e-4bfc-8c37-f51d5973e5b9 tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Lock "c9cc1ac7-06c6-415b-86ce-daf4849bfc05-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 652.773818] env[63028]: DEBUG oslo_concurrency.lockutils [None req-3778db47-461e-4bfc-8c37-f51d5973e5b9 tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Lock "c9cc1ac7-06c6-415b-86ce-daf4849bfc05-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 652.775828] env[63028]: INFO nova.compute.manager [None req-3778db47-461e-4bfc-8c37-f51d5973e5b9 tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] [instance: c9cc1ac7-06c6-415b-86ce-daf4849bfc05] Terminating instance [ 652.911589] env[63028]: DEBUG oslo_concurrency.lockutils [None req-bbc4a681-f719-4c6e-ac1b-190ea2c48080 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Releasing lock "refresh_cache-5a340e31-678c-437e-aa4e-07d5d9f4334f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 652.911915] env[63028]: DEBUG nova.compute.manager [None req-bbc4a681-f719-4c6e-ac1b-190ea2c48080 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: 5a340e31-678c-437e-aa4e-07d5d9f4334f] Instance network_info: |[{"id": "39f160e2-809e-4b2c-9424-70448b807385", "address": "fa:16:3e:f5:5d:b4", "network": {"id": "c2f1496c-e3fd-43db-a032-12cdacdb4e46", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1287620143-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", 
"type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "25a6457f62d149629c09589feb1a550c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap39f160e2-80", "ovs_interfaceid": "39f160e2-809e-4b2c-9424-70448b807385", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 652.916649] env[63028]: DEBUG oslo_concurrency.lockutils [req-8abfc8ca-a63f-47b1-bbb4-179ade912a6e req-314c06cf-d2e4-4179-872d-918841967ba4 service nova] Acquired lock "refresh_cache-5a340e31-678c-437e-aa4e-07d5d9f4334f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 652.916837] env[63028]: DEBUG nova.network.neutron [req-8abfc8ca-a63f-47b1-bbb4-179ade912a6e req-314c06cf-d2e4-4179-872d-918841967ba4 service nova] [instance: 5a340e31-678c-437e-aa4e-07d5d9f4334f] Refreshing network info cache for port 39f160e2-809e-4b2c-9424-70448b807385 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 652.918204] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-bbc4a681-f719-4c6e-ac1b-190ea2c48080 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: 5a340e31-678c-437e-aa4e-07d5d9f4334f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f5:5d:b4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cbf3349e-d05e-4d44-a011-c4b6e41af988', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '39f160e2-809e-4b2c-9424-70448b807385', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 652.926017] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbc4a681-f719-4c6e-ac1b-190ea2c48080 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Creating folder: Project (25a6457f62d149629c09589feb1a550c). Parent ref: group-v550570. 
{{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 652.929432] env[63028]: DEBUG oslo_concurrency.lockutils [None req-fe6cbe68-33e9-494c-908d-9d22e427dcda tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Lock "f311a533-5c48-410b-ba3b-58f0032c8816" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 40.134s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 652.930862] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7c131164-be90-408a-908b-02c8df7872e2 tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 652.931108] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-df01a05f-a990-452b-a829-bee57ea8475a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.948611] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-bbc4a681-f719-4c6e-ac1b-190ea2c48080 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Created folder: Project (25a6457f62d149629c09589feb1a550c) in parent group-v550570. [ 652.948611] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbc4a681-f719-4c6e-ac1b-190ea2c48080 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Creating folder: Instances. Parent ref: group-v550663. {{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 652.949240] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5ef23972-9d89-4590-8a93-34028097f97b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.962621] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-bbc4a681-f719-4c6e-ac1b-190ea2c48080 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Created folder: Instances in parent group-v550663. [ 652.962621] env[63028]: DEBUG oslo.service.loopingcall [None req-bbc4a681-f719-4c6e-ac1b-190ea2c48080 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 652.962621] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5a340e31-678c-437e-aa4e-07d5d9f4334f] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 652.962869] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8add3ed0-1dea-4c6b-9259-72b8771c0a93 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.993449] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 652.993449] env[63028]: value = "task-2735145" [ 652.993449] env[63028]: _type = "Task" [ 652.993449] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 653.005538] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735145, 'name': CreateVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.078989] env[63028]: DEBUG nova.compute.manager [req-8af53fa9-cf45-4467-86c2-5024355d1bcb req-d97735c7-c677-4c90-8d53-1faa0786d5be service nova] [instance: 99886410-ec47-46ad-9d07-ee3593006997] Received event network-vif-deleted-480124ee-5fda-4fb0-ab85-6641aaa08fac {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 653.079233] env[63028]: INFO nova.compute.manager [req-8af53fa9-cf45-4467-86c2-5024355d1bcb req-d97735c7-c677-4c90-8d53-1faa0786d5be service nova] [instance: 99886410-ec47-46ad-9d07-ee3593006997] Neutron deleted interface 480124ee-5fda-4fb0-ab85-6641aaa08fac; detaching it from the instance and deleting it from the info cache [ 653.079414] env[63028]: DEBUG nova.network.neutron [req-8af53fa9-cf45-4467-86c2-5024355d1bcb req-d97735c7-c677-4c90-8d53-1faa0786d5be service nova] [instance: 99886410-ec47-46ad-9d07-ee3593006997] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 653.216805] env[63028]: DEBUG nova.network.neutron [req-8abfc8ca-a63f-47b1-bbb4-179ade912a6e req-314c06cf-d2e4-4179-872d-918841967ba4 service nova] [instance: 5a340e31-678c-437e-aa4e-07d5d9f4334f] Updated VIF entry in instance network info cache for port 39f160e2-809e-4b2c-9424-70448b807385. {{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 653.217280] env[63028]: DEBUG nova.network.neutron [req-8abfc8ca-a63f-47b1-bbb4-179ade912a6e req-314c06cf-d2e4-4179-872d-918841967ba4 service nova] [instance: 5a340e31-678c-437e-aa4e-07d5d9f4334f] Updating instance_info_cache with network_info: [{"id": "39f160e2-809e-4b2c-9424-70448b807385", "address": "fa:16:3e:f5:5d:b4", "network": {"id": "c2f1496c-e3fd-43db-a032-12cdacdb4e46", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1287620143-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "25a6457f62d149629c09589feb1a550c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap39f160e2-80", "ovs_interfaceid": "39f160e2-809e-4b2c-9424-70448b807385", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 653.280061] env[63028]: DEBUG nova.compute.manager [None req-3778db47-461e-4bfc-8c37-f51d5973e5b9 tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] [instance: c9cc1ac7-06c6-415b-86ce-daf4849bfc05] Start destroying the instance on the 
hypervisor. {{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 653.280314] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-3778db47-461e-4bfc-8c37-f51d5973e5b9 tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] [instance: c9cc1ac7-06c6-415b-86ce-daf4849bfc05] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 653.281271] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8af930c-7980-4173-9ab8-9a5e4c4125bb {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.294327] env[63028]: DEBUG nova.network.neutron [-] [instance: 99886410-ec47-46ad-9d07-ee3593006997] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 653.305479] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-3778db47-461e-4bfc-8c37-f51d5973e5b9 tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] [instance: c9cc1ac7-06c6-415b-86ce-daf4849bfc05] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 653.306175] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9510b075-4e2e-4a45-ba8a-0b85a228c821 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.315082] env[63028]: DEBUG oslo_vmware.api [None req-3778db47-461e-4bfc-8c37-f51d5973e5b9 tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Waiting for the task: (returnval){ [ 653.315082] env[63028]: value = "task-2735146" [ 653.315082] env[63028]: _type = "Task" [ 653.315082] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 653.329213] env[63028]: DEBUG oslo_vmware.api [None req-3778db47-461e-4bfc-8c37-f51d5973e5b9 tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Task: {'id': task-2735146, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.488964] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1da73fd5-bf44-4c21-8b18-d7f4a078df82 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.500928] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50449bd5-7557-462f-8b22-026bbdc3cef9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.508560] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735145, 'name': CreateVM_Task, 'duration_secs': 0.399486} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 653.509178] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5a340e31-678c-437e-aa4e-07d5d9f4334f] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 653.509915] env[63028]: DEBUG oslo_concurrency.lockutils [None req-bbc4a681-f719-4c6e-ac1b-190ea2c48080 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 653.510100] env[63028]: DEBUG oslo_concurrency.lockutils [None req-bbc4a681-f719-4c6e-ac1b-190ea2c48080 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 653.510426] env[63028]: DEBUG oslo_concurrency.lockutils [None req-bbc4a681-f719-4c6e-ac1b-190ea2c48080 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 653.510748] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bbb70e3e-48cc-45c6-949d-8bbd90bd2da4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.539760] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2af0b0da-f0be-4a89-8475-694fcbfbdde7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.544157] env[63028]: DEBUG oslo_vmware.api [None req-bbc4a681-f719-4c6e-ac1b-190ea2c48080 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Waiting for the task: (returnval){ [ 653.544157] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5272b426-4c51-c591-77e3-f910a6da3437" [ 653.544157] env[63028]: _type = "Task" [ 653.544157] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 653.551609] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dfe7d38-15e9-4f01-ae0d-d3b071980382 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.559954] env[63028]: DEBUG oslo_vmware.api [None req-bbc4a681-f719-4c6e-ac1b-190ea2c48080 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5272b426-4c51-c591-77e3-f910a6da3437, 'name': SearchDatastore_Task, 'duration_secs': 0.01087} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 653.560706] env[63028]: DEBUG oslo_concurrency.lockutils [None req-bbc4a681-f719-4c6e-ac1b-190ea2c48080 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 653.564016] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-bbc4a681-f719-4c6e-ac1b-190ea2c48080 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: 5a340e31-678c-437e-aa4e-07d5d9f4334f] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 653.564016] env[63028]: DEBUG oslo_concurrency.lockutils [None req-bbc4a681-f719-4c6e-ac1b-190ea2c48080 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 653.564016] env[63028]: DEBUG oslo_concurrency.lockutils [None req-bbc4a681-f719-4c6e-ac1b-190ea2c48080 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 653.564016] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-bbc4a681-f719-4c6e-ac1b-190ea2c48080 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 653.564016] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c6f3df58-917d-4579-8514-695db1482050 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.573646] env[63028]: DEBUG nova.compute.provider_tree [None req-154bc663-e50a-4893-813f-d703ddd97a79 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 653.582644] env[63028]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-51e031e3-70a0-49de-93ed-a3208136ab5c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.586799] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-bbc4a681-f719-4c6e-ac1b-190ea2c48080 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 653.586799] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-bbc4a681-f719-4c6e-ac1b-190ea2c48080 tempest-AttachInterfacesTestJSON-1120072222 
tempest-AttachInterfacesTestJSON-1120072222-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 653.587830] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6b599f9f-5bff-4609-b7c7-f64dc8fcfee4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.597786] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a03ddd05-6932-468a-a0b6-ec14caa84f2d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.610151] env[63028]: DEBUG oslo_vmware.api [None req-bbc4a681-f719-4c6e-ac1b-190ea2c48080 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Waiting for the task: (returnval){ [ 653.610151] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52f8986a-441c-97c1-f124-389e947fca91" [ 653.610151] env[63028]: _type = "Task" [ 653.610151] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 653.620744] env[63028]: DEBUG oslo_vmware.api [None req-bbc4a681-f719-4c6e-ac1b-190ea2c48080 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52f8986a-441c-97c1-f124-389e947fca91, 'name': SearchDatastore_Task, 'duration_secs': 0.009331} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 653.621134] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5f595642-9bda-4394-a0bb-5ca815998e60 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.634778] env[63028]: DEBUG nova.compute.manager [req-8af53fa9-cf45-4467-86c2-5024355d1bcb req-d97735c7-c677-4c90-8d53-1faa0786d5be service nova] [instance: 99886410-ec47-46ad-9d07-ee3593006997] Detach interface failed, port_id=480124ee-5fda-4fb0-ab85-6641aaa08fac, reason: Instance 99886410-ec47-46ad-9d07-ee3593006997 could not be found. {{(pid=63028) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 653.639516] env[63028]: DEBUG nova.network.neutron [None req-5a552dd6-9917-4ffe-8484-017f00b84b26 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] [instance: 86d5d264-7a7a-434b-a1c4-e9a004c0a034] Successfully updated port: 064a77e5-92b4-4a3f-9a42-bd577ec46683 {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 653.643576] env[63028]: DEBUG oslo_vmware.api [None req-bbc4a681-f719-4c6e-ac1b-190ea2c48080 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Waiting for the task: (returnval){ [ 653.643576] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5220c6a9-a34a-83b7-e0d3-3142f6cc42e4" [ 653.643576] env[63028]: _type = "Task" [ 653.643576] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 653.656279] env[63028]: DEBUG oslo_vmware.api [None req-bbc4a681-f719-4c6e-ac1b-190ea2c48080 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5220c6a9-a34a-83b7-e0d3-3142f6cc42e4, 'name': SearchDatastore_Task, 'duration_secs': 0.010717} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 653.656279] env[63028]: DEBUG oslo_concurrency.lockutils [None req-bbc4a681-f719-4c6e-ac1b-190ea2c48080 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 653.656279] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbc4a681-f719-4c6e-ac1b-190ea2c48080 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] 5a340e31-678c-437e-aa4e-07d5d9f4334f/5a340e31-678c-437e-aa4e-07d5d9f4334f.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 653.656658] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-69e56e93-ed48-4729-8663-cccd3382c13b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.666849] env[63028]: DEBUG oslo_vmware.api [None req-bbc4a681-f719-4c6e-ac1b-190ea2c48080 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Waiting for the task: (returnval){ [ 653.666849] env[63028]: value = "task-2735147" [ 653.666849] env[63028]: _type = "Task" [ 653.666849] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 653.676322] env[63028]: DEBUG oslo_vmware.api [None req-bbc4a681-f719-4c6e-ac1b-190ea2c48080 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2735147, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.697080] env[63028]: DEBUG oslo_concurrency.lockutils [None req-767adebe-0612-4cb5-9387-d45867077656 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] Acquiring lock "352ac7c3-17a8-4d7e-a66f-47ea7614892c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 653.697080] env[63028]: DEBUG oslo_concurrency.lockutils [None req-767adebe-0612-4cb5-9387-d45867077656 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] Lock "352ac7c3-17a8-4d7e-a66f-47ea7614892c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 653.720251] env[63028]: DEBUG oslo_concurrency.lockutils [req-8abfc8ca-a63f-47b1-bbb4-179ade912a6e req-314c06cf-d2e4-4179-872d-918841967ba4 service nova] Releasing lock "refresh_cache-5a340e31-678c-437e-aa4e-07d5d9f4334f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 653.803144] env[63028]: INFO nova.compute.manager [-] [instance: 99886410-ec47-46ad-9d07-ee3593006997] Took 1.42 seconds to deallocate network for instance. [ 653.828713] env[63028]: DEBUG oslo_vmware.api [None req-3778db47-461e-4bfc-8c37-f51d5973e5b9 tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Task: {'id': task-2735146, 'name': PowerOffVM_Task, 'duration_secs': 0.212349} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 653.828932] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-3778db47-461e-4bfc-8c37-f51d5973e5b9 tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] [instance: c9cc1ac7-06c6-415b-86ce-daf4849bfc05] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 653.829117] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-3778db47-461e-4bfc-8c37-f51d5973e5b9 tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] [instance: c9cc1ac7-06c6-415b-86ce-daf4849bfc05] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 653.829383] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e48587a4-75c0-45c9-8708-8f21675c2638 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.934794] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-3778db47-461e-4bfc-8c37-f51d5973e5b9 tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] [instance: c9cc1ac7-06c6-415b-86ce-daf4849bfc05] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 653.934794] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-3778db47-461e-4bfc-8c37-f51d5973e5b9 tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] [instance: c9cc1ac7-06c6-415b-86ce-daf4849bfc05] Deleting contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 653.934794] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-3778db47-461e-4bfc-8c37-f51d5973e5b9 tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Deleting the datastore file [datastore2] c9cc1ac7-06c6-415b-86ce-daf4849bfc05 {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 653.936362] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a9e11b7e-6cc7-447a-abc2-b01853e10cce {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.944360] env[63028]: DEBUG oslo_vmware.api [None req-3778db47-461e-4bfc-8c37-f51d5973e5b9 tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Waiting for the task: (returnval){ [ 653.944360] env[63028]: value = "task-2735149" [ 653.944360] env[63028]: _type = "Task" [ 653.944360] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 653.957304] env[63028]: DEBUG oslo_vmware.api [None req-3778db47-461e-4bfc-8c37-f51d5973e5b9 tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Task: {'id': task-2735149, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.077366] env[63028]: DEBUG nova.scheduler.client.report [None req-154bc663-e50a-4893-813f-d703ddd97a79 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 654.140142] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5a552dd6-9917-4ffe-8484-017f00b84b26 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Acquiring lock "refresh_cache-86d5d264-7a7a-434b-a1c4-e9a004c0a034" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 654.140142] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5a552dd6-9917-4ffe-8484-017f00b84b26 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Acquired lock "refresh_cache-86d5d264-7a7a-434b-a1c4-e9a004c0a034" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 654.140142] env[63028]: DEBUG nova.network.neutron [None req-5a552dd6-9917-4ffe-8484-017f00b84b26 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] [instance: 86d5d264-7a7a-434b-a1c4-e9a004c0a034] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 654.176915] env[63028]: DEBUG oslo_vmware.api [None req-bbc4a681-f719-4c6e-ac1b-190ea2c48080 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2735147, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.498381} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 654.178612] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbc4a681-f719-4c6e-ac1b-190ea2c48080 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] 5a340e31-678c-437e-aa4e-07d5d9f4334f/5a340e31-678c-437e-aa4e-07d5d9f4334f.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 654.178612] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-bbc4a681-f719-4c6e-ac1b-190ea2c48080 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: 5a340e31-678c-437e-aa4e-07d5d9f4334f] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 654.178612] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8c42e7f8-074b-4f53-aa81-364b033279f6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.185900] env[63028]: DEBUG oslo_vmware.api [None req-bbc4a681-f719-4c6e-ac1b-190ea2c48080 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Waiting for the task: (returnval){ [ 654.185900] env[63028]: value = "task-2735150" [ 654.185900] env[63028]: _type = "Task" [ 654.185900] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 654.196294] env[63028]: DEBUG oslo_vmware.api [None req-bbc4a681-f719-4c6e-ac1b-190ea2c48080 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2735150, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.311218] env[63028]: DEBUG oslo_concurrency.lockutils [None req-41141995-e826-4e05-8236-cab93d9f99c2 tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 654.465768] env[63028]: DEBUG oslo_vmware.api [None req-3778db47-461e-4bfc-8c37-f51d5973e5b9 tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Task: {'id': task-2735149, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.314511} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 654.465768] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-3778db47-461e-4bfc-8c37-f51d5973e5b9 tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 654.465768] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-3778db47-461e-4bfc-8c37-f51d5973e5b9 tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] [instance: c9cc1ac7-06c6-415b-86ce-daf4849bfc05] Deleted contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 654.465768] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-3778db47-461e-4bfc-8c37-f51d5973e5b9 tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] [instance: c9cc1ac7-06c6-415b-86ce-daf4849bfc05] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 654.465768] env[63028]: INFO nova.compute.manager [None req-3778db47-461e-4bfc-8c37-f51d5973e5b9 tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] [instance: c9cc1ac7-06c6-415b-86ce-daf4849bfc05] Took 1.19 seconds to destroy the instance on the hypervisor. [ 654.467323] env[63028]: DEBUG oslo.service.loopingcall [None req-3778db47-461e-4bfc-8c37-f51d5973e5b9 tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 654.467323] env[63028]: DEBUG nova.compute.manager [-] [instance: c9cc1ac7-06c6-415b-86ce-daf4849bfc05] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 654.467323] env[63028]: DEBUG nova.network.neutron [-] [instance: c9cc1ac7-06c6-415b-86ce-daf4849bfc05] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 654.472356] env[63028]: DEBUG nova.compute.manager [req-bbf99b87-5c03-4b5a-8ab2-d68823076055 req-e098a15f-6cc7-4375-8794-3f19d4100cdf service nova] [instance: 86d5d264-7a7a-434b-a1c4-e9a004c0a034] Received event network-vif-plugged-064a77e5-92b4-4a3f-9a42-bd577ec46683 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 654.472356] env[63028]: DEBUG oslo_concurrency.lockutils [req-bbf99b87-5c03-4b5a-8ab2-d68823076055 req-e098a15f-6cc7-4375-8794-3f19d4100cdf service nova] Acquiring lock "86d5d264-7a7a-434b-a1c4-e9a004c0a034-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 654.472356] env[63028]: DEBUG oslo_concurrency.lockutils [req-bbf99b87-5c03-4b5a-8ab2-d68823076055 req-e098a15f-6cc7-4375-8794-3f19d4100cdf service nova] Lock "86d5d264-7a7a-434b-a1c4-e9a004c0a034-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 654.472356] env[63028]: DEBUG oslo_concurrency.lockutils [req-bbf99b87-5c03-4b5a-8ab2-d68823076055 req-e098a15f-6cc7-4375-8794-3f19d4100cdf service nova] Lock "86d5d264-7a7a-434b-a1c4-e9a004c0a034-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 654.472534] env[63028]: DEBUG nova.compute.manager [req-bbf99b87-5c03-4b5a-8ab2-d68823076055 req-e098a15f-6cc7-4375-8794-3f19d4100cdf service nova] [instance: 86d5d264-7a7a-434b-a1c4-e9a004c0a034] No waiting events found dispatching network-vif-plugged-064a77e5-92b4-4a3f-9a42-bd577ec46683 {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 654.472748] env[63028]: WARNING nova.compute.manager [req-bbf99b87-5c03-4b5a-8ab2-d68823076055 req-e098a15f-6cc7-4375-8794-3f19d4100cdf service nova] [instance: 86d5d264-7a7a-434b-a1c4-e9a004c0a034] Received unexpected event network-vif-plugged-064a77e5-92b4-4a3f-9a42-bd577ec46683 for instance with vm_state building and task_state spawning. [ 654.472958] env[63028]: DEBUG nova.compute.manager [req-bbf99b87-5c03-4b5a-8ab2-d68823076055 req-e098a15f-6cc7-4375-8794-3f19d4100cdf service nova] [instance: 86d5d264-7a7a-434b-a1c4-e9a004c0a034] Received event network-changed-064a77e5-92b4-4a3f-9a42-bd577ec46683 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 654.473178] env[63028]: DEBUG nova.compute.manager [req-bbf99b87-5c03-4b5a-8ab2-d68823076055 req-e098a15f-6cc7-4375-8794-3f19d4100cdf service nova] [instance: 86d5d264-7a7a-434b-a1c4-e9a004c0a034] Refreshing instance network info cache due to event network-changed-064a77e5-92b4-4a3f-9a42-bd577ec46683. 
{{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 654.473427] env[63028]: DEBUG oslo_concurrency.lockutils [req-bbf99b87-5c03-4b5a-8ab2-d68823076055 req-e098a15f-6cc7-4375-8794-3f19d4100cdf service nova] Acquiring lock "refresh_cache-86d5d264-7a7a-434b-a1c4-e9a004c0a034" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 654.584270] env[63028]: DEBUG oslo_concurrency.lockutils [None req-154bc663-e50a-4893-813f-d703ddd97a79 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.217s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 654.586745] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c4ea86be-f4b7-4838-b9cd-5f32319112a9 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 38.334s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 654.586960] env[63028]: DEBUG nova.objects.instance [None req-c4ea86be-f4b7-4838-b9cd-5f32319112a9 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] [instance: 0dbafad1-ab21-439d-bc8e-e447ac33304e] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63028) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 654.624690] env[63028]: INFO nova.scheduler.client.report [None req-154bc663-e50a-4893-813f-d703ddd97a79 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Deleted allocations for instance 94b1bf30-0f9b-4197-99ff-6631a13ab2d1 [ 654.691745] env[63028]: DEBUG nova.network.neutron [None req-5a552dd6-9917-4ffe-8484-017f00b84b26 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] [instance: 86d5d264-7a7a-434b-a1c4-e9a004c0a034] Instance cache missing network info. {{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 654.701275] env[63028]: DEBUG oslo_vmware.api [None req-bbc4a681-f719-4c6e-ac1b-190ea2c48080 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2735150, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.085299} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 654.701275] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-bbc4a681-f719-4c6e-ac1b-190ea2c48080 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: 5a340e31-678c-437e-aa4e-07d5d9f4334f] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 654.702141] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8698155-d284-47c2-b478-9bb46f529f3f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.732664] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-bbc4a681-f719-4c6e-ac1b-190ea2c48080 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: 5a340e31-678c-437e-aa4e-07d5d9f4334f] Reconfiguring VM instance instance-0000001a to attach disk [datastore2] 5a340e31-678c-437e-aa4e-07d5d9f4334f/5a340e31-678c-437e-aa4e-07d5d9f4334f.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 654.735801] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-58d92123-58b6-4c08-99dc-ed9f4022947b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.759056] env[63028]: DEBUG oslo_vmware.api [None req-bbc4a681-f719-4c6e-ac1b-190ea2c48080 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Waiting for the task: (returnval){ [ 654.759056] env[63028]: value = "task-2735152" [ 654.759056] env[63028]: _type = "Task" [ 654.759056] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 654.769699] env[63028]: DEBUG oslo_vmware.api [None req-bbc4a681-f719-4c6e-ac1b-190ea2c48080 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2735152, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.949099] env[63028]: DEBUG nova.network.neutron [None req-5a552dd6-9917-4ffe-8484-017f00b84b26 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] [instance: 86d5d264-7a7a-434b-a1c4-e9a004c0a034] Updating instance_info_cache with network_info: [{"id": "064a77e5-92b4-4a3f-9a42-bd577ec46683", "address": "fa:16:3e:1a:08:49", "network": {"id": "47c482bc-2ff1-431d-8910-0bf36def79a2", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.170", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "96661ac8d4f04d6e97eea4809b444133", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56136ef6-99d7-4562-9a9f-d66fec951c5c", "external-id": "nsx-vlan-transportzone-32", "segmentation_id": 32, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap064a77e5-92", "ovs_interfaceid": "064a77e5-92b4-4a3f-9a42-bd577ec46683", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 655.138668] env[63028]: DEBUG oslo_concurrency.lockutils [None req-154bc663-e50a-4893-813f-d703ddd97a79 tempest-ServersAdmin275Test-1804329420 tempest-ServersAdmin275Test-1804329420-project-member] Lock "94b1bf30-0f9b-4197-99ff-6631a13ab2d1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 43.543s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 655.274030] env[63028]: DEBUG oslo_vmware.api [None req-bbc4a681-f719-4c6e-ac1b-190ea2c48080 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2735152, 'name': ReconfigVM_Task, 'duration_secs': 0.461031} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 655.274030] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-bbc4a681-f719-4c6e-ac1b-190ea2c48080 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: 5a340e31-678c-437e-aa4e-07d5d9f4334f] Reconfigured VM instance instance-0000001a to attach disk [datastore2] 5a340e31-678c-437e-aa4e-07d5d9f4334f/5a340e31-678c-437e-aa4e-07d5d9f4334f.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 655.274030] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ca822445-6de1-4c66-94cf-9ad41c22d3a3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.280254] env[63028]: DEBUG oslo_vmware.api [None req-bbc4a681-f719-4c6e-ac1b-190ea2c48080 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Waiting for the task: (returnval){ [ 655.280254] env[63028]: value = "task-2735153" [ 655.280254] env[63028]: _type = "Task" [ 655.280254] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 655.284512] env[63028]: DEBUG nova.network.neutron [-] [instance: c9cc1ac7-06c6-415b-86ce-daf4849bfc05] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 655.294583] env[63028]: DEBUG oslo_vmware.api [None req-bbc4a681-f719-4c6e-ac1b-190ea2c48080 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2735153, 'name': Rename_Task} progress is 5%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.452160] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5a552dd6-9917-4ffe-8484-017f00b84b26 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Releasing lock "refresh_cache-86d5d264-7a7a-434b-a1c4-e9a004c0a034" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 655.452671] env[63028]: DEBUG nova.compute.manager [None req-5a552dd6-9917-4ffe-8484-017f00b84b26 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] [instance: 86d5d264-7a7a-434b-a1c4-e9a004c0a034] Instance network_info: |[{"id": "064a77e5-92b4-4a3f-9a42-bd577ec46683", "address": "fa:16:3e:1a:08:49", "network": {"id": "47c482bc-2ff1-431d-8910-0bf36def79a2", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.170", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "96661ac8d4f04d6e97eea4809b444133", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56136ef6-99d7-4562-9a9f-d66fec951c5c", "external-id": "nsx-vlan-transportzone-32", "segmentation_id": 32, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap064a77e5-92", "ovs_interfaceid": "064a77e5-92b4-4a3f-9a42-bd577ec46683", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 655.452783] env[63028]: DEBUG oslo_concurrency.lockutils [req-bbf99b87-5c03-4b5a-8ab2-d68823076055 req-e098a15f-6cc7-4375-8794-3f19d4100cdf service nova] Acquired lock "refresh_cache-86d5d264-7a7a-434b-a1c4-e9a004c0a034" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 655.452980] env[63028]: DEBUG nova.network.neutron [req-bbf99b87-5c03-4b5a-8ab2-d68823076055 req-e098a15f-6cc7-4375-8794-3f19d4100cdf service nova] [instance: 86d5d264-7a7a-434b-a1c4-e9a004c0a034] Refreshing network info cache for port 064a77e5-92b4-4a3f-9a42-bd577ec46683 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 655.454654] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-5a552dd6-9917-4ffe-8484-017f00b84b26 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] [instance: 86d5d264-7a7a-434b-a1c4-e9a004c0a034] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1a:08:49', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '56136ef6-99d7-4562-9a9f-d66fec951c5c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '064a77e5-92b4-4a3f-9a42-bd577ec46683', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 655.462583] env[63028]: DEBUG oslo.service.loopingcall [None req-5a552dd6-9917-4ffe-8484-017f00b84b26 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] 
Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 655.463674] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 86d5d264-7a7a-434b-a1c4-e9a004c0a034] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 655.463908] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3c96fe3c-46b8-4fc5-b381-13369034912b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.487377] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 655.487377] env[63028]: value = "task-2735154" [ 655.487377] env[63028]: _type = "Task" [ 655.487377] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 655.500161] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735154, 'name': CreateVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.599350] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c4ea86be-f4b7-4838-b9cd-5f32319112a9 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.013s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 655.600934] env[63028]: DEBUG oslo_concurrency.lockutils [None req-b13b0bec-8617-40ed-b3d4-47ce3e1eeef1 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 39.226s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 655.603015] env[63028]: INFO nova.compute.claims [None req-b13b0bec-8617-40ed-b3d4-47ce3e1eeef1 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: ba57ed92-aaef-460c-bd45-d0cbe09e4615] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 655.795141] env[63028]: INFO nova.compute.manager [-] [instance: c9cc1ac7-06c6-415b-86ce-daf4849bfc05] Took 1.33 seconds to deallocate network for instance. [ 655.795500] env[63028]: DEBUG oslo_vmware.api [None req-bbc4a681-f719-4c6e-ac1b-190ea2c48080 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2735153, 'name': Rename_Task, 'duration_secs': 0.170182} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 655.797330] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbc4a681-f719-4c6e-ac1b-190ea2c48080 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: 5a340e31-678c-437e-aa4e-07d5d9f4334f] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 655.797882] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b3f0ca20-5354-44e6-a0cb-1e2bdc4cdddc {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.813274] env[63028]: DEBUG oslo_vmware.api [None req-bbc4a681-f719-4c6e-ac1b-190ea2c48080 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Waiting for the task: (returnval){ [ 655.813274] env[63028]: value = "task-2735155" [ 655.813274] env[63028]: _type = "Task" [ 655.813274] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 655.823533] env[63028]: DEBUG oslo_vmware.api [None req-bbc4a681-f719-4c6e-ac1b-190ea2c48080 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2735155, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.000198] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735154, 'name': CreateVM_Task, 'duration_secs': 0.358426} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 656.000371] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 86d5d264-7a7a-434b-a1c4-e9a004c0a034] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 656.001049] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5a552dd6-9917-4ffe-8484-017f00b84b26 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 656.001224] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5a552dd6-9917-4ffe-8484-017f00b84b26 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 656.001534] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5a552dd6-9917-4ffe-8484-017f00b84b26 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 656.001783] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-35aaea11-b6f4-403d-a343-14b5cdb9c2d9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
656.008205] env[63028]: DEBUG oslo_vmware.api [None req-5a552dd6-9917-4ffe-8484-017f00b84b26 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Waiting for the task: (returnval){ [ 656.008205] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]523e1368-8338-5c19-1b50-b7fd101abbb8" [ 656.008205] env[63028]: _type = "Task" [ 656.008205] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 656.018738] env[63028]: DEBUG oslo_vmware.api [None req-5a552dd6-9917-4ffe-8484-017f00b84b26 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]523e1368-8338-5c19-1b50-b7fd101abbb8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.228850] env[63028]: DEBUG nova.network.neutron [req-bbf99b87-5c03-4b5a-8ab2-d68823076055 req-e098a15f-6cc7-4375-8794-3f19d4100cdf service nova] [instance: 86d5d264-7a7a-434b-a1c4-e9a004c0a034] Updated VIF entry in instance network info cache for port 064a77e5-92b4-4a3f-9a42-bd577ec46683. {{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 656.228850] env[63028]: DEBUG nova.network.neutron [req-bbf99b87-5c03-4b5a-8ab2-d68823076055 req-e098a15f-6cc7-4375-8794-3f19d4100cdf service nova] [instance: 86d5d264-7a7a-434b-a1c4-e9a004c0a034] Updating instance_info_cache with network_info: [{"id": "064a77e5-92b4-4a3f-9a42-bd577ec46683", "address": "fa:16:3e:1a:08:49", "network": {"id": "47c482bc-2ff1-431d-8910-0bf36def79a2", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.170", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "96661ac8d4f04d6e97eea4809b444133", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56136ef6-99d7-4562-9a9f-d66fec951c5c", "external-id": "nsx-vlan-transportzone-32", "segmentation_id": 32, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap064a77e5-92", "ovs_interfaceid": "064a77e5-92b4-4a3f-9a42-bd577ec46683", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 656.308721] env[63028]: DEBUG oslo_concurrency.lockutils [None req-3778db47-461e-4bfc-8c37-f51d5973e5b9 tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 656.324568] env[63028]: DEBUG oslo_vmware.api [None req-bbc4a681-f719-4c6e-ac1b-190ea2c48080 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2735155, 'name': PowerOnVM_Task, 'duration_secs': 0.480348} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 656.325104] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbc4a681-f719-4c6e-ac1b-190ea2c48080 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: 5a340e31-678c-437e-aa4e-07d5d9f4334f] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 656.325418] env[63028]: INFO nova.compute.manager [None req-bbc4a681-f719-4c6e-ac1b-190ea2c48080 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: 5a340e31-678c-437e-aa4e-07d5d9f4334f] Took 6.99 seconds to spawn the instance on the hypervisor. [ 656.328018] env[63028]: DEBUG nova.compute.manager [None req-bbc4a681-f719-4c6e-ac1b-190ea2c48080 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: 5a340e31-678c-437e-aa4e-07d5d9f4334f] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 656.328018] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6529260f-f9d2-450e-b2d6-d2bd0be6820d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.498153] env[63028]: DEBUG nova.compute.manager [req-fe608d60-54ce-4074-b1fd-7f34433a9f32 req-e5285a52-b04e-4078-aa75-a6695b904ab1 service nova] [instance: c9cc1ac7-06c6-415b-86ce-daf4849bfc05] Received event network-vif-deleted-b8893183-7887-4add-9ad9-389b74e74c81 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 656.518479] env[63028]: DEBUG oslo_vmware.api [None req-5a552dd6-9917-4ffe-8484-017f00b84b26 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]523e1368-8338-5c19-1b50-b7fd101abbb8, 'name': SearchDatastore_Task, 'duration_secs': 0.013403} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 656.518773] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5a552dd6-9917-4ffe-8484-017f00b84b26 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 656.519011] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-5a552dd6-9917-4ffe-8484-017f00b84b26 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] [instance: 86d5d264-7a7a-434b-a1c4-e9a004c0a034] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 656.519327] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5a552dd6-9917-4ffe-8484-017f00b84b26 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 656.519479] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5a552dd6-9917-4ffe-8484-017f00b84b26 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 656.519658] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a552dd6-9917-4ffe-8484-017f00b84b26 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 656.520167] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cf44c1d7-16b2-4ec8-a96b-8cc4877dae77 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.529878] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a552dd6-9917-4ffe-8484-017f00b84b26 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 656.529878] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-5a552dd6-9917-4ffe-8484-017f00b84b26 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 656.530196] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ab802606-b0dd-408e-b913-dc639fbbda4c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.537713] env[63028]: DEBUG oslo_vmware.api [None req-5a552dd6-9917-4ffe-8484-017f00b84b26 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Waiting for the task: (returnval){ [ 656.537713] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52e1db3b-a785-c15c-9b00-fd7728297f52" [ 656.537713] env[63028]: _type = "Task" [ 656.537713] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 656.546941] env[63028]: DEBUG oslo_vmware.api [None req-5a552dd6-9917-4ffe-8484-017f00b84b26 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52e1db3b-a785-c15c-9b00-fd7728297f52, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.731734] env[63028]: DEBUG oslo_concurrency.lockutils [req-bbf99b87-5c03-4b5a-8ab2-d68823076055 req-e098a15f-6cc7-4375-8794-3f19d4100cdf service nova] Releasing lock "refresh_cache-86d5d264-7a7a-434b-a1c4-e9a004c0a034" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 656.848204] env[63028]: INFO nova.compute.manager [None req-bbc4a681-f719-4c6e-ac1b-190ea2c48080 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: 5a340e31-678c-437e-aa4e-07d5d9f4334f] Took 44.53 seconds to build instance. [ 657.063283] env[63028]: DEBUG oslo_vmware.api [None req-5a552dd6-9917-4ffe-8484-017f00b84b26 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52e1db3b-a785-c15c-9b00-fd7728297f52, 'name': SearchDatastore_Task, 'duration_secs': 0.013406} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 657.063802] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8338e569-f5c3-413c-9d3c-363b80aa8fab {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.070842] env[63028]: DEBUG oslo_vmware.api [None req-5a552dd6-9917-4ffe-8484-017f00b84b26 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Waiting for the task: (returnval){ [ 657.070842] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]524eb4aa-57bd-5cd6-caf5-255eeda679eb" [ 657.070842] env[63028]: _type = "Task" [ 657.070842] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 657.084337] env[63028]: DEBUG oslo_vmware.api [None req-5a552dd6-9917-4ffe-8484-017f00b84b26 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]524eb4aa-57bd-5cd6-caf5-255eeda679eb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.181366] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed72ef6a-003e-4b36-a323-b8daa8207f11 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.189796] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0d7429f-8b75-4d51-9800-ae8b0b14b91a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.222908] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21f5214b-9c7c-4edb-82d2-f8b209426cd2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.231916] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ea8b429-fc9e-4e0f-b8ff-6bf09d1be339 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.246563] env[63028]: DEBUG nova.compute.provider_tree [None req-b13b0bec-8617-40ed-b3d4-47ce3e1eeef1 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 657.352304] env[63028]: DEBUG oslo_concurrency.lockutils [None req-bbc4a681-f719-4c6e-ac1b-190ea2c48080 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Lock "5a340e31-678c-437e-aa4e-07d5d9f4334f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 49.965s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 657.587964] env[63028]: DEBUG oslo_vmware.api [None req-5a552dd6-9917-4ffe-8484-017f00b84b26 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]524eb4aa-57bd-5cd6-caf5-255eeda679eb, 'name': SearchDatastore_Task, 'duration_secs': 0.041712} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 657.587964] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5a552dd6-9917-4ffe-8484-017f00b84b26 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 657.588279] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a552dd6-9917-4ffe-8484-017f00b84b26 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] 86d5d264-7a7a-434b-a1c4-e9a004c0a034/86d5d264-7a7a-434b-a1c4-e9a004c0a034.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 657.588484] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2d9e2ba7-863e-4368-90c9-32ed080ab8d7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.601089] env[63028]: DEBUG oslo_vmware.api [None req-5a552dd6-9917-4ffe-8484-017f00b84b26 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Waiting for the task: (returnval){ [ 657.601089] env[63028]: value = "task-2735157" [ 657.601089] env[63028]: _type = "Task" [ 657.601089] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 657.610232] env[63028]: DEBUG oslo_vmware.api [None req-5a552dd6-9917-4ffe-8484-017f00b84b26 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Task: {'id': task-2735157, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.752771] env[63028]: DEBUG nova.scheduler.client.report [None req-b13b0bec-8617-40ed-b3d4-47ce3e1eeef1 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 657.853780] env[63028]: DEBUG nova.compute.manager [None req-404d9a50-0510-4bb1-afa5-ba09e7496ecd tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 6e0959ac-8fca-47eb-b501-b50a3e9f025a] Starting instance... 
{{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 658.115735] env[63028]: DEBUG oslo_vmware.api [None req-5a552dd6-9917-4ffe-8484-017f00b84b26 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Task: {'id': task-2735157, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 658.264031] env[63028]: DEBUG oslo_concurrency.lockutils [None req-b13b0bec-8617-40ed-b3d4-47ce3e1eeef1 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.663s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 658.264575] env[63028]: DEBUG nova.compute.manager [None req-b13b0bec-8617-40ed-b3d4-47ce3e1eeef1 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: ba57ed92-aaef-460c-bd45-d0cbe09e4615] Start building networks asynchronously for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 658.269509] env[63028]: DEBUG oslo_concurrency.lockutils [None req-38942d5b-be69-4e95-8160-07bf85f3e0d2 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 36.528s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 658.272352] env[63028]: INFO nova.compute.claims [None req-38942d5b-be69-4e95-8160-07bf85f3e0d2 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: b9d9fe4e-438c-4f68-b011-9eb9e10a381c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 658.384280] env[63028]: DEBUG oslo_concurrency.lockutils [None req-404d9a50-0510-4bb1-afa5-ba09e7496ecd tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 658.571169] env[63028]: DEBUG nova.compute.manager [req-9b654ff0-d4ed-452b-ae37-23d0d69c74b1 req-e979d3d9-e9f0-4fa3-a3db-81f0285b3982 service nova] [instance: 5a340e31-678c-437e-aa4e-07d5d9f4334f] Received event network-changed-39f160e2-809e-4b2c-9424-70448b807385 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 658.571339] env[63028]: DEBUG nova.compute.manager [req-9b654ff0-d4ed-452b-ae37-23d0d69c74b1 req-e979d3d9-e9f0-4fa3-a3db-81f0285b3982 service nova] [instance: 5a340e31-678c-437e-aa4e-07d5d9f4334f] Refreshing instance network info cache due to event network-changed-39f160e2-809e-4b2c-9424-70448b807385. 
{{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 658.571545] env[63028]: DEBUG oslo_concurrency.lockutils [req-9b654ff0-d4ed-452b-ae37-23d0d69c74b1 req-e979d3d9-e9f0-4fa3-a3db-81f0285b3982 service nova] Acquiring lock "refresh_cache-5a340e31-678c-437e-aa4e-07d5d9f4334f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 658.571682] env[63028]: DEBUG oslo_concurrency.lockutils [req-9b654ff0-d4ed-452b-ae37-23d0d69c74b1 req-e979d3d9-e9f0-4fa3-a3db-81f0285b3982 service nova] Acquired lock "refresh_cache-5a340e31-678c-437e-aa4e-07d5d9f4334f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 658.571835] env[63028]: DEBUG nova.network.neutron [req-9b654ff0-d4ed-452b-ae37-23d0d69c74b1 req-e979d3d9-e9f0-4fa3-a3db-81f0285b3982 service nova] [instance: 5a340e31-678c-437e-aa4e-07d5d9f4334f] Refreshing network info cache for port 39f160e2-809e-4b2c-9424-70448b807385 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 658.613274] env[63028]: DEBUG oslo_vmware.api [None req-5a552dd6-9917-4ffe-8484-017f00b84b26 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Task: {'id': task-2735157, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.548982} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 658.613274] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a552dd6-9917-4ffe-8484-017f00b84b26 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] 86d5d264-7a7a-434b-a1c4-e9a004c0a034/86d5d264-7a7a-434b-a1c4-e9a004c0a034.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 658.613274] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-5a552dd6-9917-4ffe-8484-017f00b84b26 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] [instance: 86d5d264-7a7a-434b-a1c4-e9a004c0a034] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 658.613274] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a65d560e-4725-4011-91ea-bd3de3a66177 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.621837] env[63028]: DEBUG oslo_vmware.api [None req-5a552dd6-9917-4ffe-8484-017f00b84b26 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Waiting for the task: (returnval){ [ 658.621837] env[63028]: value = "task-2735158" [ 658.621837] env[63028]: _type = "Task" [ 658.621837] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 658.636113] env[63028]: DEBUG oslo_vmware.api [None req-5a552dd6-9917-4ffe-8484-017f00b84b26 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Task: {'id': task-2735158, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 658.773084] env[63028]: DEBUG nova.compute.utils [None req-b13b0bec-8617-40ed-b3d4-47ce3e1eeef1 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 658.773084] env[63028]: DEBUG nova.compute.manager [None req-b13b0bec-8617-40ed-b3d4-47ce3e1eeef1 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: ba57ed92-aaef-460c-bd45-d0cbe09e4615] Allocating IP information in the background. {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 658.773482] env[63028]: DEBUG nova.network.neutron [None req-b13b0bec-8617-40ed-b3d4-47ce3e1eeef1 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: ba57ed92-aaef-460c-bd45-d0cbe09e4615] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 658.842016] env[63028]: DEBUG nova.policy [None req-b13b0bec-8617-40ed-b3d4-47ce3e1eeef1 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '32a3a4c8b9ef46bb9f1d927769b9dad9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1d3152ab577947b28de82f4801285f8c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 659.139682] env[63028]: DEBUG oslo_vmware.api [None req-5a552dd6-9917-4ffe-8484-017f00b84b26 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Task: {'id': task-2735158, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074791} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 659.141290] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-5a552dd6-9917-4ffe-8484-017f00b84b26 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] [instance: 86d5d264-7a7a-434b-a1c4-e9a004c0a034] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 659.141290] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bcb48dc-3985-4787-8ac1-8c926d8a4a62 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.174244] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-5a552dd6-9917-4ffe-8484-017f00b84b26 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] [instance: 86d5d264-7a7a-434b-a1c4-e9a004c0a034] Reconfiguring VM instance instance-0000001b to attach disk [datastore2] 86d5d264-7a7a-434b-a1c4-e9a004c0a034/86d5d264-7a7a-434b-a1c4-e9a004c0a034.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 659.175104] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4742d0e7-cacd-41bb-b001-46a0a33aa662 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.206449] env[63028]: DEBUG oslo_vmware.api [None req-5a552dd6-9917-4ffe-8484-017f00b84b26 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Waiting for the task: (returnval){ [ 659.206449] env[63028]: value = "task-2735159" [ 659.206449] env[63028]: _type = "Task" [ 659.206449] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 659.216059] env[63028]: DEBUG oslo_vmware.api [None req-5a552dd6-9917-4ffe-8484-017f00b84b26 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Task: {'id': task-2735159, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.250380] env[63028]: DEBUG nova.network.neutron [None req-b13b0bec-8617-40ed-b3d4-47ce3e1eeef1 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: ba57ed92-aaef-460c-bd45-d0cbe09e4615] Successfully created port: 66421979-b107-4dd5-9bc4-40bdefa3a5d0 {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 659.277454] env[63028]: DEBUG nova.compute.manager [None req-b13b0bec-8617-40ed-b3d4-47ce3e1eeef1 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: ba57ed92-aaef-460c-bd45-d0cbe09e4615] Start building block device mappings for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 659.423137] env[63028]: DEBUG nova.network.neutron [req-9b654ff0-d4ed-452b-ae37-23d0d69c74b1 req-e979d3d9-e9f0-4fa3-a3db-81f0285b3982 service nova] [instance: 5a340e31-678c-437e-aa4e-07d5d9f4334f] Updated VIF entry in instance network info cache for port 39f160e2-809e-4b2c-9424-70448b807385. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 659.423137] env[63028]: DEBUG nova.network.neutron [req-9b654ff0-d4ed-452b-ae37-23d0d69c74b1 req-e979d3d9-e9f0-4fa3-a3db-81f0285b3982 service nova] [instance: 5a340e31-678c-437e-aa4e-07d5d9f4334f] Updating instance_info_cache with network_info: [{"id": "39f160e2-809e-4b2c-9424-70448b807385", "address": "fa:16:3e:f5:5d:b4", "network": {"id": "c2f1496c-e3fd-43db-a032-12cdacdb4e46", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1287620143-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.199", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "25a6457f62d149629c09589feb1a550c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap39f160e2-80", "ovs_interfaceid": "39f160e2-809e-4b2c-9424-70448b807385", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 659.701688] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a6e1cdda-f79c-4f40-b760-2ed696cbf7f7 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Acquiring lock "50e4934b-b9b1-4887-b5d1-95a37fbf4c41" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 659.701688] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a6e1cdda-f79c-4f40-b760-2ed696cbf7f7 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Lock "50e4934b-b9b1-4887-b5d1-95a37fbf4c41" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 659.720124] env[63028]: DEBUG oslo_vmware.api [None req-5a552dd6-9917-4ffe-8484-017f00b84b26 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Task: {'id': task-2735159, 'name': ReconfigVM_Task, 'duration_secs': 0.31413} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 659.720124] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-5a552dd6-9917-4ffe-8484-017f00b84b26 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] [instance: 86d5d264-7a7a-434b-a1c4-e9a004c0a034] Reconfigured VM instance instance-0000001b to attach disk [datastore2] 86d5d264-7a7a-434b-a1c4-e9a004c0a034/86d5d264-7a7a-434b-a1c4-e9a004c0a034.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 659.720439] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-69647b69-9abe-4935-a747-6f23f698df1d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.729794] env[63028]: DEBUG oslo_vmware.api [None req-5a552dd6-9917-4ffe-8484-017f00b84b26 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Waiting for the task: (returnval){ [ 659.729794] env[63028]: value = "task-2735160" [ 659.729794] env[63028]: _type = "Task" [ 659.729794] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 659.744580] env[63028]: DEBUG oslo_vmware.api [None req-5a552dd6-9917-4ffe-8484-017f00b84b26 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Task: {'id': task-2735160, 'name': Rename_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.790515] env[63028]: DEBUG oslo_concurrency.lockutils [None req-66952caa-4d08-4457-8103-f1724319cde7 tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] Acquiring lock "600195de-ceb4-41a6-9ade-dda8b898e4db" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 659.790736] env[63028]: DEBUG oslo_concurrency.lockutils [None req-66952caa-4d08-4457-8103-f1724319cde7 tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] Lock "600195de-ceb4-41a6-9ade-dda8b898e4db" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 659.901496] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab378fc7-3843-4b6c-bade-098346afd554 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.910103] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fb962c4-880f-4c5a-80e1-332e3055b980 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.941965] env[63028]: DEBUG oslo_concurrency.lockutils [req-9b654ff0-d4ed-452b-ae37-23d0d69c74b1 req-e979d3d9-e9f0-4fa3-a3db-81f0285b3982 service nova] Releasing lock "refresh_cache-5a340e31-678c-437e-aa4e-07d5d9f4334f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} 
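Editor's note: the entries above and below repeat two library patterns — named locks from oslo.concurrency (the Acquiring/Acquired/Released lock lines for names like "refresh_cache-<uuid>" and "compute_resources") and vCenter task polling from oslo.vmware (wait_for_task driving the "_poll_task ... progress is N%" and "completed successfully" lines for CreateVM_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task and CopyVirtualDisk_Task). A minimal sketch of how a caller combines the two follows; the host, credentials, helper name and lock name are illustrative assumptions, not values taken from this log.

```python
# Illustrative sketch only -- host, credentials, helper name and lock name
# are assumptions, not values from this log.
from oslo_concurrency import lockutils
from oslo_vmware import api as vmware_api


def rename_vm(session, vm_ref, new_name, instance_uuid):
    # Hold a named in-process lock around the operation, the same pattern the
    # log shows for locks such as "refresh_cache-<uuid>".
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        # invoke_api submits Rename_Task to vCenter; wait_for_task then polls
        # the task, which is what produces the "_poll_task ... progress is N%"
        # and "completed successfully" entries seen in this log.
        task = session.invoke_api(session.vim, 'Rename_Task',
                                  vm_ref, newName=new_name)
        session.wait_for_task(task)


# A session comparable to the one the driver established at start-up; the
# constructor logs in to vCenter when it runs (values below are made up).
session = vmware_api.VMwareAPISession(
    'vc.example.test', 'admin', 'secret',
    api_retry_count=10, task_poll_interval=0.5)
```

The blocking wait_for_task call is the source of the periodic progress entries interleaved through this section; each task's final poll is logged at api.py:444 once it completes.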
[ 659.943214] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08c10fac-20b3-41d9-975e-75b237284cb6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.952613] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d54ec67-21dc-4905-9294-ffae6ed0a173 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.968052] env[63028]: DEBUG nova.compute.provider_tree [None req-38942d5b-be69-4e95-8160-07bf85f3e0d2 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 660.241888] env[63028]: DEBUG oslo_vmware.api [None req-5a552dd6-9917-4ffe-8484-017f00b84b26 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Task: {'id': task-2735160, 'name': Rename_Task} progress is 99%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.292554] env[63028]: DEBUG nova.compute.manager [None req-b13b0bec-8617-40ed-b3d4-47ce3e1eeef1 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: ba57ed92-aaef-460c-bd45-d0cbe09e4615] Start spawning the instance on the hypervisor. {{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 660.320811] env[63028]: DEBUG nova.virt.hardware [None req-b13b0bec-8617-40ed-b3d4-47ce3e1eeef1 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 660.321100] env[63028]: DEBUG nova.virt.hardware [None req-b13b0bec-8617-40ed-b3d4-47ce3e1eeef1 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 660.321269] env[63028]: DEBUG nova.virt.hardware [None req-b13b0bec-8617-40ed-b3d4-47ce3e1eeef1 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 660.321511] env[63028]: DEBUG nova.virt.hardware [None req-b13b0bec-8617-40ed-b3d4-47ce3e1eeef1 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Flavor pref 0:0:0 
{{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 660.323472] env[63028]: DEBUG nova.virt.hardware [None req-b13b0bec-8617-40ed-b3d4-47ce3e1eeef1 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 660.323726] env[63028]: DEBUG nova.virt.hardware [None req-b13b0bec-8617-40ed-b3d4-47ce3e1eeef1 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 660.323910] env[63028]: DEBUG nova.virt.hardware [None req-b13b0bec-8617-40ed-b3d4-47ce3e1eeef1 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 660.324083] env[63028]: DEBUG nova.virt.hardware [None req-b13b0bec-8617-40ed-b3d4-47ce3e1eeef1 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 660.324261] env[63028]: DEBUG nova.virt.hardware [None req-b13b0bec-8617-40ed-b3d4-47ce3e1eeef1 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 660.324432] env[63028]: DEBUG nova.virt.hardware [None req-b13b0bec-8617-40ed-b3d4-47ce3e1eeef1 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 660.324582] env[63028]: DEBUG nova.virt.hardware [None req-b13b0bec-8617-40ed-b3d4-47ce3e1eeef1 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 660.329018] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f6fd9ba-df5b-41f9-95ca-33c98e92b079 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.335471] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08e62cdf-069d-4cc7-b785-ffb1773155f1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.471864] env[63028]: DEBUG nova.scheduler.client.report [None req-38942d5b-be69-4e95-8160-07bf85f3e0d2 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 
'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 660.746639] env[63028]: DEBUG oslo_vmware.api [None req-5a552dd6-9917-4ffe-8484-017f00b84b26 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Task: {'id': task-2735160, 'name': Rename_Task} progress is 99%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.978029] env[63028]: DEBUG oslo_concurrency.lockutils [None req-38942d5b-be69-4e95-8160-07bf85f3e0d2 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.708s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 660.978622] env[63028]: DEBUG nova.compute.manager [None req-38942d5b-be69-4e95-8160-07bf85f3e0d2 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: b9d9fe4e-438c-4f68-b011-9eb9e10a381c] Start building networks asynchronously for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 660.981193] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d44a3b69-c923-454a-bfd2-94e1a4dde1e3 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 36.662s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 661.137955] env[63028]: DEBUG nova.compute.manager [req-0a8ed97d-0b37-43ef-9990-1ef95caec362 req-a096463d-945a-42c0-a86f-b1218a0c9710 service nova] [instance: ba57ed92-aaef-460c-bd45-d0cbe09e4615] Received event network-vif-plugged-66421979-b107-4dd5-9bc4-40bdefa3a5d0 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 661.138195] env[63028]: DEBUG oslo_concurrency.lockutils [req-0a8ed97d-0b37-43ef-9990-1ef95caec362 req-a096463d-945a-42c0-a86f-b1218a0c9710 service nova] Acquiring lock "ba57ed92-aaef-460c-bd45-d0cbe09e4615-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 661.138432] env[63028]: DEBUG oslo_concurrency.lockutils [req-0a8ed97d-0b37-43ef-9990-1ef95caec362 req-a096463d-945a-42c0-a86f-b1218a0c9710 service nova] Lock "ba57ed92-aaef-460c-bd45-d0cbe09e4615-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 661.138598] env[63028]: DEBUG oslo_concurrency.lockutils [req-0a8ed97d-0b37-43ef-9990-1ef95caec362 req-a096463d-945a-42c0-a86f-b1218a0c9710 service nova] Lock "ba57ed92-aaef-460c-bd45-d0cbe09e4615-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 661.138766] env[63028]: DEBUG nova.compute.manager [req-0a8ed97d-0b37-43ef-9990-1ef95caec362 req-a096463d-945a-42c0-a86f-b1218a0c9710 service nova] 
[instance: ba57ed92-aaef-460c-bd45-d0cbe09e4615] No waiting events found dispatching network-vif-plugged-66421979-b107-4dd5-9bc4-40bdefa3a5d0 {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 661.138939] env[63028]: WARNING nova.compute.manager [req-0a8ed97d-0b37-43ef-9990-1ef95caec362 req-a096463d-945a-42c0-a86f-b1218a0c9710 service nova] [instance: ba57ed92-aaef-460c-bd45-d0cbe09e4615] Received unexpected event network-vif-plugged-66421979-b107-4dd5-9bc4-40bdefa3a5d0 for instance with vm_state building and task_state spawning. [ 661.149179] env[63028]: DEBUG nova.network.neutron [None req-b13b0bec-8617-40ed-b3d4-47ce3e1eeef1 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: ba57ed92-aaef-460c-bd45-d0cbe09e4615] Successfully updated port: 66421979-b107-4dd5-9bc4-40bdefa3a5d0 {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 661.242433] env[63028]: DEBUG oslo_vmware.api [None req-5a552dd6-9917-4ffe-8484-017f00b84b26 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Task: {'id': task-2735160, 'name': Rename_Task, 'duration_secs': 1.156941} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 661.242433] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a552dd6-9917-4ffe-8484-017f00b84b26 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] [instance: 86d5d264-7a7a-434b-a1c4-e9a004c0a034] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 661.242806] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-817015ad-20f4-49ec-a9be-fc3cda7013a0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.249864] env[63028]: DEBUG oslo_vmware.api [None req-5a552dd6-9917-4ffe-8484-017f00b84b26 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Waiting for the task: (returnval){ [ 661.249864] env[63028]: value = "task-2735161" [ 661.249864] env[63028]: _type = "Task" [ 661.249864] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 661.258037] env[63028]: DEBUG oslo_vmware.api [None req-5a552dd6-9917-4ffe-8484-017f00b84b26 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Task: {'id': task-2735161, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.487057] env[63028]: INFO nova.compute.claims [None req-d44a3b69-c923-454a-bfd2-94e1a4dde1e3 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: f3277886-4498-45c6-be68-e71d8293dc00] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 661.491853] env[63028]: DEBUG nova.compute.utils [None req-38942d5b-be69-4e95-8160-07bf85f3e0d2 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 661.493777] env[63028]: DEBUG nova.compute.manager [None req-38942d5b-be69-4e95-8160-07bf85f3e0d2 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: b9d9fe4e-438c-4f68-b011-9eb9e10a381c] Allocating IP information in the background. {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 661.493948] env[63028]: DEBUG nova.network.neutron [None req-38942d5b-be69-4e95-8160-07bf85f3e0d2 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: b9d9fe4e-438c-4f68-b011-9eb9e10a381c] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 661.538030] env[63028]: DEBUG nova.policy [None req-38942d5b-be69-4e95-8160-07bf85f3e0d2 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '32a3a4c8b9ef46bb9f1d927769b9dad9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1d3152ab577947b28de82f4801285f8c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 661.652430] env[63028]: DEBUG oslo_concurrency.lockutils [None req-b13b0bec-8617-40ed-b3d4-47ce3e1eeef1 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Acquiring lock "refresh_cache-ba57ed92-aaef-460c-bd45-d0cbe09e4615" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 661.652592] env[63028]: DEBUG oslo_concurrency.lockutils [None req-b13b0bec-8617-40ed-b3d4-47ce3e1eeef1 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Acquired lock "refresh_cache-ba57ed92-aaef-460c-bd45-d0cbe09e4615" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 661.652745] env[63028]: DEBUG nova.network.neutron [None req-b13b0bec-8617-40ed-b3d4-47ce3e1eeef1 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: ba57ed92-aaef-460c-bd45-d0cbe09e4615] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 661.762459] env[63028]: DEBUG oslo_vmware.api [None req-5a552dd6-9917-4ffe-8484-017f00b84b26 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Task: {'id': task-2735161, 'name': PowerOnVM_Task, 'duration_secs': 
0.477191} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 661.762767] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a552dd6-9917-4ffe-8484-017f00b84b26 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] [instance: 86d5d264-7a7a-434b-a1c4-e9a004c0a034] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 661.762962] env[63028]: INFO nova.compute.manager [None req-5a552dd6-9917-4ffe-8484-017f00b84b26 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] [instance: 86d5d264-7a7a-434b-a1c4-e9a004c0a034] Took 9.64 seconds to spawn the instance on the hypervisor. [ 661.763154] env[63028]: DEBUG nova.compute.manager [None req-5a552dd6-9917-4ffe-8484-017f00b84b26 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] [instance: 86d5d264-7a7a-434b-a1c4-e9a004c0a034] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 661.763946] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e142478c-c725-4676-926e-2d7641af4a1a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.882602] env[63028]: DEBUG nova.network.neutron [None req-38942d5b-be69-4e95-8160-07bf85f3e0d2 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: b9d9fe4e-438c-4f68-b011-9eb9e10a381c] Successfully created port: a07ae27f-6861-4db7-be8b-8f9e4f170f3e {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 661.994222] env[63028]: DEBUG nova.compute.manager [None req-38942d5b-be69-4e95-8160-07bf85f3e0d2 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: b9d9fe4e-438c-4f68-b011-9eb9e10a381c] Start building block device mappings for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 662.002837] env[63028]: INFO nova.compute.resource_tracker [None req-d44a3b69-c923-454a-bfd2-94e1a4dde1e3 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: f3277886-4498-45c6-be68-e71d8293dc00] Updating resource usage from migration 423e4fa8-4060-4a21-aa81-5c4eef279e90 [ 662.195251] env[63028]: DEBUG nova.network.neutron [None req-b13b0bec-8617-40ed-b3d4-47ce3e1eeef1 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: ba57ed92-aaef-460c-bd45-d0cbe09e4615] Instance cache missing network info. {{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 662.284619] env[63028]: INFO nova.compute.manager [None req-5a552dd6-9917-4ffe-8484-017f00b84b26 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] [instance: 86d5d264-7a7a-434b-a1c4-e9a004c0a034] Took 47.83 seconds to build instance. 
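
The Rename_Task and PowerOnVM_Task entries above show the same wait loop each time: submit a vCenter task, then poll it and log its progress until it reaches a terminal state. A minimal sketch of that loop, not oslo.vmware's wait_for_task; fetch_task_info is a hypothetical callable standing in for the real property query and returns (state, progress, error):

import time

def wait_for_task(fetch_task_info, task_id, poll_interval=0.5, timeout=300):
    """Poll a vSphere-style task until it succeeds, fails or times out."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        state, progress, error = fetch_task_info(task_id)
        print(f"Task: {{'id': {task_id!r}}} progress is {progress}%.")
        if state == "success":
            return
        if state == "error":
            raise RuntimeError(f"Task {task_id} failed: {error}")
        time.sleep(poll_interval)   # queued / running -> poll again
    raise TimeoutError(f"Task {task_id} did not complete in {timeout}s")

The logged 'duration_secs' values (0.477s for PowerOnVM_Task above) are simply the wall-clock time between submitting the task and the poll that first sees it completed.
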
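
The nova.virt.hardware entries earlier in this excerpt ("Flavor limits 0:0:0", "Build topologies for 1 vcpu(s) 1:1:1", "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]") trace the choice of a sockets/cores/threads layout for the guest. A rough, simplified sketch of that selection, not the real nova.virt.hardware code: enumerate factorisations of the vCPU count, keep those within the limits, and prefer any explicitly requested values (0 means "unset", and unset limits default to 65536):

from itertools import product

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    topos = []
    for s, c, t in product(range(1, vcpus + 1), repeat=3):
        if s * c * t == vcpus and s <= max_sockets and c <= max_cores and t <= max_threads:
            topos.append((s, c, t))
    return topos

def pick_topology(vcpus, preferred=(0, 0, 0), **limits):
    topos = possible_topologies(vcpus, **limits)
    # Topologies matching an explicit (non-zero) preference sort first.
    def score(topo):
        return sum(1 for want, got in zip(preferred, topo) if want and want == got)
    return sorted(topos, key=score, reverse=True)[0]

print(pick_topology(1))   # -> (1, 1, 1), matching the single-vCPU m1.nano flavor in the log
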
[ 662.349511] env[63028]: DEBUG nova.network.neutron [None req-b13b0bec-8617-40ed-b3d4-47ce3e1eeef1 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: ba57ed92-aaef-460c-bd45-d0cbe09e4615] Updating instance_info_cache with network_info: [{"id": "66421979-b107-4dd5-9bc4-40bdefa3a5d0", "address": "fa:16:3e:78:3c:3d", "network": {"id": "8cd8a274-fe00-4495-9dbd-201d72164415", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-450064933-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1d3152ab577947b28de82f4801285f8c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4576b9d4-535c-40aa-b078-246f671f216e", "external-id": "nsx-vlan-transportzone-27", "segmentation_id": 27, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap66421979-b1", "ovs_interfaceid": "66421979-b107-4dd5-9bc4-40bdefa3a5d0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 662.481292] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73d5b003-caae-4531-a0c0-02c4ba39b24a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.490260] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b7ff53f-51c8-43d8-9504-52a300a5eafc {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.524857] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62b2ef5a-0582-4f1a-879d-ff29de410e90 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.534126] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-505f1aac-5c2e-4eeb-a4e0-a2b914917017 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.548513] env[63028]: DEBUG nova.compute.provider_tree [None req-d44a3b69-c923-454a-bfd2-94e1a4dde1e3 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 662.787161] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5a552dd6-9917-4ffe-8484-017f00b84b26 tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Lock "86d5d264-7a7a-434b-a1c4-e9a004c0a034" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 54.678s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 662.855418] env[63028]: DEBUG 
oslo_concurrency.lockutils [None req-b13b0bec-8617-40ed-b3d4-47ce3e1eeef1 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Releasing lock "refresh_cache-ba57ed92-aaef-460c-bd45-d0cbe09e4615" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 662.855768] env[63028]: DEBUG nova.compute.manager [None req-b13b0bec-8617-40ed-b3d4-47ce3e1eeef1 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: ba57ed92-aaef-460c-bd45-d0cbe09e4615] Instance network_info: |[{"id": "66421979-b107-4dd5-9bc4-40bdefa3a5d0", "address": "fa:16:3e:78:3c:3d", "network": {"id": "8cd8a274-fe00-4495-9dbd-201d72164415", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-450064933-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1d3152ab577947b28de82f4801285f8c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4576b9d4-535c-40aa-b078-246f671f216e", "external-id": "nsx-vlan-transportzone-27", "segmentation_id": 27, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap66421979-b1", "ovs_interfaceid": "66421979-b107-4dd5-9bc4-40bdefa3a5d0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 662.856210] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-b13b0bec-8617-40ed-b3d4-47ce3e1eeef1 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: ba57ed92-aaef-460c-bd45-d0cbe09e4615] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:78:3c:3d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4576b9d4-535c-40aa-b078-246f671f216e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '66421979-b107-4dd5-9bc4-40bdefa3a5d0', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 662.863540] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-b13b0bec-8617-40ed-b3d4-47ce3e1eeef1 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Creating folder: Project (1d3152ab577947b28de82f4801285f8c). Parent ref: group-v550570. {{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 662.863807] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3055884b-cde6-4076-a46d-789b99b15e70 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.876454] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-b13b0bec-8617-40ed-b3d4-47ce3e1eeef1 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Created folder: Project (1d3152ab577947b28de82f4801285f8c) in parent group-v550570. 
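
Before CreateVM_Task can run, the driver makes sure a "Project (<tenant id>)" folder and an "Instances" folder beneath it exist in vCenter, which is what the two Folder.CreateFolder invocations above correspond to. A hedged sketch of that create-if-missing step; the session object and its find_child/create_folder helpers are hypothetical stand-ins, not the real nova.virt.vmwareapi.vm_util API:

def ensure_folder(session, parent_ref, name):
    """Return the child folder 'name' under parent_ref, creating it if needed."""
    child = session.find_child(parent_ref, name)        # hypothetical lookup
    if child is not None:
        return child
    try:
        return session.create_folder(parent_ref, name)  # hypothetical create
    except FileExistsError:
        # Assume the helper raises this on a duplicate name: another build
        # created the folder between the lookup and the create, so re-read it.
        return session.find_child(parent_ref, name)

def ensure_instance_folders(session, openstack_root, project_id):
    project_folder = ensure_folder(session, openstack_root,
                                   f"Project ({project_id})")
    return ensure_folder(session, project_folder, "Instances")

Treating a duplicate-name error as success keeps the step idempotent when two builds for the same project race, which is why "Extension/Folder already exists" messages in this trace are informational rather than failures.
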
[ 662.877169] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-b13b0bec-8617-40ed-b3d4-47ce3e1eeef1 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Creating folder: Instances. Parent ref: group-v550667. {{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 662.877169] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-dfc4c7c4-59e1-4b99-9d73-1706c33f3cee {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.888732] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-b13b0bec-8617-40ed-b3d4-47ce3e1eeef1 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Created folder: Instances in parent group-v550667. [ 662.888732] env[63028]: DEBUG oslo.service.loopingcall [None req-b13b0bec-8617-40ed-b3d4-47ce3e1eeef1 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 662.888951] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ba57ed92-aaef-460c-bd45-d0cbe09e4615] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 662.889092] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-998e3298-dc18-4eaa-afd3-4743b18b8953 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.909100] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 662.909100] env[63028]: value = "task-2735164" [ 662.909100] env[63028]: _type = "Task" [ 662.909100] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 662.917332] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735164, 'name': CreateVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.029964] env[63028]: DEBUG nova.compute.manager [None req-38942d5b-be69-4e95-8160-07bf85f3e0d2 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: b9d9fe4e-438c-4f68-b011-9eb9e10a381c] Start spawning the instance on the hypervisor. 
{{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 663.053226] env[63028]: DEBUG nova.scheduler.client.report [None req-d44a3b69-c923-454a-bfd2-94e1a4dde1e3 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 663.058758] env[63028]: DEBUG nova.virt.hardware [None req-38942d5b-be69-4e95-8160-07bf85f3e0d2 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 663.059513] env[63028]: DEBUG nova.virt.hardware [None req-38942d5b-be69-4e95-8160-07bf85f3e0d2 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 663.059513] env[63028]: DEBUG nova.virt.hardware [None req-38942d5b-be69-4e95-8160-07bf85f3e0d2 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 663.059513] env[63028]: DEBUG nova.virt.hardware [None req-38942d5b-be69-4e95-8160-07bf85f3e0d2 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 663.059513] env[63028]: DEBUG nova.virt.hardware [None req-38942d5b-be69-4e95-8160-07bf85f3e0d2 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 663.059802] env[63028]: DEBUG nova.virt.hardware [None req-38942d5b-be69-4e95-8160-07bf85f3e0d2 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 663.060110] env[63028]: DEBUG nova.virt.hardware [None 
req-38942d5b-be69-4e95-8160-07bf85f3e0d2 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 663.060290] env[63028]: DEBUG nova.virt.hardware [None req-38942d5b-be69-4e95-8160-07bf85f3e0d2 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 663.060471] env[63028]: DEBUG nova.virt.hardware [None req-38942d5b-be69-4e95-8160-07bf85f3e0d2 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 663.060709] env[63028]: DEBUG nova.virt.hardware [None req-38942d5b-be69-4e95-8160-07bf85f3e0d2 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 663.060920] env[63028]: DEBUG nova.virt.hardware [None req-38942d5b-be69-4e95-8160-07bf85f3e0d2 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 663.062147] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-565676d8-30b2-48e0-bc8e-aebaffcd476e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.076105] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57122beb-10ab-4279-bd32-9497af85f3d6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.265170] env[63028]: DEBUG nova.compute.manager [req-71d80adc-6182-42b3-b582-ad3e4710a5af req-4644bc8c-6d32-4faf-ba48-60b6e968666f service nova] [instance: ba57ed92-aaef-460c-bd45-d0cbe09e4615] Received event network-changed-66421979-b107-4dd5-9bc4-40bdefa3a5d0 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 663.265170] env[63028]: DEBUG nova.compute.manager [req-71d80adc-6182-42b3-b582-ad3e4710a5af req-4644bc8c-6d32-4faf-ba48-60b6e968666f service nova] [instance: ba57ed92-aaef-460c-bd45-d0cbe09e4615] Refreshing instance network info cache due to event network-changed-66421979-b107-4dd5-9bc4-40bdefa3a5d0. 
{{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 663.265170] env[63028]: DEBUG oslo_concurrency.lockutils [req-71d80adc-6182-42b3-b582-ad3e4710a5af req-4644bc8c-6d32-4faf-ba48-60b6e968666f service nova] Acquiring lock "refresh_cache-ba57ed92-aaef-460c-bd45-d0cbe09e4615" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 663.265170] env[63028]: DEBUG oslo_concurrency.lockutils [req-71d80adc-6182-42b3-b582-ad3e4710a5af req-4644bc8c-6d32-4faf-ba48-60b6e968666f service nova] Acquired lock "refresh_cache-ba57ed92-aaef-460c-bd45-d0cbe09e4615" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 663.265170] env[63028]: DEBUG nova.network.neutron [req-71d80adc-6182-42b3-b582-ad3e4710a5af req-4644bc8c-6d32-4faf-ba48-60b6e968666f service nova] [instance: ba57ed92-aaef-460c-bd45-d0cbe09e4615] Refreshing network info cache for port 66421979-b107-4dd5-9bc4-40bdefa3a5d0 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 663.293025] env[63028]: DEBUG nova.compute.manager [None req-eabfacb7-034a-4283-9259-08f5af8f4706 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 1af19279-e75b-4ec5-91f1-a0a101b229b2] Starting instance... {{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 663.420597] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735164, 'name': CreateVM_Task, 'duration_secs': 0.323859} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 663.420994] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ba57ed92-aaef-460c-bd45-d0cbe09e4615] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 663.421798] env[63028]: DEBUG oslo_concurrency.lockutils [None req-b13b0bec-8617-40ed-b3d4-47ce3e1eeef1 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 663.422172] env[63028]: DEBUG oslo_concurrency.lockutils [None req-b13b0bec-8617-40ed-b3d4-47ce3e1eeef1 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 663.422770] env[63028]: DEBUG oslo_concurrency.lockutils [None req-b13b0bec-8617-40ed-b3d4-47ce3e1eeef1 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 663.423183] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8c238ea2-7bcb-4d39-97c2-7e50dcffefdd {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.429314] env[63028]: DEBUG oslo_vmware.api [None req-b13b0bec-8617-40ed-b3d4-47ce3e1eeef1 tempest-ServersAdminTestJSON-462896532 
tempest-ServersAdminTestJSON-462896532-project-member] Waiting for the task: (returnval){ [ 663.429314] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]522744ad-38f0-a10d-052c-5ea3c756e8ff" [ 663.429314] env[63028]: _type = "Task" [ 663.429314] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 663.439192] env[63028]: DEBUG oslo_vmware.api [None req-b13b0bec-8617-40ed-b3d4-47ce3e1eeef1 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]522744ad-38f0-a10d-052c-5ea3c756e8ff, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.567751] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d44a3b69-c923-454a-bfd2-94e1a4dde1e3 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.586s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 663.567978] env[63028]: INFO nova.compute.manager [None req-d44a3b69-c923-454a-bfd2-94e1a4dde1e3 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: f3277886-4498-45c6-be68-e71d8293dc00] Migrating [ 663.568226] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d44a3b69-c923-454a-bfd2-94e1a4dde1e3 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Acquiring lock "compute-rpcapi-router" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 663.568414] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d44a3b69-c923-454a-bfd2-94e1a4dde1e3 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Acquired lock "compute-rpcapi-router" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 663.569731] env[63028]: DEBUG oslo_concurrency.lockutils [None req-03cc79ea-bccb-49a4-aa10-c7474e32f926 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 36.210s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 663.570608] env[63028]: DEBUG nova.objects.instance [None req-03cc79ea-bccb-49a4-aa10-c7474e32f926 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Lazy-loading 'resources' on Instance uuid c7a3f2c6-8368-49cc-9737-ea1d836f1783 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 663.576814] env[63028]: DEBUG oslo_concurrency.lockutils [None req-10107302-ac52-488d-9e70-d2e14676970a tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Acquiring lock "86d5d264-7a7a-434b-a1c4-e9a004c0a034" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 663.576814] env[63028]: DEBUG oslo_concurrency.lockutils [None 
req-10107302-ac52-488d-9e70-d2e14676970a tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Lock "86d5d264-7a7a-434b-a1c4-e9a004c0a034" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 663.576814] env[63028]: DEBUG oslo_concurrency.lockutils [None req-10107302-ac52-488d-9e70-d2e14676970a tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Acquiring lock "86d5d264-7a7a-434b-a1c4-e9a004c0a034-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 663.576814] env[63028]: DEBUG oslo_concurrency.lockutils [None req-10107302-ac52-488d-9e70-d2e14676970a tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Lock "86d5d264-7a7a-434b-a1c4-e9a004c0a034-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 663.576980] env[63028]: DEBUG oslo_concurrency.lockutils [None req-10107302-ac52-488d-9e70-d2e14676970a tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Lock "86d5d264-7a7a-434b-a1c4-e9a004c0a034-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 663.582512] env[63028]: INFO nova.compute.manager [None req-10107302-ac52-488d-9e70-d2e14676970a tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] [instance: 86d5d264-7a7a-434b-a1c4-e9a004c0a034] Terminating instance [ 663.599764] env[63028]: DEBUG nova.network.neutron [None req-38942d5b-be69-4e95-8160-07bf85f3e0d2 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: b9d9fe4e-438c-4f68-b011-9eb9e10a381c] Successfully updated port: a07ae27f-6861-4db7-be8b-8f9e4f170f3e {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 663.829418] env[63028]: DEBUG oslo_concurrency.lockutils [None req-eabfacb7-034a-4283-9259-08f5af8f4706 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 663.944035] env[63028]: DEBUG oslo_vmware.api [None req-b13b0bec-8617-40ed-b3d4-47ce3e1eeef1 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]522744ad-38f0-a10d-052c-5ea3c756e8ff, 'name': SearchDatastore_Task, 'duration_secs': 0.010616} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 663.944035] env[63028]: DEBUG oslo_concurrency.lockutils [None req-b13b0bec-8617-40ed-b3d4-47ce3e1eeef1 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 663.944035] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-b13b0bec-8617-40ed-b3d4-47ce3e1eeef1 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: ba57ed92-aaef-460c-bd45-d0cbe09e4615] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 663.944035] env[63028]: DEBUG oslo_concurrency.lockutils [None req-b13b0bec-8617-40ed-b3d4-47ce3e1eeef1 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 663.944285] env[63028]: DEBUG oslo_concurrency.lockutils [None req-b13b0bec-8617-40ed-b3d4-47ce3e1eeef1 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 663.944285] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-b13b0bec-8617-40ed-b3d4-47ce3e1eeef1 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 663.944285] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a9589886-61cf-477c-9b19-ac7c0f0eca2b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.954039] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-b13b0bec-8617-40ed-b3d4-47ce3e1eeef1 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 663.954932] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-b13b0bec-8617-40ed-b3d4-47ce3e1eeef1 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 663.954932] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0a512bc6-427b-4fa3-bb65-b2b365f38e27 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.961033] env[63028]: DEBUG oslo_vmware.api [None req-b13b0bec-8617-40ed-b3d4-47ce3e1eeef1 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Waiting for the task: (returnval){ [ 663.961033] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5209cfee-814b-8a3d-6b80-30f52914e78f" [ 663.961033] env[63028]: _type = "Task" [ 663.961033] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 663.970423] env[63028]: DEBUG oslo_vmware.api [None req-b13b0bec-8617-40ed-b3d4-47ce3e1eeef1 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5209cfee-814b-8a3d-6b80-30f52914e78f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.033721] env[63028]: DEBUG nova.network.neutron [req-71d80adc-6182-42b3-b582-ad3e4710a5af req-4644bc8c-6d32-4faf-ba48-60b6e968666f service nova] [instance: ba57ed92-aaef-460c-bd45-d0cbe09e4615] Updated VIF entry in instance network info cache for port 66421979-b107-4dd5-9bc4-40bdefa3a5d0. {{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 664.034105] env[63028]: DEBUG nova.network.neutron [req-71d80adc-6182-42b3-b582-ad3e4710a5af req-4644bc8c-6d32-4faf-ba48-60b6e968666f service nova] [instance: ba57ed92-aaef-460c-bd45-d0cbe09e4615] Updating instance_info_cache with network_info: [{"id": "66421979-b107-4dd5-9bc4-40bdefa3a5d0", "address": "fa:16:3e:78:3c:3d", "network": {"id": "8cd8a274-fe00-4495-9dbd-201d72164415", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-450064933-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1d3152ab577947b28de82f4801285f8c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4576b9d4-535c-40aa-b078-246f671f216e", "external-id": "nsx-vlan-transportzone-27", "segmentation_id": 27, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap66421979-b1", "ovs_interfaceid": "66421979-b107-4dd5-9bc4-40bdefa3a5d0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 664.079336] env[63028]: INFO nova.compute.rpcapi [None req-d44a3b69-c923-454a-bfd2-94e1a4dde1e3 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Automatically selected compute RPC version 6.4 from minimum service version 68 [ 664.079916] env[63028]: DEBUG 
oslo_concurrency.lockutils [None req-d44a3b69-c923-454a-bfd2-94e1a4dde1e3 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Releasing lock "compute-rpcapi-router" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 664.087445] env[63028]: DEBUG nova.compute.manager [None req-10107302-ac52-488d-9e70-d2e14676970a tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] [instance: 86d5d264-7a7a-434b-a1c4-e9a004c0a034] Start destroying the instance on the hypervisor. {{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 664.087445] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-10107302-ac52-488d-9e70-d2e14676970a tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] [instance: 86d5d264-7a7a-434b-a1c4-e9a004c0a034] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 664.088387] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61219734-6706-4a8a-8386-b97dc6203008 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.102019] env[63028]: DEBUG oslo_concurrency.lockutils [None req-38942d5b-be69-4e95-8160-07bf85f3e0d2 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Acquiring lock "refresh_cache-b9d9fe4e-438c-4f68-b011-9eb9e10a381c" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 664.102190] env[63028]: DEBUG oslo_concurrency.lockutils [None req-38942d5b-be69-4e95-8160-07bf85f3e0d2 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Acquired lock "refresh_cache-b9d9fe4e-438c-4f68-b011-9eb9e10a381c" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 664.102553] env[63028]: DEBUG nova.network.neutron [None req-38942d5b-be69-4e95-8160-07bf85f3e0d2 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: b9d9fe4e-438c-4f68-b011-9eb9e10a381c] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 664.107162] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-10107302-ac52-488d-9e70-d2e14676970a tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] [instance: 86d5d264-7a7a-434b-a1c4-e9a004c0a034] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 664.110205] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c3961d5b-71a4-477f-bd37-4c23131b35d6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.120489] env[63028]: DEBUG oslo_vmware.api [None req-10107302-ac52-488d-9e70-d2e14676970a tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Waiting for the task: (returnval){ [ 664.120489] env[63028]: value = "task-2735165" [ 664.120489] env[63028]: _type = "Task" [ 664.120489] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 664.134388] env[63028]: DEBUG oslo_vmware.api [None req-10107302-ac52-488d-9e70-d2e14676970a tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Task: {'id': task-2735165, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.472689] env[63028]: DEBUG oslo_vmware.api [None req-b13b0bec-8617-40ed-b3d4-47ce3e1eeef1 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5209cfee-814b-8a3d-6b80-30f52914e78f, 'name': SearchDatastore_Task, 'duration_secs': 0.021277} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 664.476425] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f78fc82f-4ca6-4282-bef3-f0a053b46d73 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.482336] env[63028]: DEBUG oslo_vmware.api [None req-b13b0bec-8617-40ed-b3d4-47ce3e1eeef1 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Waiting for the task: (returnval){ [ 664.482336] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]526c968c-be04-1f5f-1613-1833e947ecbd" [ 664.482336] env[63028]: _type = "Task" [ 664.482336] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 664.495935] env[63028]: DEBUG oslo_vmware.api [None req-b13b0bec-8617-40ed-b3d4-47ce3e1eeef1 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]526c968c-be04-1f5f-1613-1833e947ecbd, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.537148] env[63028]: DEBUG oslo_concurrency.lockutils [req-71d80adc-6182-42b3-b582-ad3e4710a5af req-4644bc8c-6d32-4faf-ba48-60b6e968666f service nova] Releasing lock "refresh_cache-ba57ed92-aaef-460c-bd45-d0cbe09e4615" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 664.554018] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8107a55b-453b-4ff4-89ad-e2b98242c2e1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.562299] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5813a711-5d38-4d1b-95df-585398373c23 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.595514] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-554e8041-d563-47cd-9996-87ea03a9e4eb {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.602879] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d44a3b69-c923-454a-bfd2-94e1a4dde1e3 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Acquiring lock "refresh_cache-f3277886-4498-45c6-be68-e71d8293dc00" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 664.603517] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d44a3b69-c923-454a-bfd2-94e1a4dde1e3 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Acquired lock "refresh_cache-f3277886-4498-45c6-be68-e71d8293dc00" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 664.603517] env[63028]: DEBUG nova.network.neutron [None req-d44a3b69-c923-454a-bfd2-94e1a4dde1e3 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: f3277886-4498-45c6-be68-e71d8293dc00] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 664.609750] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1588c66a-2732-4eea-bcd8-01163329380b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.626208] env[63028]: DEBUG nova.compute.provider_tree [None req-03cc79ea-bccb-49a4-aa10-c7474e32f926 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 664.636660] env[63028]: DEBUG oslo_vmware.api [None req-10107302-ac52-488d-9e70-d2e14676970a tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Task: {'id': task-2735165, 'name': PowerOffVM_Task, 'duration_secs': 0.209469} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 664.636660] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-10107302-ac52-488d-9e70-d2e14676970a tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] [instance: 86d5d264-7a7a-434b-a1c4-e9a004c0a034] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 664.636660] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-10107302-ac52-488d-9e70-d2e14676970a tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] [instance: 86d5d264-7a7a-434b-a1c4-e9a004c0a034] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 664.637405] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-34370608-07fb-4b67-85f0-3e2eb048ee16 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.660338] env[63028]: DEBUG nova.network.neutron [None req-38942d5b-be69-4e95-8160-07bf85f3e0d2 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: b9d9fe4e-438c-4f68-b011-9eb9e10a381c] Instance cache missing network info. {{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 664.704934] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-10107302-ac52-488d-9e70-d2e14676970a tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] [instance: 86d5d264-7a7a-434b-a1c4-e9a004c0a034] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 664.705617] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-10107302-ac52-488d-9e70-d2e14676970a tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] [instance: 86d5d264-7a7a-434b-a1c4-e9a004c0a034] Deleting contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 664.705617] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-10107302-ac52-488d-9e70-d2e14676970a tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Deleting the datastore file [datastore2] 86d5d264-7a7a-434b-a1c4-e9a004c0a034 {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 664.706186] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c0cda39c-b34a-409c-99ed-2592fec470fb {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.714122] env[63028]: DEBUG oslo_vmware.api [None req-10107302-ac52-488d-9e70-d2e14676970a tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Waiting for the task: (returnval){ [ 664.714122] env[63028]: value = "task-2735167" [ 664.714122] env[63028]: _type = "Task" [ 664.714122] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 664.724705] env[63028]: DEBUG oslo_vmware.api [None req-10107302-ac52-488d-9e70-d2e14676970a tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Task: {'id': task-2735167, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.837397] env[63028]: DEBUG nova.network.neutron [None req-38942d5b-be69-4e95-8160-07bf85f3e0d2 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: b9d9fe4e-438c-4f68-b011-9eb9e10a381c] Updating instance_info_cache with network_info: [{"id": "a07ae27f-6861-4db7-be8b-8f9e4f170f3e", "address": "fa:16:3e:d4:c3:72", "network": {"id": "8cd8a274-fe00-4495-9dbd-201d72164415", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-450064933-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1d3152ab577947b28de82f4801285f8c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4576b9d4-535c-40aa-b078-246f671f216e", "external-id": "nsx-vlan-transportzone-27", "segmentation_id": 27, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa07ae27f-68", "ovs_interfaceid": "a07ae27f-6861-4db7-be8b-8f9e4f170f3e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 664.993598] env[63028]: DEBUG oslo_vmware.api [None req-b13b0bec-8617-40ed-b3d4-47ce3e1eeef1 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]526c968c-be04-1f5f-1613-1833e947ecbd, 'name': SearchDatastore_Task, 'duration_secs': 0.017473} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 664.993890] env[63028]: DEBUG oslo_concurrency.lockutils [None req-b13b0bec-8617-40ed-b3d4-47ce3e1eeef1 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 664.994153] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-b13b0bec-8617-40ed-b3d4-47ce3e1eeef1 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] ba57ed92-aaef-460c-bd45-d0cbe09e4615/ba57ed92-aaef-460c-bd45-d0cbe09e4615.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 664.994409] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6be048ed-206b-40c5-88d8-efc63de4979d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.001530] env[63028]: DEBUG oslo_vmware.api [None req-b13b0bec-8617-40ed-b3d4-47ce3e1eeef1 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Waiting for the task: (returnval){ [ 665.001530] env[63028]: value = "task-2735168" [ 665.001530] env[63028]: _type = "Task" [ 665.001530] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.009486] env[63028]: DEBUG oslo_vmware.api [None req-b13b0bec-8617-40ed-b3d4-47ce3e1eeef1 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': task-2735168, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.131981] env[63028]: DEBUG nova.scheduler.client.report [None req-03cc79ea-bccb-49a4-aa10-c7474e32f926 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 665.231279] env[63028]: DEBUG oslo_vmware.api [None req-10107302-ac52-488d-9e70-d2e14676970a tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Task: {'id': task-2735167, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.196549} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 665.231279] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-10107302-ac52-488d-9e70-d2e14676970a tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 665.231530] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-10107302-ac52-488d-9e70-d2e14676970a tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] [instance: 86d5d264-7a7a-434b-a1c4-e9a004c0a034] Deleted contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 665.231723] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-10107302-ac52-488d-9e70-d2e14676970a tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] [instance: 86d5d264-7a7a-434b-a1c4-e9a004c0a034] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 665.231981] env[63028]: INFO nova.compute.manager [None req-10107302-ac52-488d-9e70-d2e14676970a tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] [instance: 86d5d264-7a7a-434b-a1c4-e9a004c0a034] Took 1.14 seconds to destroy the instance on the hypervisor. [ 665.232417] env[63028]: DEBUG oslo.service.loopingcall [None req-10107302-ac52-488d-9e70-d2e14676970a tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 665.232724] env[63028]: DEBUG nova.compute.manager [-] [instance: 86d5d264-7a7a-434b-a1c4-e9a004c0a034] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 665.232881] env[63028]: DEBUG nova.network.neutron [-] [instance: 86d5d264-7a7a-434b-a1c4-e9a004c0a034] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 665.340529] env[63028]: DEBUG oslo_concurrency.lockutils [None req-38942d5b-be69-4e95-8160-07bf85f3e0d2 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Releasing lock "refresh_cache-b9d9fe4e-438c-4f68-b011-9eb9e10a381c" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 665.341101] env[63028]: DEBUG nova.compute.manager [None req-38942d5b-be69-4e95-8160-07bf85f3e0d2 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: b9d9fe4e-438c-4f68-b011-9eb9e10a381c] Instance network_info: |[{"id": "a07ae27f-6861-4db7-be8b-8f9e4f170f3e", "address": "fa:16:3e:d4:c3:72", "network": {"id": "8cd8a274-fe00-4495-9dbd-201d72164415", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-450064933-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1d3152ab577947b28de82f4801285f8c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4576b9d4-535c-40aa-b078-246f671f216e", "external-id": "nsx-vlan-transportzone-27", "segmentation_id": 27, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa07ae27f-68", "ovs_interfaceid": "a07ae27f-6861-4db7-be8b-8f9e4f170f3e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 665.341333] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-38942d5b-be69-4e95-8160-07bf85f3e0d2 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: b9d9fe4e-438c-4f68-b011-9eb9e10a381c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d4:c3:72', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4576b9d4-535c-40aa-b078-246f671f216e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a07ae27f-6861-4db7-be8b-8f9e4f170f3e', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 665.352955] env[63028]: DEBUG oslo.service.loopingcall [None req-38942d5b-be69-4e95-8160-07bf85f3e0d2 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 665.352955] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b9d9fe4e-438c-4f68-b011-9eb9e10a381c] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 665.353162] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5c2f4772-f8df-4bde-a113-dbaf1d2e3d13 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.377023] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 665.377023] env[63028]: value = "task-2735169" [ 665.377023] env[63028]: _type = "Task" [ 665.377023] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.379357] env[63028]: DEBUG nova.compute.manager [req-85872942-7d59-4fa8-afc6-2bd47313e629 req-57dde7ad-2495-491b-95a3-11fd7624e937 service nova] [instance: b9d9fe4e-438c-4f68-b011-9eb9e10a381c] Received event network-vif-plugged-a07ae27f-6861-4db7-be8b-8f9e4f170f3e {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 665.379609] env[63028]: DEBUG oslo_concurrency.lockutils [req-85872942-7d59-4fa8-afc6-2bd47313e629 req-57dde7ad-2495-491b-95a3-11fd7624e937 service nova] Acquiring lock "b9d9fe4e-438c-4f68-b011-9eb9e10a381c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 665.379822] env[63028]: DEBUG oslo_concurrency.lockutils [req-85872942-7d59-4fa8-afc6-2bd47313e629 req-57dde7ad-2495-491b-95a3-11fd7624e937 service nova] Lock "b9d9fe4e-438c-4f68-b011-9eb9e10a381c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 665.380081] env[63028]: DEBUG oslo_concurrency.lockutils [req-85872942-7d59-4fa8-afc6-2bd47313e629 req-57dde7ad-2495-491b-95a3-11fd7624e937 service nova] Lock "b9d9fe4e-438c-4f68-b011-9eb9e10a381c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 665.380257] env[63028]: DEBUG nova.compute.manager [req-85872942-7d59-4fa8-afc6-2bd47313e629 req-57dde7ad-2495-491b-95a3-11fd7624e937 service nova] [instance: b9d9fe4e-438c-4f68-b011-9eb9e10a381c] No waiting events found dispatching network-vif-plugged-a07ae27f-6861-4db7-be8b-8f9e4f170f3e {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 665.380460] env[63028]: WARNING nova.compute.manager [req-85872942-7d59-4fa8-afc6-2bd47313e629 req-57dde7ad-2495-491b-95a3-11fd7624e937 service nova] [instance: b9d9fe4e-438c-4f68-b011-9eb9e10a381c] Received unexpected event network-vif-plugged-a07ae27f-6861-4db7-be8b-8f9e4f170f3e for instance with vm_state building and task_state spawning. 
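The CreateVM_Task, CopyVirtualDisk_Task and SearchDatastore_Task records above all follow the same oslo.vmware pattern: a SOAP method is invoked through the API session, the returned task reference is handed to wait_for_task, and _poll_task emits the "progress is N%" DEBUG records until the task completes. A minimal sketch of that pattern is shown below; it is illustrative only, not code from this deployment, and both the session object and the VM managed-object reference are assumed placeholders.

    from oslo_vmware import api

    def power_off(session, vm_ref):
        # Assumptions: 'session' is an oslo_vmware.api.VMwareAPISession and
        # 'vm_ref' is a VirtualMachine managed-object reference; both are
        # placeholders here. invoke_api() issues the SOAP call and returns a
        # task reference; wait_for_task() polls it (the _poll_task "progress
        # is N%" DEBUG records) and raises if the task ends in an error state.
        task_ref = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        return session.wait_for_task(task_ref)
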
[ 665.380670] env[63028]: DEBUG nova.compute.manager [req-85872942-7d59-4fa8-afc6-2bd47313e629 req-57dde7ad-2495-491b-95a3-11fd7624e937 service nova] [instance: b9d9fe4e-438c-4f68-b011-9eb9e10a381c] Received event network-changed-a07ae27f-6861-4db7-be8b-8f9e4f170f3e {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 665.380830] env[63028]: DEBUG nova.compute.manager [req-85872942-7d59-4fa8-afc6-2bd47313e629 req-57dde7ad-2495-491b-95a3-11fd7624e937 service nova] [instance: b9d9fe4e-438c-4f68-b011-9eb9e10a381c] Refreshing instance network info cache due to event network-changed-a07ae27f-6861-4db7-be8b-8f9e4f170f3e. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 665.381060] env[63028]: DEBUG oslo_concurrency.lockutils [req-85872942-7d59-4fa8-afc6-2bd47313e629 req-57dde7ad-2495-491b-95a3-11fd7624e937 service nova] Acquiring lock "refresh_cache-b9d9fe4e-438c-4f68-b011-9eb9e10a381c" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 665.381203] env[63028]: DEBUG oslo_concurrency.lockutils [req-85872942-7d59-4fa8-afc6-2bd47313e629 req-57dde7ad-2495-491b-95a3-11fd7624e937 service nova] Acquired lock "refresh_cache-b9d9fe4e-438c-4f68-b011-9eb9e10a381c" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 665.381394] env[63028]: DEBUG nova.network.neutron [req-85872942-7d59-4fa8-afc6-2bd47313e629 req-57dde7ad-2495-491b-95a3-11fd7624e937 service nova] [instance: b9d9fe4e-438c-4f68-b011-9eb9e10a381c] Refreshing network info cache for port a07ae27f-6861-4db7-be8b-8f9e4f170f3e {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 665.397840] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735169, 'name': CreateVM_Task} progress is 6%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.426837] env[63028]: DEBUG nova.network.neutron [None req-d44a3b69-c923-454a-bfd2-94e1a4dde1e3 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: f3277886-4498-45c6-be68-e71d8293dc00] Updating instance_info_cache with network_info: [{"id": "08a61148-5b3a-4bb0-a130-3eda62d6bf7c", "address": "fa:16:3e:8e:67:6c", "network": {"id": "47c482bc-2ff1-431d-8910-0bf36def79a2", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.130", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "96661ac8d4f04d6e97eea4809b444133", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56136ef6-99d7-4562-9a9f-d66fec951c5c", "external-id": "nsx-vlan-transportzone-32", "segmentation_id": 32, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap08a61148-5b", "ovs_interfaceid": "08a61148-5b3a-4bb0-a130-3eda62d6bf7c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 665.514767] env[63028]: DEBUG oslo_vmware.api [None req-b13b0bec-8617-40ed-b3d4-47ce3e1eeef1 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': task-2735168, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.472763} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 665.515062] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-b13b0bec-8617-40ed-b3d4-47ce3e1eeef1 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] ba57ed92-aaef-460c-bd45-d0cbe09e4615/ba57ed92-aaef-460c-bd45-d0cbe09e4615.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 665.515291] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-b13b0bec-8617-40ed-b3d4-47ce3e1eeef1 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: ba57ed92-aaef-460c-bd45-d0cbe09e4615] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 665.515557] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-da99bd07-62b2-4a39-831f-028e753a0e60 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.522041] env[63028]: DEBUG oslo_vmware.api [None req-b13b0bec-8617-40ed-b3d4-47ce3e1eeef1 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Waiting for the task: (returnval){ [ 665.522041] env[63028]: value = "task-2735170" [ 665.522041] env[63028]: _type = "Task" [ 665.522041] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.532478] env[63028]: DEBUG oslo_vmware.api [None req-b13b0bec-8617-40ed-b3d4-47ce3e1eeef1 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': task-2735170, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.637459] env[63028]: DEBUG oslo_concurrency.lockutils [None req-03cc79ea-bccb-49a4-aa10-c7474e32f926 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.068s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 665.640029] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d9b105ba-e8d2-4569-8e3f-130dc12b502c tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 38.211s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 665.640330] env[63028]: DEBUG nova.objects.instance [None req-d9b105ba-e8d2-4569-8e3f-130dc12b502c tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Lazy-loading 'resources' on Instance uuid 0dbafad1-ab21-439d-bc8e-e447ac33304e {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 665.711410] env[63028]: INFO nova.scheduler.client.report [None req-03cc79ea-bccb-49a4-aa10-c7474e32f926 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Deleted allocations for instance c7a3f2c6-8368-49cc-9737-ea1d836f1783 [ 665.731137] env[63028]: DEBUG nova.compute.manager [req-b711b7dd-9f42-4471-828f-81e172ba23f5 req-e3b39875-85b3-4dcc-9491-4b054003afb3 service nova] [instance: 86d5d264-7a7a-434b-a1c4-e9a004c0a034] Received event network-vif-deleted-064a77e5-92b4-4a3f-9a42-bd577ec46683 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 665.731798] env[63028]: INFO nova.compute.manager [req-b711b7dd-9f42-4471-828f-81e172ba23f5 req-e3b39875-85b3-4dcc-9491-4b054003afb3 service nova] [instance: 86d5d264-7a7a-434b-a1c4-e9a004c0a034] Neutron deleted interface 064a77e5-92b4-4a3f-9a42-bd577ec46683; detaching it from the instance and deleting it from the info cache [ 665.731798] env[63028]: DEBUG nova.network.neutron [req-b711b7dd-9f42-4471-828f-81e172ba23f5 req-e3b39875-85b3-4dcc-9491-4b054003afb3 service nova] [instance: 86d5d264-7a7a-434b-a1c4-e9a004c0a034] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 665.886995] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735169, 'name': CreateVM_Task, 'duration_secs': 0.344118} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 665.888029] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b9d9fe4e-438c-4f68-b011-9eb9e10a381c] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 665.890975] env[63028]: DEBUG oslo_concurrency.lockutils [None req-38942d5b-be69-4e95-8160-07bf85f3e0d2 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 665.890975] env[63028]: DEBUG oslo_concurrency.lockutils [None req-38942d5b-be69-4e95-8160-07bf85f3e0d2 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 665.891875] env[63028]: DEBUG oslo_concurrency.lockutils [None req-38942d5b-be69-4e95-8160-07bf85f3e0d2 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 665.892202] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c3a09395-4017-42a7-a909-5c9706ea7012 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Acquiring lock "1316318e-8dcf-4ac2-b40a-6a3ab6964997" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 665.892454] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c3a09395-4017-42a7-a909-5c9706ea7012 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Lock "1316318e-8dcf-4ac2-b40a-6a3ab6964997" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 665.895648] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8ffa7863-dd5c-47e1-a1b3-5e4b5ac2f4bc {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.900679] env[63028]: DEBUG oslo_vmware.api [None req-38942d5b-be69-4e95-8160-07bf85f3e0d2 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Waiting for the task: (returnval){ [ 665.900679] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5255171c-177b-2ad8-b4bf-9ea54d3c7ecc" [ 665.900679] env[63028]: _type = "Task" [ 665.900679] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.911974] env[63028]: DEBUG oslo_vmware.api [None req-38942d5b-be69-4e95-8160-07bf85f3e0d2 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5255171c-177b-2ad8-b4bf-9ea54d3c7ecc, 'name': SearchDatastore_Task, 'duration_secs': 0.008878} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 665.912319] env[63028]: DEBUG oslo_concurrency.lockutils [None req-38942d5b-be69-4e95-8160-07bf85f3e0d2 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 665.912536] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-38942d5b-be69-4e95-8160-07bf85f3e0d2 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: b9d9fe4e-438c-4f68-b011-9eb9e10a381c] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 665.912774] env[63028]: DEBUG oslo_concurrency.lockutils [None req-38942d5b-be69-4e95-8160-07bf85f3e0d2 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 665.912923] env[63028]: DEBUG oslo_concurrency.lockutils [None req-38942d5b-be69-4e95-8160-07bf85f3e0d2 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 665.913326] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-38942d5b-be69-4e95-8160-07bf85f3e0d2 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 665.913901] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-93e1ddfd-e295-4701-8b5e-9491ef7be8f1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.924736] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-38942d5b-be69-4e95-8160-07bf85f3e0d2 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 665.925009] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-38942d5b-be69-4e95-8160-07bf85f3e0d2 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 665.930091] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9c941e1a-389e-4dad-ae00-80f9f2f87a2d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.933288] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d44a3b69-c923-454a-bfd2-94e1a4dde1e3 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Releasing lock "refresh_cache-f3277886-4498-45c6-be68-e71d8293dc00" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 665.937649] env[63028]: DEBUG oslo_vmware.api [None req-38942d5b-be69-4e95-8160-07bf85f3e0d2 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Waiting for the task: (returnval){ [ 665.937649] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]526f6239-6020-426f-ba3b-8b0e564461c9" [ 665.937649] env[63028]: _type = "Task" [ 665.937649] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.946069] env[63028]: DEBUG oslo_vmware.api [None req-38942d5b-be69-4e95-8160-07bf85f3e0d2 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]526f6239-6020-426f-ba3b-8b0e564461c9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.032940] env[63028]: DEBUG oslo_vmware.api [None req-b13b0bec-8617-40ed-b3d4-47ce3e1eeef1 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': task-2735170, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.108669} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 666.033268] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-b13b0bec-8617-40ed-b3d4-47ce3e1eeef1 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: ba57ed92-aaef-460c-bd45-d0cbe09e4615] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 666.034115] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f8230d4-aa72-4830-b41c-70b37fe8bc16 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.055427] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-b13b0bec-8617-40ed-b3d4-47ce3e1eeef1 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: ba57ed92-aaef-460c-bd45-d0cbe09e4615] Reconfiguring VM instance instance-0000001c to attach disk [datastore2] ba57ed92-aaef-460c-bd45-d0cbe09e4615/ba57ed92-aaef-460c-bd45-d0cbe09e4615.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 666.056687] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-95d5b79c-264f-4649-a290-b5287817df2a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.073752] env[63028]: DEBUG nova.network.neutron [-] [instance: 86d5d264-7a7a-434b-a1c4-e9a004c0a034] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 666.080968] env[63028]: DEBUG oslo_vmware.api [None req-b13b0bec-8617-40ed-b3d4-47ce3e1eeef1 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Waiting for the task: (returnval){ [ 666.080968] env[63028]: value = "task-2735171" [ 666.080968] env[63028]: _type = "Task" [ 666.080968] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 666.089742] env[63028]: DEBUG oslo_vmware.api [None req-b13b0bec-8617-40ed-b3d4-47ce3e1eeef1 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': task-2735171, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.155034] env[63028]: DEBUG nova.network.neutron [req-85872942-7d59-4fa8-afc6-2bd47313e629 req-57dde7ad-2495-491b-95a3-11fd7624e937 service nova] [instance: b9d9fe4e-438c-4f68-b011-9eb9e10a381c] Updated VIF entry in instance network info cache for port a07ae27f-6861-4db7-be8b-8f9e4f170f3e. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 666.155034] env[63028]: DEBUG nova.network.neutron [req-85872942-7d59-4fa8-afc6-2bd47313e629 req-57dde7ad-2495-491b-95a3-11fd7624e937 service nova] [instance: b9d9fe4e-438c-4f68-b011-9eb9e10a381c] Updating instance_info_cache with network_info: [{"id": "a07ae27f-6861-4db7-be8b-8f9e4f170f3e", "address": "fa:16:3e:d4:c3:72", "network": {"id": "8cd8a274-fe00-4495-9dbd-201d72164415", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-450064933-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1d3152ab577947b28de82f4801285f8c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4576b9d4-535c-40aa-b078-246f671f216e", "external-id": "nsx-vlan-transportzone-27", "segmentation_id": 27, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa07ae27f-68", "ovs_interfaceid": "a07ae27f-6861-4db7-be8b-8f9e4f170f3e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 666.223520] env[63028]: DEBUG oslo_concurrency.lockutils [None req-03cc79ea-bccb-49a4-aa10-c7474e32f926 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Lock "c7a3f2c6-8368-49cc-9737-ea1d836f1783" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 43.579s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 666.224676] env[63028]: DEBUG oslo_concurrency.lockutils [None req-1053660c-ddf1-48d6-a5c4-3c3460088539 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Lock "c7a3f2c6-8368-49cc-9737-ea1d836f1783" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 37.526s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 666.224958] env[63028]: DEBUG oslo_concurrency.lockutils [None req-1053660c-ddf1-48d6-a5c4-3c3460088539 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Acquiring lock "c7a3f2c6-8368-49cc-9737-ea1d836f1783-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 666.225157] env[63028]: DEBUG oslo_concurrency.lockutils [None req-1053660c-ddf1-48d6-a5c4-3c3460088539 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Lock "c7a3f2c6-8368-49cc-9737-ea1d836f1783-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 666.225334] env[63028]: 
DEBUG oslo_concurrency.lockutils [None req-1053660c-ddf1-48d6-a5c4-3c3460088539 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Lock "c7a3f2c6-8368-49cc-9737-ea1d836f1783-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 666.229831] env[63028]: INFO nova.compute.manager [None req-1053660c-ddf1-48d6-a5c4-3c3460088539 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] [instance: c7a3f2c6-8368-49cc-9737-ea1d836f1783] Terminating instance [ 666.236630] env[63028]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-71de1651-6770-4565-9c72-ef6946a75e2f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.248927] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a612df7-e50d-41ce-869d-ab370ea05a26 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.289812] env[63028]: DEBUG nova.compute.manager [req-b711b7dd-9f42-4471-828f-81e172ba23f5 req-e3b39875-85b3-4dcc-9491-4b054003afb3 service nova] [instance: 86d5d264-7a7a-434b-a1c4-e9a004c0a034] Detach interface failed, port_id=064a77e5-92b4-4a3f-9a42-bd577ec46683, reason: Instance 86d5d264-7a7a-434b-a1c4-e9a004c0a034 could not be found. {{(pid=63028) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 666.309875] env[63028]: DEBUG oslo_concurrency.lockutils [None req-0845dbb5-9e8a-45d4-8122-206916da704e tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Acquiring lock "5982cd5d-abf1-42d4-bb44-8d79de599f11" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 666.310131] env[63028]: DEBUG oslo_concurrency.lockutils [None req-0845dbb5-9e8a-45d4-8122-206916da704e tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Lock "5982cd5d-abf1-42d4-bb44-8d79de599f11" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 666.450945] env[63028]: DEBUG oslo_vmware.api [None req-38942d5b-be69-4e95-8160-07bf85f3e0d2 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]526f6239-6020-426f-ba3b-8b0e564461c9, 'name': SearchDatastore_Task, 'duration_secs': 0.009406} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 666.451741] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-34c5ce5d-3498-4ddd-a64b-c04948fc396b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.459625] env[63028]: DEBUG oslo_vmware.api [None req-38942d5b-be69-4e95-8160-07bf85f3e0d2 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Waiting for the task: (returnval){ [ 666.459625] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52d85769-3f5e-56f3-848d-9ab239f9c4a9" [ 666.459625] env[63028]: _type = "Task" [ 666.459625] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 666.468288] env[63028]: DEBUG oslo_vmware.api [None req-38942d5b-be69-4e95-8160-07bf85f3e0d2 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52d85769-3f5e-56f3-848d-9ab239f9c4a9, 'name': SearchDatastore_Task, 'duration_secs': 0.008684} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 666.470771] env[63028]: DEBUG oslo_concurrency.lockutils [None req-38942d5b-be69-4e95-8160-07bf85f3e0d2 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 666.471065] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-38942d5b-be69-4e95-8160-07bf85f3e0d2 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] b9d9fe4e-438c-4f68-b011-9eb9e10a381c/b9d9fe4e-438c-4f68-b011-9eb9e10a381c.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 666.471498] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c49f9c93-966e-466f-9506-44b39f19fbbe {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.477528] env[63028]: DEBUG oslo_vmware.api [None req-38942d5b-be69-4e95-8160-07bf85f3e0d2 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Waiting for the task: (returnval){ [ 666.477528] env[63028]: value = "task-2735172" [ 666.477528] env[63028]: _type = "Task" [ 666.477528] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 666.486860] env[63028]: DEBUG oslo_vmware.api [None req-38942d5b-be69-4e95-8160-07bf85f3e0d2 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': task-2735172, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.577966] env[63028]: INFO nova.compute.manager [-] [instance: 86d5d264-7a7a-434b-a1c4-e9a004c0a034] Took 1.34 seconds to deallocate network for instance. [ 666.598847] env[63028]: DEBUG oslo_vmware.api [None req-b13b0bec-8617-40ed-b3d4-47ce3e1eeef1 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': task-2735171, 'name': ReconfigVM_Task, 'duration_secs': 0.28122} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 666.600029] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-b13b0bec-8617-40ed-b3d4-47ce3e1eeef1 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: ba57ed92-aaef-460c-bd45-d0cbe09e4615] Reconfigured VM instance instance-0000001c to attach disk [datastore2] ba57ed92-aaef-460c-bd45-d0cbe09e4615/ba57ed92-aaef-460c-bd45-d0cbe09e4615.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 666.600029] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-722706f8-4700-47e8-acf7-7e1deddadddd {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.608090] env[63028]: DEBUG oslo_vmware.api [None req-b13b0bec-8617-40ed-b3d4-47ce3e1eeef1 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Waiting for the task: (returnval){ [ 666.608090] env[63028]: value = "task-2735173" [ 666.608090] env[63028]: _type = "Task" [ 666.608090] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 666.617154] env[63028]: DEBUG oslo_vmware.api [None req-b13b0bec-8617-40ed-b3d4-47ce3e1eeef1 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': task-2735173, 'name': Rename_Task} progress is 5%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.657255] env[63028]: DEBUG oslo_concurrency.lockutils [req-85872942-7d59-4fa8-afc6-2bd47313e629 req-57dde7ad-2495-491b-95a3-11fd7624e937 service nova] Releasing lock "refresh_cache-b9d9fe4e-438c-4f68-b011-9eb9e10a381c" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 666.723523] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c50acad4-074c-4975-9ed8-76f16b3e8022 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.732666] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6f8ba20-3116-4e5c-967c-f551b7470986 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.737224] env[63028]: DEBUG oslo_concurrency.lockutils [None req-1053660c-ddf1-48d6-a5c4-3c3460088539 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Acquiring lock "refresh_cache-c7a3f2c6-8368-49cc-9737-ea1d836f1783" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 666.737525] env[63028]: DEBUG oslo_concurrency.lockutils [None req-1053660c-ddf1-48d6-a5c4-3c3460088539 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Acquired lock "refresh_cache-c7a3f2c6-8368-49cc-9737-ea1d836f1783" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 666.737707] env[63028]: DEBUG nova.network.neutron [None req-1053660c-ddf1-48d6-a5c4-3c3460088539 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] [instance: c7a3f2c6-8368-49cc-9737-ea1d836f1783] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 666.769692] env[63028]: DEBUG nova.compute.utils [None req-1053660c-ddf1-48d6-a5c4-3c3460088539 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] [instance: c7a3f2c6-8368-49cc-9737-ea1d836f1783] Can not refresh info_cache because instance was not found {{(pid=63028) refresh_info_cache_for_instance /opt/stack/nova/nova/compute/utils.py:1056}} [ 666.772665] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-075aff9c-96c9-4f7b-a349-2ca4cef44ea8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.782481] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b9c0c9c-5783-46c4-9774-c4034132ba18 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.797341] env[63028]: DEBUG nova.compute.provider_tree [None req-d9b105ba-e8d2-4569-8e3f-130dc12b502c tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 666.799854] env[63028]: DEBUG nova.network.neutron [None 
req-1053660c-ddf1-48d6-a5c4-3c3460088539 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] [instance: c7a3f2c6-8368-49cc-9737-ea1d836f1783] Instance cache missing network info. {{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 666.902764] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a7566947-5ead-4518-803d-4b9abe5bf090 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Acquiring lock "413f7fea-452b-463f-b396-cdd29e8ffa91" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 666.903104] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a7566947-5ead-4518-803d-4b9abe5bf090 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Lock "413f7fea-452b-463f-b396-cdd29e8ffa91" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 666.903186] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a7566947-5ead-4518-803d-4b9abe5bf090 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Acquiring lock "413f7fea-452b-463f-b396-cdd29e8ffa91-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 666.903386] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a7566947-5ead-4518-803d-4b9abe5bf090 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Lock "413f7fea-452b-463f-b396-cdd29e8ffa91-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 666.903599] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a7566947-5ead-4518-803d-4b9abe5bf090 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Lock "413f7fea-452b-463f-b396-cdd29e8ffa91-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 666.906304] env[63028]: INFO nova.compute.manager [None req-a7566947-5ead-4518-803d-4b9abe5bf090 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] [instance: 413f7fea-452b-463f-b396-cdd29e8ffa91] Terminating instance [ 666.910925] env[63028]: DEBUG nova.network.neutron [None req-1053660c-ddf1-48d6-a5c4-3c3460088539 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] [instance: c7a3f2c6-8368-49cc-9737-ea1d836f1783] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 666.986927] env[63028]: DEBUG oslo_vmware.api [None req-38942d5b-be69-4e95-8160-07bf85f3e0d2 tempest-ServersAdminTestJSON-462896532 
tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': task-2735172, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.096340] env[63028]: DEBUG oslo_concurrency.lockutils [None req-10107302-ac52-488d-9e70-d2e14676970a tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 667.117816] env[63028]: DEBUG oslo_vmware.api [None req-b13b0bec-8617-40ed-b3d4-47ce3e1eeef1 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': task-2735173, 'name': Rename_Task, 'duration_secs': 0.342533} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 667.118254] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-b13b0bec-8617-40ed-b3d4-47ce3e1eeef1 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: ba57ed92-aaef-460c-bd45-d0cbe09e4615] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 667.118559] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2de0c411-2776-4d97-8003-60ddf43fb775 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.125034] env[63028]: DEBUG oslo_vmware.api [None req-b13b0bec-8617-40ed-b3d4-47ce3e1eeef1 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Waiting for the task: (returnval){ [ 667.125034] env[63028]: value = "task-2735174" [ 667.125034] env[63028]: _type = "Task" [ 667.125034] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 667.133170] env[63028]: DEBUG oslo_vmware.api [None req-b13b0bec-8617-40ed-b3d4-47ce3e1eeef1 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': task-2735174, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.300248] env[63028]: DEBUG nova.scheduler.client.report [None req-d9b105ba-e8d2-4569-8e3f-130dc12b502c tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 667.415897] env[63028]: DEBUG oslo_concurrency.lockutils [None req-1053660c-ddf1-48d6-a5c4-3c3460088539 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Releasing lock "refresh_cache-c7a3f2c6-8368-49cc-9737-ea1d836f1783" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 667.416450] env[63028]: DEBUG nova.compute.manager [None req-1053660c-ddf1-48d6-a5c4-3c3460088539 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] [instance: c7a3f2c6-8368-49cc-9737-ea1d836f1783] Start destroying the instance on the hypervisor. {{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 667.416704] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-1053660c-ddf1-48d6-a5c4-3c3460088539 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] [instance: c7a3f2c6-8368-49cc-9737-ea1d836f1783] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 667.417336] env[63028]: DEBUG nova.compute.manager [None req-a7566947-5ead-4518-803d-4b9abe5bf090 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] [instance: 413f7fea-452b-463f-b396-cdd29e8ffa91] Start destroying the instance on the hypervisor. 
{{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 667.417531] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-a7566947-5ead-4518-803d-4b9abe5bf090 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] [instance: 413f7fea-452b-463f-b396-cdd29e8ffa91] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 667.417795] env[63028]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8b462ac8-d74f-4cdd-b60e-bdd0703b1f13 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.420937] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-645e5c6d-a21c-47ce-82cb-e017b9a564e4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.430148] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7566947-5ead-4518-803d-4b9abe5bf090 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] [instance: 413f7fea-452b-463f-b396-cdd29e8ffa91] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 667.430417] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ce96eec9-310a-4e3c-a2eb-902d41b49e75 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.434891] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4606f3b9-3e83-435e-a61c-0e4e1e3909ff {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.453343] env[63028]: DEBUG oslo_vmware.api [None req-a7566947-5ead-4518-803d-4b9abe5bf090 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Waiting for the task: (returnval){ [ 667.453343] env[63028]: value = "task-2735175" [ 667.453343] env[63028]: _type = "Task" [ 667.453343] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 667.454792] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bd2ec8c-d0ea-4ed3-9988-11027a60fbcf {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.471425] env[63028]: WARNING nova.virt.vmwareapi.vmops [None req-1053660c-ddf1-48d6-a5c4-3c3460088539 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] [instance: c7a3f2c6-8368-49cc-9737-ea1d836f1783] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c7a3f2c6-8368-49cc-9737-ea1d836f1783 could not be found. 
[ 667.471665] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-1053660c-ddf1-48d6-a5c4-3c3460088539 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] [instance: c7a3f2c6-8368-49cc-9737-ea1d836f1783] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 667.471877] env[63028]: INFO nova.compute.manager [None req-1053660c-ddf1-48d6-a5c4-3c3460088539 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] [instance: c7a3f2c6-8368-49cc-9737-ea1d836f1783] Took 0.06 seconds to destroy the instance on the hypervisor. [ 667.472181] env[63028]: DEBUG oslo.service.loopingcall [None req-1053660c-ddf1-48d6-a5c4-3c3460088539 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 667.472906] env[63028]: DEBUG nova.compute.manager [-] [instance: c7a3f2c6-8368-49cc-9737-ea1d836f1783] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 667.472986] env[63028]: DEBUG nova.network.neutron [-] [instance: c7a3f2c6-8368-49cc-9737-ea1d836f1783] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 667.489868] env[63028]: DEBUG oslo_vmware.api [None req-a7566947-5ead-4518-803d-4b9abe5bf090 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Task: {'id': task-2735175, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.490571] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-d44a3b69-c923-454a-bfd2-94e1a4dde1e3 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: f3277886-4498-45c6-be68-e71d8293dc00] Updating instance 'f3277886-4498-45c6-be68-e71d8293dc00' progress to 0 {{(pid=63028) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 667.502156] env[63028]: DEBUG oslo_vmware.api [None req-38942d5b-be69-4e95-8160-07bf85f3e0d2 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': task-2735172, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.518546] env[63028]: DEBUG nova.network.neutron [-] [instance: c7a3f2c6-8368-49cc-9737-ea1d836f1783] Instance cache missing network info. {{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 667.635562] env[63028]: DEBUG oslo_vmware.api [None req-b13b0bec-8617-40ed-b3d4-47ce3e1eeef1 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': task-2735174, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.805040] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d9b105ba-e8d2-4569-8e3f-130dc12b502c tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.165s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 667.810653] env[63028]: DEBUG oslo_concurrency.lockutils [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 37.326s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 667.810653] env[63028]: DEBUG oslo_concurrency.lockutils [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 667.810653] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63028) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 667.810653] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7b8ec20a-6c04-49c4-9391-64bfcf0e97f9 tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 36.545s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 667.810653] env[63028]: DEBUG nova.objects.instance [None req-7b8ec20a-6c04-49c4-9391-64bfcf0e97f9 tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] Lazy-loading 'resources' on Instance uuid 44fca05f-51db-4252-bcf8-6bcad37a6147 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 667.813417] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32dca5c1-90be-4940-aa06-51dfb4a62596 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.824333] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07a4ef28-05a7-4652-be6e-509f7c6ac6aa {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.830250] env[63028]: INFO nova.scheduler.client.report [None req-d9b105ba-e8d2-4569-8e3f-130dc12b502c tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Deleted allocations for instance 0dbafad1-ab21-439d-bc8e-e447ac33304e [ 667.842823] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-473e2fee-2bdc-4ca9-9486-a7dff6b41a79 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.849405] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-46c795c0-6298-41dc-bc08-192369ba7d6b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.881233] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179368MB free_disk=110GB free_vcpus=48 pci_devices=None {{(pid=63028) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 667.881423] env[63028]: DEBUG oslo_concurrency.lockutils [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 667.965855] env[63028]: DEBUG oslo_vmware.api [None req-a7566947-5ead-4518-803d-4b9abe5bf090 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Task: {'id': task-2735175, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.003283] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-d44a3b69-c923-454a-bfd2-94e1a4dde1e3 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: f3277886-4498-45c6-be68-e71d8293dc00] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 668.003602] env[63028]: DEBUG oslo_vmware.api [None req-38942d5b-be69-4e95-8160-07bf85f3e0d2 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': task-2735172, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.510326} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 668.003816] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-51793d6a-8414-4088-8aed-cf78136aea3f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.006037] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-38942d5b-be69-4e95-8160-07bf85f3e0d2 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] b9d9fe4e-438c-4f68-b011-9eb9e10a381c/b9d9fe4e-438c-4f68-b011-9eb9e10a381c.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 668.006212] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-38942d5b-be69-4e95-8160-07bf85f3e0d2 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: b9d9fe4e-438c-4f68-b011-9eb9e10a381c] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 668.006327] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1076fa87-2d11-47d4-968a-cf5dd8d9b02b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.013257] env[63028]: DEBUG oslo_vmware.api [None req-d44a3b69-c923-454a-bfd2-94e1a4dde1e3 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Waiting for the task: (returnval){ [ 668.013257] env[63028]: value = "task-2735176" [ 668.013257] env[63028]: _type = "Task" [ 668.013257] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 668.014577] env[63028]: DEBUG oslo_vmware.api [None req-38942d5b-be69-4e95-8160-07bf85f3e0d2 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Waiting for the task: (returnval){ [ 668.014577] env[63028]: value = "task-2735177" [ 668.014577] env[63028]: _type = "Task" [ 668.014577] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 668.021467] env[63028]: DEBUG nova.network.neutron [-] [instance: c7a3f2c6-8368-49cc-9737-ea1d836f1783] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 668.026517] env[63028]: DEBUG oslo_vmware.api [None req-d44a3b69-c923-454a-bfd2-94e1a4dde1e3 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Task: {'id': task-2735176, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.031168] env[63028]: DEBUG oslo_vmware.api [None req-38942d5b-be69-4e95-8160-07bf85f3e0d2 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': task-2735177, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.136073] env[63028]: DEBUG oslo_vmware.api [None req-b13b0bec-8617-40ed-b3d4-47ce3e1eeef1 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': task-2735174, 'name': PowerOnVM_Task, 'duration_secs': 0.713641} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 668.136354] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-b13b0bec-8617-40ed-b3d4-47ce3e1eeef1 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: ba57ed92-aaef-460c-bd45-d0cbe09e4615] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 668.136562] env[63028]: INFO nova.compute.manager [None req-b13b0bec-8617-40ed-b3d4-47ce3e1eeef1 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: ba57ed92-aaef-460c-bd45-d0cbe09e4615] Took 7.84 seconds to spawn the instance on the hypervisor. [ 668.136809] env[63028]: DEBUG nova.compute.manager [None req-b13b0bec-8617-40ed-b3d4-47ce3e1eeef1 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: ba57ed92-aaef-460c-bd45-d0cbe09e4615] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 668.137616] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2398862-a79a-4b5b-8d8e-da1355b87ad9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.345525] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d9b105ba-e8d2-4569-8e3f-130dc12b502c tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Lock "0dbafad1-ab21-439d-bc8e-e447ac33304e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 47.663s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 668.463957] env[63028]: DEBUG oslo_vmware.api [None req-a7566947-5ead-4518-803d-4b9abe5bf090 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Task: {'id': task-2735175, 'name': PowerOffVM_Task, 'duration_secs': 0.867259} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 668.464232] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7566947-5ead-4518-803d-4b9abe5bf090 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] [instance: 413f7fea-452b-463f-b396-cdd29e8ffa91] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 668.464406] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-a7566947-5ead-4518-803d-4b9abe5bf090 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] [instance: 413f7fea-452b-463f-b396-cdd29e8ffa91] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 668.464656] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ab920f23-f6d4-4714-9753-fa9d8a00a4fd {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.534357] env[63028]: INFO nova.compute.manager [-] [instance: c7a3f2c6-8368-49cc-9737-ea1d836f1783] Took 1.06 seconds to deallocate network for instance. [ 668.536864] env[63028]: DEBUG oslo_vmware.api [None req-38942d5b-be69-4e95-8160-07bf85f3e0d2 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': task-2735177, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06879} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 668.540566] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-38942d5b-be69-4e95-8160-07bf85f3e0d2 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: b9d9fe4e-438c-4f68-b011-9eb9e10a381c] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 668.540878] env[63028]: DEBUG oslo_vmware.api [None req-d44a3b69-c923-454a-bfd2-94e1a4dde1e3 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Task: {'id': task-2735176, 'name': PowerOffVM_Task, 'duration_secs': 0.306221} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 668.541218] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-a7566947-5ead-4518-803d-4b9abe5bf090 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] [instance: 413f7fea-452b-463f-b396-cdd29e8ffa91] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 668.541275] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-a7566947-5ead-4518-803d-4b9abe5bf090 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] [instance: 413f7fea-452b-463f-b396-cdd29e8ffa91] Deleting contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 668.541445] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-a7566947-5ead-4518-803d-4b9abe5bf090 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Deleting the datastore file [datastore1] 413f7fea-452b-463f-b396-cdd29e8ffa91 {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 668.544494] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4087691-6cbc-4557-9ae3-c61ed7cce6f4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.546900] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-d44a3b69-c923-454a-bfd2-94e1a4dde1e3 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: f3277886-4498-45c6-be68-e71d8293dc00] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 668.547052] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-d44a3b69-c923-454a-bfd2-94e1a4dde1e3 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: f3277886-4498-45c6-be68-e71d8293dc00] Updating instance 'f3277886-4498-45c6-be68-e71d8293dc00' progress to 17 {{(pid=63028) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 668.551746] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6a111ea5-faeb-44cc-b4c6-fcbcf213fbf7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.574844] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-38942d5b-be69-4e95-8160-07bf85f3e0d2 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: b9d9fe4e-438c-4f68-b011-9eb9e10a381c] Reconfiguring VM instance instance-0000001d to attach disk [datastore2] b9d9fe4e-438c-4f68-b011-9eb9e10a381c/b9d9fe4e-438c-4f68-b011-9eb9e10a381c.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 668.578772] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8e360da6-299a-418a-9570-0eb50b2c4694 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.593778] env[63028]: DEBUG oslo_vmware.api [None req-a7566947-5ead-4518-803d-4b9abe5bf090 tempest-FloatingIPsAssociationTestJSON-1306702303 
tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Waiting for the task: (returnval){ [ 668.593778] env[63028]: value = "task-2735179" [ 668.593778] env[63028]: _type = "Task" [ 668.593778] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 668.599965] env[63028]: DEBUG oslo_vmware.api [None req-38942d5b-be69-4e95-8160-07bf85f3e0d2 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Waiting for the task: (returnval){ [ 668.599965] env[63028]: value = "task-2735180" [ 668.599965] env[63028]: _type = "Task" [ 668.599965] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 668.605987] env[63028]: DEBUG oslo_vmware.api [None req-a7566947-5ead-4518-803d-4b9abe5bf090 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Task: {'id': task-2735179, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.613428] env[63028]: DEBUG oslo_vmware.api [None req-38942d5b-be69-4e95-8160-07bf85f3e0d2 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': task-2735180, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.654470] env[63028]: INFO nova.compute.manager [None req-b13b0bec-8617-40ed-b3d4-47ce3e1eeef1 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: ba57ed92-aaef-460c-bd45-d0cbe09e4615] Took 52.29 seconds to build instance. 
[ 668.831345] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40f0b218-5d62-4819-91dc-1373d35e9bcf {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.839364] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-831866da-6f65-403a-b709-273f359515fd {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.870078] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-127c9755-c15f-47a8-a632-34ee812d2489 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.877838] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42828065-8fd1-4d01-8573-a83aeb2e68e3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.894921] env[63028]: DEBUG nova.compute.provider_tree [None req-7b8ec20a-6c04-49c4-9391-64bfcf0e97f9 tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 669.036025] env[63028]: INFO nova.compute.manager [None req-1053660c-ddf1-48d6-a5c4-3c3460088539 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] [instance: c7a3f2c6-8368-49cc-9737-ea1d836f1783] Instance disappeared during terminate [ 669.036025] env[63028]: DEBUG oslo_concurrency.lockutils [None req-1053660c-ddf1-48d6-a5c4-3c3460088539 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Lock "c7a3f2c6-8368-49cc-9737-ea1d836f1783" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 2.810s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 669.057770] env[63028]: DEBUG nova.virt.hardware [None req-d44a3b69-c923-454a-bfd2-94e1a4dde1e3 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 669.058040] env[63028]: DEBUG nova.virt.hardware [None req-d44a3b69-c923-454a-bfd2-94e1a4dde1e3 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 669.058202] env[63028]: DEBUG nova.virt.hardware [None 
req-d44a3b69-c923-454a-bfd2-94e1a4dde1e3 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 669.058409] env[63028]: DEBUG nova.virt.hardware [None req-d44a3b69-c923-454a-bfd2-94e1a4dde1e3 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 669.058556] env[63028]: DEBUG nova.virt.hardware [None req-d44a3b69-c923-454a-bfd2-94e1a4dde1e3 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 669.058701] env[63028]: DEBUG nova.virt.hardware [None req-d44a3b69-c923-454a-bfd2-94e1a4dde1e3 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 669.058904] env[63028]: DEBUG nova.virt.hardware [None req-d44a3b69-c923-454a-bfd2-94e1a4dde1e3 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 669.059070] env[63028]: DEBUG nova.virt.hardware [None req-d44a3b69-c923-454a-bfd2-94e1a4dde1e3 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 669.059236] env[63028]: DEBUG nova.virt.hardware [None req-d44a3b69-c923-454a-bfd2-94e1a4dde1e3 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 669.059395] env[63028]: DEBUG nova.virt.hardware [None req-d44a3b69-c923-454a-bfd2-94e1a4dde1e3 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 669.059567] env[63028]: DEBUG nova.virt.hardware [None req-d44a3b69-c923-454a-bfd2-94e1a4dde1e3 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 669.064821] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-42842dfb-51f8-46e3-aa50-2850571b6056 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.082715] env[63028]: DEBUG oslo_vmware.api [None req-d44a3b69-c923-454a-bfd2-94e1a4dde1e3 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Waiting for the task: (returnval){ [ 669.082715] env[63028]: value = "task-2735181" [ 669.082715] env[63028]: _type = "Task" [ 669.082715] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 669.091550] env[63028]: DEBUG oslo_vmware.api [None req-d44a3b69-c923-454a-bfd2-94e1a4dde1e3 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Task: {'id': task-2735181, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.106926] env[63028]: DEBUG oslo_vmware.api [None req-a7566947-5ead-4518-803d-4b9abe5bf090 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Task: {'id': task-2735179, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.238792} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 669.111316] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-a7566947-5ead-4518-803d-4b9abe5bf090 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 669.111543] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-a7566947-5ead-4518-803d-4b9abe5bf090 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] [instance: 413f7fea-452b-463f-b396-cdd29e8ffa91] Deleted contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 669.111810] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-a7566947-5ead-4518-803d-4b9abe5bf090 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] [instance: 413f7fea-452b-463f-b396-cdd29e8ffa91] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 669.112148] env[63028]: INFO nova.compute.manager [None req-a7566947-5ead-4518-803d-4b9abe5bf090 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] [instance: 413f7fea-452b-463f-b396-cdd29e8ffa91] Took 1.69 seconds to destroy the instance on the hypervisor. [ 669.112466] env[63028]: DEBUG oslo.service.loopingcall [None req-a7566947-5ead-4518-803d-4b9abe5bf090 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 669.116835] env[63028]: DEBUG nova.compute.manager [-] [instance: 413f7fea-452b-463f-b396-cdd29e8ffa91] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 669.116987] env[63028]: DEBUG nova.network.neutron [-] [instance: 413f7fea-452b-463f-b396-cdd29e8ffa91] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 669.119178] env[63028]: DEBUG oslo_vmware.api [None req-38942d5b-be69-4e95-8160-07bf85f3e0d2 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': task-2735180, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.157461] env[63028]: DEBUG oslo_concurrency.lockutils [None req-b13b0bec-8617-40ed-b3d4-47ce3e1eeef1 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Lock "ba57ed92-aaef-460c-bd45-d0cbe09e4615" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 60.045s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 669.320961] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4bb642d9-e1c2-4c22-8c2f-a6e62dba7907 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Acquiring lock "2dcdb36f-6c2d-4f70-8aed-27f6511ef3e8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 669.321256] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4bb642d9-e1c2-4c22-8c2f-a6e62dba7907 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Lock "2dcdb36f-6c2d-4f70-8aed-27f6511ef3e8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 669.321440] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4bb642d9-e1c2-4c22-8c2f-a6e62dba7907 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Acquiring lock "2dcdb36f-6c2d-4f70-8aed-27f6511ef3e8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 669.321625] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4bb642d9-e1c2-4c22-8c2f-a6e62dba7907 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Lock "2dcdb36f-6c2d-4f70-8aed-27f6511ef3e8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 669.321794] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4bb642d9-e1c2-4c22-8c2f-a6e62dba7907 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Lock "2dcdb36f-6c2d-4f70-8aed-27f6511ef3e8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 669.324629] env[63028]: INFO nova.compute.manager [None req-4bb642d9-e1c2-4c22-8c2f-a6e62dba7907 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] [instance: 2dcdb36f-6c2d-4f70-8aed-27f6511ef3e8] Terminating instance [ 669.399135] env[63028]: DEBUG nova.scheduler.client.report [None req-7b8ec20a-6c04-49c4-9391-64bfcf0e97f9 tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 
'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 669.470160] env[63028]: DEBUG nova.compute.manager [req-9dfa15be-95cf-42db-b2ec-3fbb8a78f6f9 req-b41e70df-628d-4a97-917a-4cd6cb1d0c87 service nova] [instance: 413f7fea-452b-463f-b396-cdd29e8ffa91] Received event network-vif-deleted-892c8e3d-851e-4ad1-bbab-938e49f4cba1 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 669.470689] env[63028]: INFO nova.compute.manager [req-9dfa15be-95cf-42db-b2ec-3fbb8a78f6f9 req-b41e70df-628d-4a97-917a-4cd6cb1d0c87 service nova] [instance: 413f7fea-452b-463f-b396-cdd29e8ffa91] Neutron deleted interface 892c8e3d-851e-4ad1-bbab-938e49f4cba1; detaching it from the instance and deleting it from the info cache [ 669.471037] env[63028]: DEBUG nova.network.neutron [req-9dfa15be-95cf-42db-b2ec-3fbb8a78f6f9 req-b41e70df-628d-4a97-917a-4cd6cb1d0c87 service nova] [instance: 413f7fea-452b-463f-b396-cdd29e8ffa91] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 669.592844] env[63028]: DEBUG oslo_vmware.api [None req-d44a3b69-c923-454a-bfd2-94e1a4dde1e3 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Task: {'id': task-2735181, 'name': ReconfigVM_Task, 'duration_secs': 0.38367} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 669.593177] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-d44a3b69-c923-454a-bfd2-94e1a4dde1e3 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: f3277886-4498-45c6-be68-e71d8293dc00] Updating instance 'f3277886-4498-45c6-be68-e71d8293dc00' progress to 33 {{(pid=63028) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 669.609850] env[63028]: DEBUG oslo_vmware.api [None req-38942d5b-be69-4e95-8160-07bf85f3e0d2 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': task-2735180, 'name': ReconfigVM_Task, 'duration_secs': 0.590757} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 669.610393] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-38942d5b-be69-4e95-8160-07bf85f3e0d2 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: b9d9fe4e-438c-4f68-b011-9eb9e10a381c] Reconfigured VM instance instance-0000001d to attach disk [datastore2] b9d9fe4e-438c-4f68-b011-9eb9e10a381c/b9d9fe4e-438c-4f68-b011-9eb9e10a381c.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 669.610804] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-404da9b5-8c0d-4dfb-8c28-91e774913398 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.618031] env[63028]: DEBUG oslo_vmware.api [None req-38942d5b-be69-4e95-8160-07bf85f3e0d2 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Waiting for the task: (returnval){ [ 669.618031] env[63028]: value = "task-2735182" [ 669.618031] env[63028]: _type = "Task" [ 669.618031] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 669.627962] env[63028]: DEBUG oslo_vmware.api [None req-38942d5b-be69-4e95-8160-07bf85f3e0d2 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': task-2735182, 'name': Rename_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.665306] env[63028]: DEBUG nova.compute.manager [None req-81aea495-dd34-4a7a-b7ed-552eea3a13d1 tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] [instance: 22713da1-ae53-4bbe-ae55-2490440cbd87] Starting instance... 
{{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 669.829755] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4bb642d9-e1c2-4c22-8c2f-a6e62dba7907 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Acquiring lock "refresh_cache-2dcdb36f-6c2d-4f70-8aed-27f6511ef3e8" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 669.829954] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4bb642d9-e1c2-4c22-8c2f-a6e62dba7907 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Acquired lock "refresh_cache-2dcdb36f-6c2d-4f70-8aed-27f6511ef3e8" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 669.830152] env[63028]: DEBUG nova.network.neutron [None req-4bb642d9-e1c2-4c22-8c2f-a6e62dba7907 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] [instance: 2dcdb36f-6c2d-4f70-8aed-27f6511ef3e8] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 669.907072] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7b8ec20a-6c04-49c4-9391-64bfcf0e97f9 tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.099s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 669.910301] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2154be98-cddf-420d-bcaf-1b50951be8e6 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 36.928s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 669.910680] env[63028]: DEBUG nova.objects.instance [None req-2154be98-cddf-420d-bcaf-1b50951be8e6 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Lazy-loading 'resources' on Instance uuid e20ed04f-205b-4aa9-b8b6-e352cd237412 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 669.912305] env[63028]: DEBUG nova.network.neutron [-] [instance: 413f7fea-452b-463f-b396-cdd29e8ffa91] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 669.931506] env[63028]: INFO nova.scheduler.client.report [None req-7b8ec20a-6c04-49c4-9391-64bfcf0e97f9 tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] Deleted allocations for instance 44fca05f-51db-4252-bcf8-6bcad37a6147 [ 669.975689] env[63028]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-410d548c-0a03-4300-816d-3052e729fb57 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.987783] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6365b7d-3ee6-4662-9544-b2aa96a92b16 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.019173] env[63028]: DEBUG nova.compute.manager 
[req-9dfa15be-95cf-42db-b2ec-3fbb8a78f6f9 req-b41e70df-628d-4a97-917a-4cd6cb1d0c87 service nova] [instance: 413f7fea-452b-463f-b396-cdd29e8ffa91] Detach interface failed, port_id=892c8e3d-851e-4ad1-bbab-938e49f4cba1, reason: Instance 413f7fea-452b-463f-b396-cdd29e8ffa91 could not be found. {{(pid=63028) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 670.099793] env[63028]: DEBUG nova.virt.hardware [None req-d44a3b69-c923-454a-bfd2-94e1a4dde1e3 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 670.100379] env[63028]: DEBUG nova.virt.hardware [None req-d44a3b69-c923-454a-bfd2-94e1a4dde1e3 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 670.100379] env[63028]: DEBUG nova.virt.hardware [None req-d44a3b69-c923-454a-bfd2-94e1a4dde1e3 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 670.100379] env[63028]: DEBUG nova.virt.hardware [None req-d44a3b69-c923-454a-bfd2-94e1a4dde1e3 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 670.102521] env[63028]: DEBUG nova.virt.hardware [None req-d44a3b69-c923-454a-bfd2-94e1a4dde1e3 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 670.102521] env[63028]: DEBUG nova.virt.hardware [None req-d44a3b69-c923-454a-bfd2-94e1a4dde1e3 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 670.102521] env[63028]: DEBUG nova.virt.hardware [None req-d44a3b69-c923-454a-bfd2-94e1a4dde1e3 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 670.102521] env[63028]: DEBUG nova.virt.hardware [None req-d44a3b69-c923-454a-bfd2-94e1a4dde1e3 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 670.102521] env[63028]: DEBUG nova.virt.hardware [None req-d44a3b69-c923-454a-bfd2-94e1a4dde1e3 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 670.103075] env[63028]: DEBUG nova.virt.hardware [None req-d44a3b69-c923-454a-bfd2-94e1a4dde1e3 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 670.103075] env[63028]: DEBUG nova.virt.hardware [None req-d44a3b69-c923-454a-bfd2-94e1a4dde1e3 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 670.111254] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-d44a3b69-c923-454a-bfd2-94e1a4dde1e3 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: f3277886-4498-45c6-be68-e71d8293dc00] Reconfiguring VM instance instance-00000013 to detach disk 2000 {{(pid=63028) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 670.111254] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2092c459-92be-4cd9-a285-dbd3ef207834 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.130543] env[63028]: DEBUG oslo_vmware.api [None req-38942d5b-be69-4e95-8160-07bf85f3e0d2 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': task-2735182, 'name': Rename_Task, 'duration_secs': 0.310246} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 670.131855] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-38942d5b-be69-4e95-8160-07bf85f3e0d2 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: b9d9fe4e-438c-4f68-b011-9eb9e10a381c] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 670.132192] env[63028]: DEBUG oslo_vmware.api [None req-d44a3b69-c923-454a-bfd2-94e1a4dde1e3 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Waiting for the task: (returnval){ [ 670.132192] env[63028]: value = "task-2735183" [ 670.132192] env[63028]: _type = "Task" [ 670.132192] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 670.132493] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1baac328-7a19-4efd-9dbf-b536ce5691b0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.145016] env[63028]: DEBUG oslo_vmware.api [None req-38942d5b-be69-4e95-8160-07bf85f3e0d2 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Waiting for the task: (returnval){ [ 670.145016] env[63028]: value = "task-2735184" [ 670.145016] env[63028]: _type = "Task" [ 670.145016] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 670.151490] env[63028]: DEBUG oslo_vmware.api [None req-38942d5b-be69-4e95-8160-07bf85f3e0d2 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': task-2735184, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.187115] env[63028]: DEBUG oslo_concurrency.lockutils [None req-81aea495-dd34-4a7a-b7ed-552eea3a13d1 tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 670.358026] env[63028]: DEBUG nova.network.neutron [None req-4bb642d9-e1c2-4c22-8c2f-a6e62dba7907 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] [instance: 2dcdb36f-6c2d-4f70-8aed-27f6511ef3e8] Instance cache missing network info. {{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 670.420137] env[63028]: INFO nova.compute.manager [-] [instance: 413f7fea-452b-463f-b396-cdd29e8ffa91] Took 1.30 seconds to deallocate network for instance. [ 670.440060] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7b8ec20a-6c04-49c4-9391-64bfcf0e97f9 tempest-InstanceActionsTestJSON-1724293896 tempest-InstanceActionsTestJSON-1724293896-project-member] Lock "44fca05f-51db-4252-bcf8-6bcad37a6147" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 43.211s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 670.458068] env[63028]: DEBUG nova.network.neutron [None req-4bb642d9-e1c2-4c22-8c2f-a6e62dba7907 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] [instance: 2dcdb36f-6c2d-4f70-8aed-27f6511ef3e8] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 670.645632] env[63028]: DEBUG oslo_vmware.api [None req-d44a3b69-c923-454a-bfd2-94e1a4dde1e3 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Task: {'id': task-2735183, 'name': ReconfigVM_Task, 'duration_secs': 0.311243} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 670.649299] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-d44a3b69-c923-454a-bfd2-94e1a4dde1e3 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: f3277886-4498-45c6-be68-e71d8293dc00] Reconfigured VM instance instance-00000013 to detach disk 2000 {{(pid=63028) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 670.652804] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ec332b2-f3e2-4322-86ef-d1c0c0746a9a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.663664] env[63028]: DEBUG oslo_vmware.api [None req-38942d5b-be69-4e95-8160-07bf85f3e0d2 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': task-2735184, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.683942] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-d44a3b69-c923-454a-bfd2-94e1a4dde1e3 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: f3277886-4498-45c6-be68-e71d8293dc00] Reconfiguring VM instance instance-00000013 to attach disk [datastore1] f3277886-4498-45c6-be68-e71d8293dc00/f3277886-4498-45c6-be68-e71d8293dc00.vmdk or device None with type thin {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 670.687441] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c620be33-a8d4-4de1-bc1e-a455c8a80d32 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.709833] env[63028]: DEBUG oslo_vmware.api [None req-d44a3b69-c923-454a-bfd2-94e1a4dde1e3 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Waiting for the task: (returnval){ [ 670.709833] env[63028]: value = "task-2735185" [ 670.709833] env[63028]: _type = "Task" [ 670.709833] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 670.721315] env[63028]: DEBUG oslo_vmware.api [None req-d44a3b69-c923-454a-bfd2-94e1a4dde1e3 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Task: {'id': task-2735185, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.933014] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a7566947-5ead-4518-803d-4b9abe5bf090 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 670.956672] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90c15d88-4a3d-4ebf-a665-97c46709a3f6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.963690] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4bb642d9-e1c2-4c22-8c2f-a6e62dba7907 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Releasing lock "refresh_cache-2dcdb36f-6c2d-4f70-8aed-27f6511ef3e8" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 670.964108] env[63028]: DEBUG nova.compute.manager [None req-4bb642d9-e1c2-4c22-8c2f-a6e62dba7907 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] [instance: 2dcdb36f-6c2d-4f70-8aed-27f6511ef3e8] Start destroying the instance on the hypervisor. {{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 670.964297] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-4bb642d9-e1c2-4c22-8c2f-a6e62dba7907 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] [instance: 2dcdb36f-6c2d-4f70-8aed-27f6511ef3e8] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 670.968091] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b84372ff-693b-4add-811f-6f15c34ee5ad {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.971170] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30a51eaf-ed23-4b9c-aa30-1e99b511d60c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.979664] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-4bb642d9-e1c2-4c22-8c2f-a6e62dba7907 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] [instance: 2dcdb36f-6c2d-4f70-8aed-27f6511ef3e8] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 671.002887] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-40fb4c5f-4b52-4a94-81d9-f9e4d9dfc885 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.005678] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-506977f2-024c-4602-b58c-dd9bac9627a4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.013690] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39578e8a-35cb-4193-bb2a-e901a0a6671f {{(pid=63028) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.018737] env[63028]: DEBUG oslo_vmware.api [None req-4bb642d9-e1c2-4c22-8c2f-a6e62dba7907 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Waiting for the task: (returnval){ [ 671.018737] env[63028]: value = "task-2735186" [ 671.018737] env[63028]: _type = "Task" [ 671.018737] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 671.029801] env[63028]: DEBUG nova.compute.provider_tree [None req-2154be98-cddf-420d-bcaf-1b50951be8e6 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 671.037132] env[63028]: DEBUG oslo_vmware.api [None req-4bb642d9-e1c2-4c22-8c2f-a6e62dba7907 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Task: {'id': task-2735186, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 671.154741] env[63028]: DEBUG oslo_vmware.api [None req-38942d5b-be69-4e95-8160-07bf85f3e0d2 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': task-2735184, 'name': PowerOnVM_Task, 'duration_secs': 0.617359} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 671.155115] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-38942d5b-be69-4e95-8160-07bf85f3e0d2 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: b9d9fe4e-438c-4f68-b011-9eb9e10a381c] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 671.155417] env[63028]: INFO nova.compute.manager [None req-38942d5b-be69-4e95-8160-07bf85f3e0d2 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: b9d9fe4e-438c-4f68-b011-9eb9e10a381c] Took 8.13 seconds to spawn the instance on the hypervisor. [ 671.155685] env[63028]: DEBUG nova.compute.manager [None req-38942d5b-be69-4e95-8160-07bf85f3e0d2 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: b9d9fe4e-438c-4f68-b011-9eb9e10a381c] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 671.156494] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d974e95c-9754-4215-9159-e961c2f9d0f0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.219273] env[63028]: DEBUG oslo_vmware.api [None req-d44a3b69-c923-454a-bfd2-94e1a4dde1e3 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Task: {'id': task-2735185, 'name': ReconfigVM_Task, 'duration_secs': 0.295707} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 671.219612] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-d44a3b69-c923-454a-bfd2-94e1a4dde1e3 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: f3277886-4498-45c6-be68-e71d8293dc00] Reconfigured VM instance instance-00000013 to attach disk [datastore1] f3277886-4498-45c6-be68-e71d8293dc00/f3277886-4498-45c6-be68-e71d8293dc00.vmdk or device None with type thin {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 671.219969] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-d44a3b69-c923-454a-bfd2-94e1a4dde1e3 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: f3277886-4498-45c6-be68-e71d8293dc00] Updating instance 'f3277886-4498-45c6-be68-e71d8293dc00' progress to 50 {{(pid=63028) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 671.528952] env[63028]: DEBUG oslo_vmware.api [None req-4bb642d9-e1c2-4c22-8c2f-a6e62dba7907 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Task: {'id': task-2735186, 'name': PowerOffVM_Task, 'duration_secs': 0.236453} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 671.529226] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-4bb642d9-e1c2-4c22-8c2f-a6e62dba7907 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] [instance: 2dcdb36f-6c2d-4f70-8aed-27f6511ef3e8] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 671.529390] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-4bb642d9-e1c2-4c22-8c2f-a6e62dba7907 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] [instance: 2dcdb36f-6c2d-4f70-8aed-27f6511ef3e8] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 671.529715] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4a7f47ca-ba8d-4cac-8026-b9b54cea1293 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.533150] env[63028]: DEBUG nova.scheduler.client.report [None req-2154be98-cddf-420d-bcaf-1b50951be8e6 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 671.552668] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-4bb642d9-e1c2-4c22-8c2f-a6e62dba7907 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] [instance: 2dcdb36f-6c2d-4f70-8aed-27f6511ef3e8] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 671.552871] env[63028]: DEBUG 
nova.virt.vmwareapi.vmops [None req-4bb642d9-e1c2-4c22-8c2f-a6e62dba7907 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] [instance: 2dcdb36f-6c2d-4f70-8aed-27f6511ef3e8] Deleting contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 671.554364] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-4bb642d9-e1c2-4c22-8c2f-a6e62dba7907 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Deleting the datastore file [datastore1] 2dcdb36f-6c2d-4f70-8aed-27f6511ef3e8 {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 671.554860] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d5c44f9e-2516-49f7-b7ca-c4230a8087ec {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.560539] env[63028]: DEBUG oslo_vmware.api [None req-4bb642d9-e1c2-4c22-8c2f-a6e62dba7907 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Waiting for the task: (returnval){ [ 671.560539] env[63028]: value = "task-2735188" [ 671.560539] env[63028]: _type = "Task" [ 671.560539] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 671.568887] env[63028]: DEBUG oslo_vmware.api [None req-4bb642d9-e1c2-4c22-8c2f-a6e62dba7907 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Task: {'id': task-2735188, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 671.677613] env[63028]: INFO nova.compute.manager [None req-38942d5b-be69-4e95-8160-07bf85f3e0d2 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: b9d9fe4e-438c-4f68-b011-9eb9e10a381c] Took 49.96 seconds to build instance. 
[ 671.727475] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-600b13c2-1876-42b6-943c-a20790a10449 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.748093] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d08b455-7fb3-4e0d-b5d5-3d9e39a6143f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.766292] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-d44a3b69-c923-454a-bfd2-94e1a4dde1e3 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: f3277886-4498-45c6-be68-e71d8293dc00] Updating instance 'f3277886-4498-45c6-be68-e71d8293dc00' progress to 67 {{(pid=63028) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 672.038163] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2154be98-cddf-420d-bcaf-1b50951be8e6 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.128s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 672.047655] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c62dbb04-bda2-4f0b-9942-c38a36376853 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 37.805s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 672.049309] env[63028]: INFO nova.compute.claims [None req-c62dbb04-bda2-4f0b-9942-c38a36376853 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: b9db75ba-6832-45e8-8faf-d1cdaa7dabdd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 672.070936] env[63028]: DEBUG oslo_vmware.api [None req-4bb642d9-e1c2-4c22-8c2f-a6e62dba7907 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Task: {'id': task-2735188, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.099372} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 672.070936] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-4bb642d9-e1c2-4c22-8c2f-a6e62dba7907 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 672.071209] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-4bb642d9-e1c2-4c22-8c2f-a6e62dba7907 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] [instance: 2dcdb36f-6c2d-4f70-8aed-27f6511ef3e8] Deleted contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 672.072471] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-4bb642d9-e1c2-4c22-8c2f-a6e62dba7907 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] [instance: 2dcdb36f-6c2d-4f70-8aed-27f6511ef3e8] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 672.072471] env[63028]: INFO nova.compute.manager [None req-4bb642d9-e1c2-4c22-8c2f-a6e62dba7907 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] [instance: 2dcdb36f-6c2d-4f70-8aed-27f6511ef3e8] Took 1.11 seconds to destroy the instance on the hypervisor. [ 672.072471] env[63028]: DEBUG oslo.service.loopingcall [None req-4bb642d9-e1c2-4c22-8c2f-a6e62dba7907 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 672.072936] env[63028]: DEBUG nova.compute.manager [-] [instance: 2dcdb36f-6c2d-4f70-8aed-27f6511ef3e8] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 672.073133] env[63028]: DEBUG nova.network.neutron [-] [instance: 2dcdb36f-6c2d-4f70-8aed-27f6511ef3e8] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 672.081966] env[63028]: INFO nova.scheduler.client.report [None req-2154be98-cddf-420d-bcaf-1b50951be8e6 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Deleted allocations for instance e20ed04f-205b-4aa9-b8b6-e352cd237412 [ 672.179106] env[63028]: DEBUG oslo_concurrency.lockutils [None req-38942d5b-be69-4e95-8160-07bf85f3e0d2 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Lock "b9d9fe4e-438c-4f68-b011-9eb9e10a381c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 61.317s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 672.313173] env[63028]: DEBUG nova.network.neutron [-] [instance: 2dcdb36f-6c2d-4f70-8aed-27f6511ef3e8] Instance cache missing network info. 
{{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 672.333349] env[63028]: DEBUG nova.network.neutron [None req-d44a3b69-c923-454a-bfd2-94e1a4dde1e3 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: f3277886-4498-45c6-be68-e71d8293dc00] Port 08a61148-5b3a-4bb0-a130-3eda62d6bf7c binding to destination host cpu-1 is already ACTIVE {{(pid=63028) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 672.593104] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2154be98-cddf-420d-bcaf-1b50951be8e6 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Lock "e20ed04f-205b-4aa9-b8b6-e352cd237412" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 43.672s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 672.684969] env[63028]: DEBUG nova.compute.manager [None req-ac78c9bd-9fb2-4d81-94df-f75521abb985 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 0d96ba8e-b46b-48ae-957c-cdc49762c395] Starting instance... {{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 672.818146] env[63028]: DEBUG nova.network.neutron [-] [instance: 2dcdb36f-6c2d-4f70-8aed-27f6511ef3e8] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 673.170107] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c2083659-4645-4440-9683-4af8a1564415 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Acquiring lock "3e45e7f3-a34f-4eab-9fff-1c874c832e2a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 673.170107] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c2083659-4645-4440-9683-4af8a1564415 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Lock "3e45e7f3-a34f-4eab-9fff-1c874c832e2a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 673.215028] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ac78c9bd-9fb2-4d81-94df-f75521abb985 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 673.321461] env[63028]: INFO nova.compute.manager [-] [instance: 2dcdb36f-6c2d-4f70-8aed-27f6511ef3e8] Took 1.25 seconds to deallocate network for instance. 
[ 673.358659] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d44a3b69-c923-454a-bfd2-94e1a4dde1e3 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Acquiring lock "f3277886-4498-45c6-be68-e71d8293dc00-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 673.358939] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d44a3b69-c923-454a-bfd2-94e1a4dde1e3 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Lock "f3277886-4498-45c6-be68-e71d8293dc00-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 673.359175] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d44a3b69-c923-454a-bfd2-94e1a4dde1e3 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Lock "f3277886-4498-45c6-be68-e71d8293dc00-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 673.607234] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92da1f5e-f57a-4164-ac6e-c7023b7e07e3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.620758] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84d98add-dece-4d01-b6ce-0719fdebf58a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.662075] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21542039-4e4e-4ea4-8e25-5dcb33278023 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.672254] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c033436-d647-4e18-9655-a6e73141c127 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.685268] env[63028]: DEBUG nova.compute.provider_tree [None req-c62dbb04-bda2-4f0b-9942-c38a36376853 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Updating inventory in ProviderTree for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 673.830677] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4bb642d9-e1c2-4c22-8c2f-a6e62dba7907 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 674.211011] env[63028]: ERROR nova.scheduler.client.report [None req-c62dbb04-bda2-4f0b-9942-c38a36376853 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [req-898b9416-c487-48f7-9793-7f91aca093ba] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-898b9416-c487-48f7-9793-7f91aca093ba"}]} [ 674.230055] env[63028]: DEBUG nova.scheduler.client.report [None req-c62dbb04-bda2-4f0b-9942-c38a36376853 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Refreshing inventories for resource provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 674.242567] env[63028]: DEBUG nova.scheduler.client.report [None req-c62dbb04-bda2-4f0b-9942-c38a36376853 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Updating ProviderTree inventory for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 674.242863] env[63028]: DEBUG nova.compute.provider_tree [None req-c62dbb04-bda2-4f0b-9942-c38a36376853 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Updating resource provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 generation from 60 to 61 during operation: update_inventory {{(pid=63028) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 674.243085] env[63028]: DEBUG nova.compute.provider_tree [None req-c62dbb04-bda2-4f0b-9942-c38a36376853 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Updating inventory in ProviderTree for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 674.254753] env[63028]: DEBUG nova.scheduler.client.report [None req-c62dbb04-bda2-4f0b-9942-c38a36376853 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Refreshing aggregate associations for resource provider 
399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2, aggregates: None {{(pid=63028) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 674.270368] env[63028]: DEBUG nova.scheduler.client.report [None req-c62dbb04-bda2-4f0b-9942-c38a36376853 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Refreshing trait associations for resource provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=63028) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 674.414303] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d44a3b69-c923-454a-bfd2-94e1a4dde1e3 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Acquiring lock "refresh_cache-f3277886-4498-45c6-be68-e71d8293dc00" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 674.414495] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d44a3b69-c923-454a-bfd2-94e1a4dde1e3 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Acquired lock "refresh_cache-f3277886-4498-45c6-be68-e71d8293dc00" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 674.414665] env[63028]: DEBUG nova.network.neutron [None req-d44a3b69-c923-454a-bfd2-94e1a4dde1e3 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: f3277886-4498-45c6-be68-e71d8293dc00] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 674.618580] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4f6edbf0-2dbd-479d-b1c2-51e25413dafe tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Acquiring lock "a4b0d948-d950-414a-b23f-faefa5ab038c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 674.618817] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4f6edbf0-2dbd-479d-b1c2-51e25413dafe tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Lock "a4b0d948-d950-414a-b23f-faefa5ab038c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 674.680173] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e525f053-29fd-4bf6-a1f6-211bff6243b4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.689445] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abdc074b-2f99-40ad-8950-6044aff5f717 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.720695] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6acd82f5-cbb6-41e9-82c9-572fe32ccd3a {{(pid=63028) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.728167] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54f52d8b-96d6-4bd0-9e2d-aadd7c4b4b56 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.741402] env[63028]: DEBUG nova.compute.provider_tree [None req-c62dbb04-bda2-4f0b-9942-c38a36376853 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Updating inventory in ProviderTree for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 675.131491] env[63028]: DEBUG nova.network.neutron [None req-d44a3b69-c923-454a-bfd2-94e1a4dde1e3 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: f3277886-4498-45c6-be68-e71d8293dc00] Updating instance_info_cache with network_info: [{"id": "08a61148-5b3a-4bb0-a130-3eda62d6bf7c", "address": "fa:16:3e:8e:67:6c", "network": {"id": "47c482bc-2ff1-431d-8910-0bf36def79a2", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.130", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "96661ac8d4f04d6e97eea4809b444133", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56136ef6-99d7-4562-9a9f-d66fec951c5c", "external-id": "nsx-vlan-transportzone-32", "segmentation_id": 32, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap08a61148-5b", "ovs_interfaceid": "08a61148-5b3a-4bb0-a130-3eda62d6bf7c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 675.278351] env[63028]: DEBUG nova.scheduler.client.report [None req-c62dbb04-bda2-4f0b-9942-c38a36376853 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Updated inventory for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 with generation 61 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 675.278663] env[63028]: DEBUG nova.compute.provider_tree [None req-c62dbb04-bda2-4f0b-9942-c38a36376853 tempest-ImagesTestJSON-852617634 
tempest-ImagesTestJSON-852617634-project-member] Updating resource provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 generation from 61 to 62 during operation: update_inventory {{(pid=63028) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 675.278882] env[63028]: DEBUG nova.compute.provider_tree [None req-c62dbb04-bda2-4f0b-9942-c38a36376853 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Updating inventory in ProviderTree for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 675.635143] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d44a3b69-c923-454a-bfd2-94e1a4dde1e3 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Releasing lock "refresh_cache-f3277886-4498-45c6-be68-e71d8293dc00" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 675.784069] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c62dbb04-bda2-4f0b-9942-c38a36376853 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.736s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 675.784707] env[63028]: DEBUG nova.compute.manager [None req-c62dbb04-bda2-4f0b-9942-c38a36376853 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: b9db75ba-6832-45e8-8faf-d1cdaa7dabdd] Start building networks asynchronously for instance. 
{{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 675.787778] env[63028]: DEBUG oslo_concurrency.lockutils [None req-cb9a34be-b0fe-4e26-a404-5da53e8a940a tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 40.356s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 675.789154] env[63028]: INFO nova.compute.claims [None req-cb9a34be-b0fe-4e26-a404-5da53e8a940a tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] [instance: 8a09beac-4b54-4fbb-9bac-3dcfe4c21fb3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 676.157807] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-674d4444-dee8-4a38-929a-c8c71d7cb310 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.178552] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-600d09b3-afc0-4ed2-8914-723bcef402f3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.185479] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-d44a3b69-c923-454a-bfd2-94e1a4dde1e3 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: f3277886-4498-45c6-be68-e71d8293dc00] Updating instance 'f3277886-4498-45c6-be68-e71d8293dc00' progress to 83 {{(pid=63028) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 676.293798] env[63028]: DEBUG nova.compute.utils [None req-c62dbb04-bda2-4f0b-9942-c38a36376853 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 676.297519] env[63028]: DEBUG nova.compute.manager [None req-c62dbb04-bda2-4f0b-9942-c38a36376853 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: b9db75ba-6832-45e8-8faf-d1cdaa7dabdd] Allocating IP information in the background. 
{{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 676.297519] env[63028]: DEBUG nova.network.neutron [None req-c62dbb04-bda2-4f0b-9942-c38a36376853 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: b9db75ba-6832-45e8-8faf-d1cdaa7dabdd] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 676.340717] env[63028]: DEBUG nova.policy [None req-c62dbb04-bda2-4f0b-9942-c38a36376853 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ab9cb927bc134277bb980682fef01978', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5ef9a42771824708832a74238bbe89c0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 676.638862] env[63028]: DEBUG nova.network.neutron [None req-c62dbb04-bda2-4f0b-9942-c38a36376853 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: b9db75ba-6832-45e8-8faf-d1cdaa7dabdd] Successfully created port: 49500b3e-3a86-4a0a-94ed-762a86a78124 {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 676.691763] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-d44a3b69-c923-454a-bfd2-94e1a4dde1e3 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: f3277886-4498-45c6-be68-e71d8293dc00] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 676.692085] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9e8123d0-f3d9-4bb5-aac8-6269dd5ba069 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.698676] env[63028]: DEBUG oslo_vmware.api [None req-d44a3b69-c923-454a-bfd2-94e1a4dde1e3 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Waiting for the task: (returnval){ [ 676.698676] env[63028]: value = "task-2735189" [ 676.698676] env[63028]: _type = "Task" [ 676.698676] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 676.707964] env[63028]: DEBUG oslo_vmware.api [None req-d44a3b69-c923-454a-bfd2-94e1a4dde1e3 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Task: {'id': task-2735189, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 676.797985] env[63028]: DEBUG nova.compute.manager [None req-c62dbb04-bda2-4f0b-9942-c38a36376853 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: b9db75ba-6832-45e8-8faf-d1cdaa7dabdd] Start building block device mappings for instance. 
{{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 677.210901] env[63028]: DEBUG oslo_vmware.api [None req-d44a3b69-c923-454a-bfd2-94e1a4dde1e3 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Task: {'id': task-2735189, 'name': PowerOnVM_Task, 'duration_secs': 0.370815} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 677.211180] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-d44a3b69-c923-454a-bfd2-94e1a4dde1e3 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: f3277886-4498-45c6-be68-e71d8293dc00] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 677.211361] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-d44a3b69-c923-454a-bfd2-94e1a4dde1e3 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: f3277886-4498-45c6-be68-e71d8293dc00] Updating instance 'f3277886-4498-45c6-be68-e71d8293dc00' progress to 100 {{(pid=63028) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 677.360859] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30cc886e-b575-48df-8e0d-41c0341db6ad {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.368994] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c82654d-4a6a-4baf-9a1a-825a56a4bab3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.401421] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59706887-4247-49c0-b6fe-4801d70b0cc7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.408627] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f351ce03-aaa1-4c2b-b0f7-6f173d2c37c0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.421715] env[63028]: DEBUG nova.compute.provider_tree [None req-cb9a34be-b0fe-4e26-a404-5da53e8a940a tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 677.809207] env[63028]: DEBUG nova.compute.manager [None req-c62dbb04-bda2-4f0b-9942-c38a36376853 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: b9db75ba-6832-45e8-8faf-d1cdaa7dabdd] Start spawning the instance on the hypervisor. 
{{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 677.835074] env[63028]: DEBUG nova.virt.hardware [None req-c62dbb04-bda2-4f0b-9942-c38a36376853 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 677.835363] env[63028]: DEBUG nova.virt.hardware [None req-c62dbb04-bda2-4f0b-9942-c38a36376853 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 677.835502] env[63028]: DEBUG nova.virt.hardware [None req-c62dbb04-bda2-4f0b-9942-c38a36376853 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 677.835687] env[63028]: DEBUG nova.virt.hardware [None req-c62dbb04-bda2-4f0b-9942-c38a36376853 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 677.835834] env[63028]: DEBUG nova.virt.hardware [None req-c62dbb04-bda2-4f0b-9942-c38a36376853 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 677.835978] env[63028]: DEBUG nova.virt.hardware [None req-c62dbb04-bda2-4f0b-9942-c38a36376853 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 677.836203] env[63028]: DEBUG nova.virt.hardware [None req-c62dbb04-bda2-4f0b-9942-c38a36376853 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 677.836379] env[63028]: DEBUG nova.virt.hardware [None req-c62dbb04-bda2-4f0b-9942-c38a36376853 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 677.836542] env[63028]: DEBUG nova.virt.hardware [None req-c62dbb04-bda2-4f0b-9942-c38a36376853 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Got 1 possible 
topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 677.836700] env[63028]: DEBUG nova.virt.hardware [None req-c62dbb04-bda2-4f0b-9942-c38a36376853 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 677.836887] env[63028]: DEBUG nova.virt.hardware [None req-c62dbb04-bda2-4f0b-9942-c38a36376853 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 677.837835] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0124f01-bf9d-45b7-b1ac-5d7f82cd5a20 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.845712] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04f1ac3b-d436-4fe6-b0df-70575da34c87 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.924816] env[63028]: DEBUG nova.scheduler.client.report [None req-cb9a34be-b0fe-4e26-a404-5da53e8a940a tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 678.070603] env[63028]: DEBUG nova.compute.manager [req-35051d69-4da5-4e90-bd4f-2acb6acb0883 req-67bfed77-4558-405b-8352-e25cf992dbac service nova] [instance: b9db75ba-6832-45e8-8faf-d1cdaa7dabdd] Received event network-vif-plugged-49500b3e-3a86-4a0a-94ed-762a86a78124 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 678.071108] env[63028]: DEBUG oslo_concurrency.lockutils [req-35051d69-4da5-4e90-bd4f-2acb6acb0883 req-67bfed77-4558-405b-8352-e25cf992dbac service nova] Acquiring lock "b9db75ba-6832-45e8-8faf-d1cdaa7dabdd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 678.071330] env[63028]: DEBUG oslo_concurrency.lockutils [req-35051d69-4da5-4e90-bd4f-2acb6acb0883 req-67bfed77-4558-405b-8352-e25cf992dbac service nova] Lock "b9db75ba-6832-45e8-8faf-d1cdaa7dabdd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 678.071503] env[63028]: DEBUG oslo_concurrency.lockutils [req-35051d69-4da5-4e90-bd4f-2acb6acb0883 req-67bfed77-4558-405b-8352-e25cf992dbac service nova] Lock "b9db75ba-6832-45e8-8faf-d1cdaa7dabdd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 678.071674] env[63028]: DEBUG nova.compute.manager [req-35051d69-4da5-4e90-bd4f-2acb6acb0883 req-67bfed77-4558-405b-8352-e25cf992dbac service nova] [instance: b9db75ba-6832-45e8-8faf-d1cdaa7dabdd] No waiting events found dispatching network-vif-plugged-49500b3e-3a86-4a0a-94ed-762a86a78124 {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 678.071837] env[63028]: WARNING nova.compute.manager [req-35051d69-4da5-4e90-bd4f-2acb6acb0883 req-67bfed77-4558-405b-8352-e25cf992dbac service nova] [instance: b9db75ba-6832-45e8-8faf-d1cdaa7dabdd] Received unexpected event network-vif-plugged-49500b3e-3a86-4a0a-94ed-762a86a78124 for instance with vm_state building and task_state spawning. [ 678.185258] env[63028]: DEBUG nova.network.neutron [None req-c62dbb04-bda2-4f0b-9942-c38a36376853 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: b9db75ba-6832-45e8-8faf-d1cdaa7dabdd] Successfully updated port: 49500b3e-3a86-4a0a-94ed-762a86a78124 {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 678.430214] env[63028]: DEBUG oslo_concurrency.lockutils [None req-cb9a34be-b0fe-4e26-a404-5da53e8a940a tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.642s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 678.430475] env[63028]: DEBUG nova.compute.manager [None req-cb9a34be-b0fe-4e26-a404-5da53e8a940a tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] [instance: 8a09beac-4b54-4fbb-9bac-3dcfe4c21fb3] Start building networks asynchronously for instance. 
{{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 678.433509] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a8d45a3e-08b3-4b12-b769-c8ee0aba7c03 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 41.874s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 678.435135] env[63028]: INFO nova.compute.claims [None req-a8d45a3e-08b3-4b12-b769-c8ee0aba7c03 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] [instance: 8c7c8713-d5d7-490e-aba5-25d98bfbfaa0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 678.687254] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c62dbb04-bda2-4f0b-9942-c38a36376853 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Acquiring lock "refresh_cache-b9db75ba-6832-45e8-8faf-d1cdaa7dabdd" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 678.687424] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c62dbb04-bda2-4f0b-9942-c38a36376853 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Acquired lock "refresh_cache-b9db75ba-6832-45e8-8faf-d1cdaa7dabdd" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 678.687616] env[63028]: DEBUG nova.network.neutron [None req-c62dbb04-bda2-4f0b-9942-c38a36376853 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: b9db75ba-6832-45e8-8faf-d1cdaa7dabdd] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 678.941334] env[63028]: DEBUG nova.compute.utils [None req-cb9a34be-b0fe-4e26-a404-5da53e8a940a tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 678.942524] env[63028]: DEBUG nova.compute.manager [None req-cb9a34be-b0fe-4e26-a404-5da53e8a940a tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] [instance: 8a09beac-4b54-4fbb-9bac-3dcfe4c21fb3] Allocating IP information in the background. 
{{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 678.942697] env[63028]: DEBUG nova.network.neutron [None req-cb9a34be-b0fe-4e26-a404-5da53e8a940a tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] [instance: 8a09beac-4b54-4fbb-9bac-3dcfe4c21fb3] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 678.946694] env[63028]: DEBUG oslo_concurrency.lockutils [None req-71315007-5b90-4f7a-bfbb-cb9db9d67454 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Acquiring lock "f3277886-4498-45c6-be68-e71d8293dc00" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 678.948036] env[63028]: DEBUG oslo_concurrency.lockutils [None req-71315007-5b90-4f7a-bfbb-cb9db9d67454 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Lock "f3277886-4498-45c6-be68-e71d8293dc00" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 678.948036] env[63028]: DEBUG nova.compute.manager [None req-71315007-5b90-4f7a-bfbb-cb9db9d67454 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: f3277886-4498-45c6-be68-e71d8293dc00] Going to confirm migration 1 {{(pid=63028) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 678.997220] env[63028]: DEBUG nova.policy [None req-cb9a34be-b0fe-4e26-a404-5da53e8a940a tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9ea55676424d45bf9fec5e787076e65f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '288105b341df4d7586d2734706a0965a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 679.226804] env[63028]: DEBUG nova.network.neutron [None req-c62dbb04-bda2-4f0b-9942-c38a36376853 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: b9db75ba-6832-45e8-8faf-d1cdaa7dabdd] Instance cache missing network info. 
{{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 679.325880] env[63028]: DEBUG nova.network.neutron [None req-cb9a34be-b0fe-4e26-a404-5da53e8a940a tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] [instance: 8a09beac-4b54-4fbb-9bac-3dcfe4c21fb3] Successfully created port: 4c75ac24-25df-4c2f-8ce9-e0b2ec182139 {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 679.432027] env[63028]: DEBUG nova.network.neutron [None req-c62dbb04-bda2-4f0b-9942-c38a36376853 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: b9db75ba-6832-45e8-8faf-d1cdaa7dabdd] Updating instance_info_cache with network_info: [{"id": "49500b3e-3a86-4a0a-94ed-762a86a78124", "address": "fa:16:3e:95:a0:32", "network": {"id": "35eace8d-0404-444c-a1ec-e13fd5687644", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1634949692-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ef9a42771824708832a74238bbe89c0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c330dbdb-ad20-4e7e-8a12-66e4a914a84a", "external-id": "nsx-vlan-transportzone-181", "segmentation_id": 181, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap49500b3e-3a", "ovs_interfaceid": "49500b3e-3a86-4a0a-94ed-762a86a78124", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 679.447753] env[63028]: DEBUG nova.compute.manager [None req-cb9a34be-b0fe-4e26-a404-5da53e8a940a tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] [instance: 8a09beac-4b54-4fbb-9bac-3dcfe4c21fb3] Start building block device mappings for instance. 
{{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 679.531484] env[63028]: DEBUG oslo_concurrency.lockutils [None req-71315007-5b90-4f7a-bfbb-cb9db9d67454 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Acquiring lock "refresh_cache-f3277886-4498-45c6-be68-e71d8293dc00" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 679.531594] env[63028]: DEBUG oslo_concurrency.lockutils [None req-71315007-5b90-4f7a-bfbb-cb9db9d67454 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Acquired lock "refresh_cache-f3277886-4498-45c6-be68-e71d8293dc00" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 679.531771] env[63028]: DEBUG nova.network.neutron [None req-71315007-5b90-4f7a-bfbb-cb9db9d67454 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: f3277886-4498-45c6-be68-e71d8293dc00] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 679.531947] env[63028]: DEBUG nova.objects.instance [None req-71315007-5b90-4f7a-bfbb-cb9db9d67454 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Lazy-loading 'info_cache' on Instance uuid f3277886-4498-45c6-be68-e71d8293dc00 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 679.933837] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c62dbb04-bda2-4f0b-9942-c38a36376853 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Releasing lock "refresh_cache-b9db75ba-6832-45e8-8faf-d1cdaa7dabdd" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 679.934179] env[63028]: DEBUG nova.compute.manager [None req-c62dbb04-bda2-4f0b-9942-c38a36376853 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: b9db75ba-6832-45e8-8faf-d1cdaa7dabdd] Instance network_info: |[{"id": "49500b3e-3a86-4a0a-94ed-762a86a78124", "address": "fa:16:3e:95:a0:32", "network": {"id": "35eace8d-0404-444c-a1ec-e13fd5687644", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1634949692-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ef9a42771824708832a74238bbe89c0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c330dbdb-ad20-4e7e-8a12-66e4a914a84a", "external-id": "nsx-vlan-transportzone-181", "segmentation_id": 181, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap49500b3e-3a", "ovs_interfaceid": "49500b3e-3a86-4a0a-94ed-762a86a78124", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 679.934990] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c62dbb04-bda2-4f0b-9942-c38a36376853 tempest-ImagesTestJSON-852617634 
tempest-ImagesTestJSON-852617634-project-member] [instance: b9db75ba-6832-45e8-8faf-d1cdaa7dabdd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:95:a0:32', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c330dbdb-ad20-4e7e-8a12-66e4a914a84a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '49500b3e-3a86-4a0a-94ed-762a86a78124', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 679.942454] env[63028]: DEBUG oslo.service.loopingcall [None req-c62dbb04-bda2-4f0b-9942-c38a36376853 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 679.942668] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b9db75ba-6832-45e8-8faf-d1cdaa7dabdd] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 679.942896] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5d9dffb1-6a8a-4aee-8b67-cb68b27335c1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.969516] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 679.969516] env[63028]: value = "task-2735190" [ 679.969516] env[63028]: _type = "Task" [ 679.969516] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 679.980481] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735190, 'name': CreateVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 680.009057] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2659898-3088-4f2c-a2c4-15ffa6dd9784 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.016509] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-193b6a81-1dfb-483c-b74f-33853016307c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.048475] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c09c6c56-1059-46f6-a62a-3f53d9f50b09 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.057055] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e67cce07-5654-4c52-9e6a-e5c9c626e36a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.072380] env[63028]: DEBUG nova.compute.provider_tree [None req-a8d45a3e-08b3-4b12-b769-c8ee0aba7c03 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 680.094723] env[63028]: DEBUG nova.compute.manager [req-643612cc-4fad-4523-8e32-b34ca61ac187 req-367a61fa-2cac-410e-bc57-7926368e724f service nova] [instance: b9db75ba-6832-45e8-8faf-d1cdaa7dabdd] Received 
event network-changed-49500b3e-3a86-4a0a-94ed-762a86a78124 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 680.094922] env[63028]: DEBUG nova.compute.manager [req-643612cc-4fad-4523-8e32-b34ca61ac187 req-367a61fa-2cac-410e-bc57-7926368e724f service nova] [instance: b9db75ba-6832-45e8-8faf-d1cdaa7dabdd] Refreshing instance network info cache due to event network-changed-49500b3e-3a86-4a0a-94ed-762a86a78124. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 680.095165] env[63028]: DEBUG oslo_concurrency.lockutils [req-643612cc-4fad-4523-8e32-b34ca61ac187 req-367a61fa-2cac-410e-bc57-7926368e724f service nova] Acquiring lock "refresh_cache-b9db75ba-6832-45e8-8faf-d1cdaa7dabdd" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 680.095320] env[63028]: DEBUG oslo_concurrency.lockutils [req-643612cc-4fad-4523-8e32-b34ca61ac187 req-367a61fa-2cac-410e-bc57-7926368e724f service nova] Acquired lock "refresh_cache-b9db75ba-6832-45e8-8faf-d1cdaa7dabdd" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 680.095480] env[63028]: DEBUG nova.network.neutron [req-643612cc-4fad-4523-8e32-b34ca61ac187 req-367a61fa-2cac-410e-bc57-7926368e724f service nova] [instance: b9db75ba-6832-45e8-8faf-d1cdaa7dabdd] Refreshing network info cache for port 49500b3e-3a86-4a0a-94ed-762a86a78124 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 680.466351] env[63028]: DEBUG nova.compute.manager [None req-cb9a34be-b0fe-4e26-a404-5da53e8a940a tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] [instance: 8a09beac-4b54-4fbb-9bac-3dcfe4c21fb3] Start spawning the instance on the hypervisor. {{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 680.479124] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735190, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 680.498887] env[63028]: DEBUG nova.virt.hardware [None req-cb9a34be-b0fe-4e26-a404-5da53e8a940a tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 680.499261] env[63028]: DEBUG nova.virt.hardware [None req-cb9a34be-b0fe-4e26-a404-5da53e8a940a tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 680.499486] env[63028]: DEBUG nova.virt.hardware [None req-cb9a34be-b0fe-4e26-a404-5da53e8a940a tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 680.499776] env[63028]: DEBUG nova.virt.hardware [None req-cb9a34be-b0fe-4e26-a404-5da53e8a940a tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 680.500050] env[63028]: DEBUG nova.virt.hardware [None req-cb9a34be-b0fe-4e26-a404-5da53e8a940a tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 680.500311] env[63028]: DEBUG nova.virt.hardware [None req-cb9a34be-b0fe-4e26-a404-5da53e8a940a tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 680.500647] env[63028]: DEBUG nova.virt.hardware [None req-cb9a34be-b0fe-4e26-a404-5da53e8a940a tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 680.500913] env[63028]: DEBUG nova.virt.hardware [None req-cb9a34be-b0fe-4e26-a404-5da53e8a940a tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 680.503853] env[63028]: DEBUG nova.virt.hardware [None 
req-cb9a34be-b0fe-4e26-a404-5da53e8a940a tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 680.503853] env[63028]: DEBUG nova.virt.hardware [None req-cb9a34be-b0fe-4e26-a404-5da53e8a940a tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 680.503853] env[63028]: DEBUG nova.virt.hardware [None req-cb9a34be-b0fe-4e26-a404-5da53e8a940a tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 680.503853] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35ea5022-3b4d-4ba6-b023-42c4e9fecc9e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.512802] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-606adfc2-a884-4453-86be-18cdfe086268 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.576818] env[63028]: DEBUG nova.scheduler.client.report [None req-a8d45a3e-08b3-4b12-b769-c8ee0aba7c03 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 680.914435] env[63028]: DEBUG nova.network.neutron [None req-cb9a34be-b0fe-4e26-a404-5da53e8a940a tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] [instance: 8a09beac-4b54-4fbb-9bac-3dcfe4c21fb3] Successfully updated port: 4c75ac24-25df-4c2f-8ce9-e0b2ec182139 {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 680.977757] env[63028]: DEBUG nova.network.neutron [None req-71315007-5b90-4f7a-bfbb-cb9db9d67454 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: f3277886-4498-45c6-be68-e71d8293dc00] Updating instance_info_cache with network_info: [{"id": "08a61148-5b3a-4bb0-a130-3eda62d6bf7c", "address": "fa:16:3e:8e:67:6c", "network": {"id": "47c482bc-2ff1-431d-8910-0bf36def79a2", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.130", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "96661ac8d4f04d6e97eea4809b444133", "mtu": 8950, "physical_network": "default", 
"tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56136ef6-99d7-4562-9a9f-d66fec951c5c", "external-id": "nsx-vlan-transportzone-32", "segmentation_id": 32, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap08a61148-5b", "ovs_interfaceid": "08a61148-5b3a-4bb0-a130-3eda62d6bf7c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 680.985480] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735190, 'name': CreateVM_Task, 'duration_secs': 0.795944} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 680.985899] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b9db75ba-6832-45e8-8faf-d1cdaa7dabdd] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 680.986573] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c62dbb04-bda2-4f0b-9942-c38a36376853 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 680.986741] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c62dbb04-bda2-4f0b-9942-c38a36376853 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 680.988042] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c62dbb04-bda2-4f0b-9942-c38a36376853 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 680.989355] env[63028]: DEBUG nova.network.neutron [req-643612cc-4fad-4523-8e32-b34ca61ac187 req-367a61fa-2cac-410e-bc57-7926368e724f service nova] [instance: b9db75ba-6832-45e8-8faf-d1cdaa7dabdd] Updated VIF entry in instance network info cache for port 49500b3e-3a86-4a0a-94ed-762a86a78124. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 680.989355] env[63028]: DEBUG nova.network.neutron [req-643612cc-4fad-4523-8e32-b34ca61ac187 req-367a61fa-2cac-410e-bc57-7926368e724f service nova] [instance: b9db75ba-6832-45e8-8faf-d1cdaa7dabdd] Updating instance_info_cache with network_info: [{"id": "49500b3e-3a86-4a0a-94ed-762a86a78124", "address": "fa:16:3e:95:a0:32", "network": {"id": "35eace8d-0404-444c-a1ec-e13fd5687644", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1634949692-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ef9a42771824708832a74238bbe89c0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c330dbdb-ad20-4e7e-8a12-66e4a914a84a", "external-id": "nsx-vlan-transportzone-181", "segmentation_id": 181, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap49500b3e-3a", "ovs_interfaceid": "49500b3e-3a86-4a0a-94ed-762a86a78124", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 680.990497] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-feca4361-7e35-4ae0-ab16-f805f5e6ce26 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.995731] env[63028]: DEBUG oslo_vmware.api [None req-c62dbb04-bda2-4f0b-9942-c38a36376853 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Waiting for the task: (returnval){ [ 680.995731] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52425072-d2a3-ce21-af77-1a9f9634a0e2" [ 680.995731] env[63028]: _type = "Task" [ 680.995731] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 681.004624] env[63028]: DEBUG oslo_vmware.api [None req-c62dbb04-bda2-4f0b-9942-c38a36376853 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52425072-d2a3-ce21-af77-1a9f9634a0e2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 681.081931] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a8d45a3e-08b3-4b12-b769-c8ee0aba7c03 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.648s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 681.082357] env[63028]: DEBUG nova.compute.manager [None req-a8d45a3e-08b3-4b12-b769-c8ee0aba7c03 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] [instance: 8c7c8713-d5d7-490e-aba5-25d98bfbfaa0] Start building networks asynchronously for instance. 
{{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 681.085151] env[63028]: DEBUG oslo_concurrency.lockutils [None req-887cc1bb-3511-4f80-9fa0-76db2df39f89 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 36.383s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 681.085376] env[63028]: DEBUG nova.objects.instance [None req-887cc1bb-3511-4f80-9fa0-76db2df39f89 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Lazy-loading 'resources' on Instance uuid ddf20137-4d63-4c7a-b519-445719265e1d {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 681.416492] env[63028]: DEBUG oslo_concurrency.lockutils [None req-cb9a34be-b0fe-4e26-a404-5da53e8a940a tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] Acquiring lock "refresh_cache-8a09beac-4b54-4fbb-9bac-3dcfe4c21fb3" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 681.416609] env[63028]: DEBUG oslo_concurrency.lockutils [None req-cb9a34be-b0fe-4e26-a404-5da53e8a940a tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] Acquired lock "refresh_cache-8a09beac-4b54-4fbb-9bac-3dcfe4c21fb3" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 681.416769] env[63028]: DEBUG nova.network.neutron [None req-cb9a34be-b0fe-4e26-a404-5da53e8a940a tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] [instance: 8a09beac-4b54-4fbb-9bac-3dcfe4c21fb3] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 681.486983] env[63028]: DEBUG oslo_concurrency.lockutils [None req-71315007-5b90-4f7a-bfbb-cb9db9d67454 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Releasing lock "refresh_cache-f3277886-4498-45c6-be68-e71d8293dc00" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 681.487308] env[63028]: DEBUG nova.objects.instance [None req-71315007-5b90-4f7a-bfbb-cb9db9d67454 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Lazy-loading 'migration_context' on Instance uuid f3277886-4498-45c6-be68-e71d8293dc00 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 681.495321] env[63028]: DEBUG oslo_concurrency.lockutils [req-643612cc-4fad-4523-8e32-b34ca61ac187 req-367a61fa-2cac-410e-bc57-7926368e724f service nova] Releasing lock "refresh_cache-b9db75ba-6832-45e8-8faf-d1cdaa7dabdd" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 681.507026] env[63028]: DEBUG oslo_vmware.api [None req-c62dbb04-bda2-4f0b-9942-c38a36376853 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52425072-d2a3-ce21-af77-1a9f9634a0e2, 'name': SearchDatastore_Task, 'duration_secs': 0.009941} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 681.507374] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c62dbb04-bda2-4f0b-9942-c38a36376853 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 681.507679] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c62dbb04-bda2-4f0b-9942-c38a36376853 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: b9db75ba-6832-45e8-8faf-d1cdaa7dabdd] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 681.507953] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c62dbb04-bda2-4f0b-9942-c38a36376853 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 681.508170] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c62dbb04-bda2-4f0b-9942-c38a36376853 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 681.508385] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-c62dbb04-bda2-4f0b-9942-c38a36376853 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 681.508703] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-59be95c6-7bc2-4340-b436-2336056cad3e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.519120] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-c62dbb04-bda2-4f0b-9942-c38a36376853 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 681.519355] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c62dbb04-bda2-4f0b-9942-c38a36376853 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 681.520429] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-57db803b-5baf-41ea-8ced-4947c4ede5cd {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.525701] env[63028]: DEBUG oslo_vmware.api [None req-c62dbb04-bda2-4f0b-9942-c38a36376853 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Waiting for the task: (returnval){ [ 681.525701] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52199e43-4f9e-c8e4-ae56-7bcd9153b888" [ 681.525701] env[63028]: _type = "Task" [ 681.525701] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 681.534415] env[63028]: DEBUG oslo_vmware.api [None req-c62dbb04-bda2-4f0b-9942-c38a36376853 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52199e43-4f9e-c8e4-ae56-7bcd9153b888, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 681.589316] env[63028]: DEBUG nova.compute.utils [None req-a8d45a3e-08b3-4b12-b769-c8ee0aba7c03 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 681.593806] env[63028]: DEBUG nova.compute.manager [None req-a8d45a3e-08b3-4b12-b769-c8ee0aba7c03 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] [instance: 8c7c8713-d5d7-490e-aba5-25d98bfbfaa0] Allocating IP information in the background. {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 681.594210] env[63028]: DEBUG nova.network.neutron [None req-a8d45a3e-08b3-4b12-b769-c8ee0aba7c03 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] [instance: 8c7c8713-d5d7-490e-aba5-25d98bfbfaa0] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 681.638677] env[63028]: DEBUG nova.policy [None req-a8d45a3e-08b3-4b12-b769-c8ee0aba7c03 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '05add87f8e004545a09d9d88ef4b7ceb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '18767e9ea2fb48a186d76bcb28d00acb', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 681.976089] env[63028]: DEBUG nova.network.neutron [None req-cb9a34be-b0fe-4e26-a404-5da53e8a940a tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] [instance: 8a09beac-4b54-4fbb-9bac-3dcfe4c21fb3] Instance cache missing network info. 
{{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 681.994118] env[63028]: DEBUG nova.objects.base [None req-71315007-5b90-4f7a-bfbb-cb9db9d67454 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=63028) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 681.994859] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f51c0c6-42a1-4637-a6f3-bfdae67f538c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.025413] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-47679de8-7c4a-4bcf-96d4-b30045f0d991 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.038707] env[63028]: DEBUG oslo_vmware.api [None req-71315007-5b90-4f7a-bfbb-cb9db9d67454 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Waiting for the task: (returnval){ [ 682.038707] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5204056c-6da0-89be-2032-3571864f3181" [ 682.038707] env[63028]: _type = "Task" [ 682.038707] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 682.039077] env[63028]: DEBUG oslo_vmware.api [None req-c62dbb04-bda2-4f0b-9942-c38a36376853 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52199e43-4f9e-c8e4-ae56-7bcd9153b888, 'name': SearchDatastore_Task, 'duration_secs': 0.010146} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 682.046831] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-472ecb55-4719-43bf-85ed-6542aa9465ea {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.051705] env[63028]: DEBUG nova.network.neutron [None req-a8d45a3e-08b3-4b12-b769-c8ee0aba7c03 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] [instance: 8c7c8713-d5d7-490e-aba5-25d98bfbfaa0] Successfully created port: 9197f89b-957f-4d27-a314-ca95bd44a77d {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 682.061835] env[63028]: DEBUG oslo_vmware.api [None req-c62dbb04-bda2-4f0b-9942-c38a36376853 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Waiting for the task: (returnval){ [ 682.061835] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]528a5325-149c-5e3a-6fd8-97727ea9ab23" [ 682.061835] env[63028]: _type = "Task" [ 682.061835] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 682.061835] env[63028]: DEBUG oslo_vmware.api [None req-71315007-5b90-4f7a-bfbb-cb9db9d67454 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5204056c-6da0-89be-2032-3571864f3181, 'name': SearchDatastore_Task, 'duration_secs': 0.00751} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 682.062079] env[63028]: DEBUG oslo_concurrency.lockutils [None req-71315007-5b90-4f7a-bfbb-cb9db9d67454 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 682.071376] env[63028]: DEBUG oslo_vmware.api [None req-c62dbb04-bda2-4f0b-9942-c38a36376853 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]528a5325-149c-5e3a-6fd8-97727ea9ab23, 'name': SearchDatastore_Task, 'duration_secs': 0.008724} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 682.073773] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c62dbb04-bda2-4f0b-9942-c38a36376853 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 682.074170] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-c62dbb04-bda2-4f0b-9942-c38a36376853 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] b9db75ba-6832-45e8-8faf-d1cdaa7dabdd/b9db75ba-6832-45e8-8faf-d1cdaa7dabdd.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 682.074665] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b0f3564a-4274-4bda-9bd4-03feee2f3947 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.081616] env[63028]: DEBUG oslo_vmware.api [None req-c62dbb04-bda2-4f0b-9942-c38a36376853 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Waiting for the task: (returnval){ [ 682.081616] env[63028]: value = "task-2735191" [ 682.081616] env[63028]: _type = "Task" [ 682.081616] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 682.089289] env[63028]: DEBUG oslo_vmware.api [None req-c62dbb04-bda2-4f0b-9942-c38a36376853 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735191, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 682.095775] env[63028]: DEBUG nova.compute.manager [None req-a8d45a3e-08b3-4b12-b769-c8ee0aba7c03 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] [instance: 8c7c8713-d5d7-490e-aba5-25d98bfbfaa0] Start building block device mappings for instance. 
{{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 682.099065] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a37243fb-8506-4f8d-9260-a9e447c32df2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.108479] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b84c936-2dda-4fcb-b1fa-ec8ce63d13f9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.143621] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-926dbdcd-8a09-433c-9f2b-84b06e8a733a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.151757] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5035b87-681d-49e8-8e93-2c80422a2bd0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.166429] env[63028]: DEBUG nova.compute.provider_tree [None req-887cc1bb-3511-4f80-9fa0-76db2df39f89 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 682.169387] env[63028]: DEBUG nova.compute.manager [req-29d97016-1189-4353-97ff-80389c45b645 req-ae0ee28b-6beb-46c8-8e5d-13b12da1725c service nova] [instance: 8a09beac-4b54-4fbb-9bac-3dcfe4c21fb3] Received event network-vif-plugged-4c75ac24-25df-4c2f-8ce9-e0b2ec182139 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 682.169568] env[63028]: DEBUG oslo_concurrency.lockutils [req-29d97016-1189-4353-97ff-80389c45b645 req-ae0ee28b-6beb-46c8-8e5d-13b12da1725c service nova] Acquiring lock "8a09beac-4b54-4fbb-9bac-3dcfe4c21fb3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 682.169806] env[63028]: DEBUG oslo_concurrency.lockutils [req-29d97016-1189-4353-97ff-80389c45b645 req-ae0ee28b-6beb-46c8-8e5d-13b12da1725c service nova] Lock "8a09beac-4b54-4fbb-9bac-3dcfe4c21fb3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 682.169974] env[63028]: DEBUG oslo_concurrency.lockutils [req-29d97016-1189-4353-97ff-80389c45b645 req-ae0ee28b-6beb-46c8-8e5d-13b12da1725c service nova] Lock "8a09beac-4b54-4fbb-9bac-3dcfe4c21fb3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 682.170151] env[63028]: DEBUG nova.compute.manager [req-29d97016-1189-4353-97ff-80389c45b645 req-ae0ee28b-6beb-46c8-8e5d-13b12da1725c service nova] [instance: 8a09beac-4b54-4fbb-9bac-3dcfe4c21fb3] No waiting events found dispatching network-vif-plugged-4c75ac24-25df-4c2f-8ce9-e0b2ec182139 {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 682.170318] env[63028]: WARNING nova.compute.manager 
[req-29d97016-1189-4353-97ff-80389c45b645 req-ae0ee28b-6beb-46c8-8e5d-13b12da1725c service nova] [instance: 8a09beac-4b54-4fbb-9bac-3dcfe4c21fb3] Received unexpected event network-vif-plugged-4c75ac24-25df-4c2f-8ce9-e0b2ec182139 for instance with vm_state building and task_state spawning. [ 682.170506] env[63028]: DEBUG nova.compute.manager [req-29d97016-1189-4353-97ff-80389c45b645 req-ae0ee28b-6beb-46c8-8e5d-13b12da1725c service nova] [instance: 8a09beac-4b54-4fbb-9bac-3dcfe4c21fb3] Received event network-changed-4c75ac24-25df-4c2f-8ce9-e0b2ec182139 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 682.170612] env[63028]: DEBUG nova.compute.manager [req-29d97016-1189-4353-97ff-80389c45b645 req-ae0ee28b-6beb-46c8-8e5d-13b12da1725c service nova] [instance: 8a09beac-4b54-4fbb-9bac-3dcfe4c21fb3] Refreshing instance network info cache due to event network-changed-4c75ac24-25df-4c2f-8ce9-e0b2ec182139. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 682.170775] env[63028]: DEBUG oslo_concurrency.lockutils [req-29d97016-1189-4353-97ff-80389c45b645 req-ae0ee28b-6beb-46c8-8e5d-13b12da1725c service nova] Acquiring lock "refresh_cache-8a09beac-4b54-4fbb-9bac-3dcfe4c21fb3" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 682.271845] env[63028]: DEBUG nova.network.neutron [None req-cb9a34be-b0fe-4e26-a404-5da53e8a940a tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] [instance: 8a09beac-4b54-4fbb-9bac-3dcfe4c21fb3] Updating instance_info_cache with network_info: [{"id": "4c75ac24-25df-4c2f-8ce9-e0b2ec182139", "address": "fa:16:3e:77:91:40", "network": {"id": "47c482bc-2ff1-431d-8910-0bf36def79a2", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.234", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "96661ac8d4f04d6e97eea4809b444133", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56136ef6-99d7-4562-9a9f-d66fec951c5c", "external-id": "nsx-vlan-transportzone-32", "segmentation_id": 32, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4c75ac24-25", "ovs_interfaceid": "4c75ac24-25df-4c2f-8ce9-e0b2ec182139", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 682.591318] env[63028]: DEBUG oslo_vmware.api [None req-c62dbb04-bda2-4f0b-9942-c38a36376853 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735191, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 682.606031] env[63028]: INFO nova.virt.block_device [None req-a8d45a3e-08b3-4b12-b769-c8ee0aba7c03 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] [instance: 8c7c8713-d5d7-490e-aba5-25d98bfbfaa0] Booting with volume 39116d21-d007-4c27-9ce1-9f92bb99f75c at /dev/sda [ 682.646123] env[63028]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c5e357ca-d781-42ce-b5e6-e4cfccaa965a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.655061] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-152d3a3a-b278-4c61-a49e-166ae3923b71 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.672657] env[63028]: DEBUG nova.scheduler.client.report [None req-887cc1bb-3511-4f80-9fa0-76db2df39f89 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 682.687623] env[63028]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3ba922f4-0401-4ffc-a2ff-40564ee85668 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.696254] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83059182-c2ec-4056-9b05-1c484e3c61a7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.727107] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-026fdb6f-190a-41ed-b613-37eec4853d32 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.733395] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-406ca4fc-11b4-4eaf-9554-89f9b2a1b6f7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.746818] env[63028]: DEBUG nova.virt.block_device [None req-a8d45a3e-08b3-4b12-b769-c8ee0aba7c03 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] [instance: 8c7c8713-d5d7-490e-aba5-25d98bfbfaa0] Updating existing volume attachment record: 1a55ad23-45a7-40ae-94c2-35cff7ed90a8 {{(pid=63028) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 682.776421] env[63028]: DEBUG oslo_concurrency.lockutils [None req-cb9a34be-b0fe-4e26-a404-5da53e8a940a tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] Releasing lock "refresh_cache-8a09beac-4b54-4fbb-9bac-3dcfe4c21fb3" {{(pid=63028) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 682.776761] env[63028]: DEBUG nova.compute.manager [None req-cb9a34be-b0fe-4e26-a404-5da53e8a940a tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] [instance: 8a09beac-4b54-4fbb-9bac-3dcfe4c21fb3] Instance network_info: |[{"id": "4c75ac24-25df-4c2f-8ce9-e0b2ec182139", "address": "fa:16:3e:77:91:40", "network": {"id": "47c482bc-2ff1-431d-8910-0bf36def79a2", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.234", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "96661ac8d4f04d6e97eea4809b444133", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56136ef6-99d7-4562-9a9f-d66fec951c5c", "external-id": "nsx-vlan-transportzone-32", "segmentation_id": 32, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4c75ac24-25", "ovs_interfaceid": "4c75ac24-25df-4c2f-8ce9-e0b2ec182139", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 682.777073] env[63028]: DEBUG oslo_concurrency.lockutils [req-29d97016-1189-4353-97ff-80389c45b645 req-ae0ee28b-6beb-46c8-8e5d-13b12da1725c service nova] Acquired lock "refresh_cache-8a09beac-4b54-4fbb-9bac-3dcfe4c21fb3" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 682.777262] env[63028]: DEBUG nova.network.neutron [req-29d97016-1189-4353-97ff-80389c45b645 req-ae0ee28b-6beb-46c8-8e5d-13b12da1725c service nova] [instance: 8a09beac-4b54-4fbb-9bac-3dcfe4c21fb3] Refreshing network info cache for port 4c75ac24-25df-4c2f-8ce9-e0b2ec182139 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 682.778520] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-cb9a34be-b0fe-4e26-a404-5da53e8a940a tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] [instance: 8a09beac-4b54-4fbb-9bac-3dcfe4c21fb3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:77:91:40', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '56136ef6-99d7-4562-9a9f-d66fec951c5c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4c75ac24-25df-4c2f-8ce9-e0b2ec182139', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 682.785637] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb9a34be-b0fe-4e26-a404-5da53e8a940a tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] Creating folder: Project (288105b341df4d7586d2734706a0965a). Parent ref: group-v550570. 
{{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 682.786659] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a9d97556-2cd1-4406-bc8e-42e85622cd00 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.797404] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-cb9a34be-b0fe-4e26-a404-5da53e8a940a tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] Created folder: Project (288105b341df4d7586d2734706a0965a) in parent group-v550570. [ 682.797528] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb9a34be-b0fe-4e26-a404-5da53e8a940a tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] Creating folder: Instances. Parent ref: group-v550672. {{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 682.797737] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-872cd292-8155-4f8f-9fca-571b3c2d1637 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.806777] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-cb9a34be-b0fe-4e26-a404-5da53e8a940a tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] Created folder: Instances in parent group-v550672. [ 682.807079] env[63028]: DEBUG oslo.service.loopingcall [None req-cb9a34be-b0fe-4e26-a404-5da53e8a940a tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 682.807630] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8a09beac-4b54-4fbb-9bac-3dcfe4c21fb3] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 682.807630] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-42566eb3-6c20-48fd-a953-33d10ec7019f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.825379] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 682.825379] env[63028]: value = "task-2735194" [ 682.825379] env[63028]: _type = "Task" [ 682.825379] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 682.832590] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735194, 'name': CreateVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.092149] env[63028]: DEBUG oslo_vmware.api [None req-c62dbb04-bda2-4f0b-9942-c38a36376853 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735191, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.559303} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 683.092421] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-c62dbb04-bda2-4f0b-9942-c38a36376853 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] b9db75ba-6832-45e8-8faf-d1cdaa7dabdd/b9db75ba-6832-45e8-8faf-d1cdaa7dabdd.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 683.092635] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c62dbb04-bda2-4f0b-9942-c38a36376853 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: b9db75ba-6832-45e8-8faf-d1cdaa7dabdd] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 683.092996] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4f98c044-3e9d-4945-a7d4-cdb92dd59793 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.098989] env[63028]: DEBUG oslo_vmware.api [None req-c62dbb04-bda2-4f0b-9942-c38a36376853 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Waiting for the task: (returnval){ [ 683.098989] env[63028]: value = "task-2735195" [ 683.098989] env[63028]: _type = "Task" [ 683.098989] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 683.107357] env[63028]: DEBUG oslo_vmware.api [None req-c62dbb04-bda2-4f0b-9942-c38a36376853 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735195, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.188665] env[63028]: DEBUG oslo_concurrency.lockutils [None req-887cc1bb-3511-4f80-9fa0-76db2df39f89 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.103s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 683.191356] env[63028]: DEBUG oslo_concurrency.lockutils [None req-6749eca0-c477-497a-9500-20fbd646e0a7 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 36.875s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 683.192872] env[63028]: INFO nova.compute.claims [None req-6749eca0-c477-497a-9500-20fbd646e0a7 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: c3014718-1064-4ab9-9600-86490489ee4b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 683.215165] env[63028]: INFO nova.scheduler.client.report [None req-887cc1bb-3511-4f80-9fa0-76db2df39f89 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Deleted allocations for instance ddf20137-4d63-4c7a-b519-445719265e1d [ 683.335656] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735194, 'name': CreateVM_Task} progress is 25%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.575376] env[63028]: DEBUG nova.network.neutron [req-29d97016-1189-4353-97ff-80389c45b645 req-ae0ee28b-6beb-46c8-8e5d-13b12da1725c service nova] [instance: 8a09beac-4b54-4fbb-9bac-3dcfe4c21fb3] Updated VIF entry in instance network info cache for port 4c75ac24-25df-4c2f-8ce9-e0b2ec182139. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 683.575895] env[63028]: DEBUG nova.network.neutron [req-29d97016-1189-4353-97ff-80389c45b645 req-ae0ee28b-6beb-46c8-8e5d-13b12da1725c service nova] [instance: 8a09beac-4b54-4fbb-9bac-3dcfe4c21fb3] Updating instance_info_cache with network_info: [{"id": "4c75ac24-25df-4c2f-8ce9-e0b2ec182139", "address": "fa:16:3e:77:91:40", "network": {"id": "47c482bc-2ff1-431d-8910-0bf36def79a2", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.234", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "96661ac8d4f04d6e97eea4809b444133", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56136ef6-99d7-4562-9a9f-d66fec951c5c", "external-id": "nsx-vlan-transportzone-32", "segmentation_id": 32, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4c75ac24-25", "ovs_interfaceid": "4c75ac24-25df-4c2f-8ce9-e0b2ec182139", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 683.613917] env[63028]: DEBUG oslo_vmware.api [None req-c62dbb04-bda2-4f0b-9942-c38a36376853 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735195, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069551} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 683.613917] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c62dbb04-bda2-4f0b-9942-c38a36376853 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: b9db75ba-6832-45e8-8faf-d1cdaa7dabdd] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 683.614743] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-246f8104-d44d-4a47-b844-a6784e69daea {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.643854] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-c62dbb04-bda2-4f0b-9942-c38a36376853 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: b9db75ba-6832-45e8-8faf-d1cdaa7dabdd] Reconfiguring VM instance instance-0000001e to attach disk [datastore1] b9db75ba-6832-45e8-8faf-d1cdaa7dabdd/b9db75ba-6832-45e8-8faf-d1cdaa7dabdd.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 683.644605] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-36b31aa5-bcbc-498e-b6b4-291ad74d496c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.669668] env[63028]: DEBUG oslo_vmware.api [None req-c62dbb04-bda2-4f0b-9942-c38a36376853 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Waiting for the task: (returnval){ [ 683.669668] env[63028]: value = "task-2735196" [ 683.669668] env[63028]: _type = "Task" [ 683.669668] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 683.682572] env[63028]: DEBUG oslo_vmware.api [None req-c62dbb04-bda2-4f0b-9942-c38a36376853 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735196, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.691920] env[63028]: DEBUG nova.compute.manager [req-c39cf250-8b89-419d-815a-1e227fe3c38b req-d49522d4-bedf-447e-8f75-5ebc39b44d5b service nova] [instance: 8c7c8713-d5d7-490e-aba5-25d98bfbfaa0] Received event network-vif-plugged-9197f89b-957f-4d27-a314-ca95bd44a77d {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 683.692157] env[63028]: DEBUG oslo_concurrency.lockutils [req-c39cf250-8b89-419d-815a-1e227fe3c38b req-d49522d4-bedf-447e-8f75-5ebc39b44d5b service nova] Acquiring lock "8c7c8713-d5d7-490e-aba5-25d98bfbfaa0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 683.692382] env[63028]: DEBUG oslo_concurrency.lockutils [req-c39cf250-8b89-419d-815a-1e227fe3c38b req-d49522d4-bedf-447e-8f75-5ebc39b44d5b service nova] Lock "8c7c8713-d5d7-490e-aba5-25d98bfbfaa0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 683.692567] env[63028]: DEBUG oslo_concurrency.lockutils [req-c39cf250-8b89-419d-815a-1e227fe3c38b req-d49522d4-bedf-447e-8f75-5ebc39b44d5b service nova] Lock "8c7c8713-d5d7-490e-aba5-25d98bfbfaa0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 683.692744] env[63028]: DEBUG nova.compute.manager [req-c39cf250-8b89-419d-815a-1e227fe3c38b req-d49522d4-bedf-447e-8f75-5ebc39b44d5b service nova] [instance: 8c7c8713-d5d7-490e-aba5-25d98bfbfaa0] No waiting events found dispatching network-vif-plugged-9197f89b-957f-4d27-a314-ca95bd44a77d {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 683.692984] env[63028]: WARNING nova.compute.manager [req-c39cf250-8b89-419d-815a-1e227fe3c38b req-d49522d4-bedf-447e-8f75-5ebc39b44d5b service nova] [instance: 8c7c8713-d5d7-490e-aba5-25d98bfbfaa0] Received unexpected event network-vif-plugged-9197f89b-957f-4d27-a314-ca95bd44a77d for instance with vm_state building and task_state block_device_mapping. [ 683.724282] env[63028]: DEBUG oslo_concurrency.lockutils [None req-887cc1bb-3511-4f80-9fa0-76db2df39f89 tempest-AttachInterfacesV270Test-253249065 tempest-AttachInterfacesV270Test-253249065-project-member] Lock "ddf20137-4d63-4c7a-b519-445719265e1d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 43.115s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 683.831532] env[63028]: DEBUG nova.network.neutron [None req-a8d45a3e-08b3-4b12-b769-c8ee0aba7c03 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] [instance: 8c7c8713-d5d7-490e-aba5-25d98bfbfaa0] Successfully updated port: 9197f89b-957f-4d27-a314-ca95bd44a77d {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 683.838139] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735194, 'name': CreateVM_Task, 'duration_secs': 0.948363} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 683.838352] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8a09beac-4b54-4fbb-9bac-3dcfe4c21fb3] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 683.839113] env[63028]: DEBUG oslo_concurrency.lockutils [None req-cb9a34be-b0fe-4e26-a404-5da53e8a940a tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 683.839366] env[63028]: DEBUG oslo_concurrency.lockutils [None req-cb9a34be-b0fe-4e26-a404-5da53e8a940a tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 683.839664] env[63028]: DEBUG oslo_concurrency.lockutils [None req-cb9a34be-b0fe-4e26-a404-5da53e8a940a tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 683.839976] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c770a416-22d0-43ee-8730-cd4cc4b2df4b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.845174] env[63028]: DEBUG oslo_vmware.api [None req-cb9a34be-b0fe-4e26-a404-5da53e8a940a tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] Waiting for the task: (returnval){ [ 683.845174] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5273cd79-e533-b63c-28be-7e0d7268be18" [ 683.845174] env[63028]: _type = "Task" [ 683.845174] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 683.853759] env[63028]: DEBUG oslo_vmware.api [None req-cb9a34be-b0fe-4e26-a404-5da53e8a940a tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5273cd79-e533-b63c-28be-7e0d7268be18, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 684.081381] env[63028]: DEBUG oslo_concurrency.lockutils [req-29d97016-1189-4353-97ff-80389c45b645 req-ae0ee28b-6beb-46c8-8e5d-13b12da1725c service nova] Releasing lock "refresh_cache-8a09beac-4b54-4fbb-9bac-3dcfe4c21fb3" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 684.179615] env[63028]: DEBUG oslo_vmware.api [None req-c62dbb04-bda2-4f0b-9942-c38a36376853 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735196, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 684.335932] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a8d45a3e-08b3-4b12-b769-c8ee0aba7c03 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] Acquiring lock "refresh_cache-8c7c8713-d5d7-490e-aba5-25d98bfbfaa0" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 684.336077] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a8d45a3e-08b3-4b12-b769-c8ee0aba7c03 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] Acquired lock "refresh_cache-8c7c8713-d5d7-490e-aba5-25d98bfbfaa0" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 684.336262] env[63028]: DEBUG nova.network.neutron [None req-a8d45a3e-08b3-4b12-b769-c8ee0aba7c03 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] [instance: 8c7c8713-d5d7-490e-aba5-25d98bfbfaa0] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 684.364886] env[63028]: DEBUG oslo_vmware.api [None req-cb9a34be-b0fe-4e26-a404-5da53e8a940a tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5273cd79-e533-b63c-28be-7e0d7268be18, 'name': SearchDatastore_Task, 'duration_secs': 0.017044} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 684.365540] env[63028]: DEBUG oslo_concurrency.lockutils [None req-cb9a34be-b0fe-4e26-a404-5da53e8a940a tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 684.365836] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-cb9a34be-b0fe-4e26-a404-5da53e8a940a tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] [instance: 8a09beac-4b54-4fbb-9bac-3dcfe4c21fb3] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 684.366148] env[63028]: DEBUG oslo_concurrency.lockutils [None req-cb9a34be-b0fe-4e26-a404-5da53e8a940a tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 684.369020] env[63028]: DEBUG oslo_concurrency.lockutils [None req-cb9a34be-b0fe-4e26-a404-5da53e8a940a tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 684.369020] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-cb9a34be-b0fe-4e26-a404-5da53e8a940a tempest-TenantUsagesTestJSON-301234787 
tempest-TenantUsagesTestJSON-301234787-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 684.369020] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-82685791-9dfc-49db-aa3e-64dd408c81b1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.379289] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-cb9a34be-b0fe-4e26-a404-5da53e8a940a tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 684.379510] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-cb9a34be-b0fe-4e26-a404-5da53e8a940a tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 684.380340] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8c1d82e2-373a-42de-b99b-af8d4a3da0f2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.388828] env[63028]: DEBUG oslo_vmware.api [None req-cb9a34be-b0fe-4e26-a404-5da53e8a940a tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] Waiting for the task: (returnval){ [ 684.388828] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]525f4358-0235-a0f0-4dad-f7e5fff67253" [ 684.388828] env[63028]: _type = "Task" [ 684.388828] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 684.397847] env[63028]: DEBUG oslo_vmware.api [None req-cb9a34be-b0fe-4e26-a404-5da53e8a940a tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]525f4358-0235-a0f0-4dad-f7e5fff67253, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 684.687320] env[63028]: DEBUG oslo_vmware.api [None req-c62dbb04-bda2-4f0b-9942-c38a36376853 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735196, 'name': ReconfigVM_Task, 'duration_secs': 0.73335} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 684.687600] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-c62dbb04-bda2-4f0b-9942-c38a36376853 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: b9db75ba-6832-45e8-8faf-d1cdaa7dabdd] Reconfigured VM instance instance-0000001e to attach disk [datastore1] b9db75ba-6832-45e8-8faf-d1cdaa7dabdd/b9db75ba-6832-45e8-8faf-d1cdaa7dabdd.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 684.688225] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e96678a6-3522-42bf-b670-2f997d8c474b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.696139] env[63028]: DEBUG oslo_vmware.api [None req-c62dbb04-bda2-4f0b-9942-c38a36376853 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Waiting for the task: (returnval){ [ 684.696139] env[63028]: value = "task-2735197" [ 684.696139] env[63028]: _type = "Task" [ 684.696139] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 684.712633] env[63028]: DEBUG oslo_vmware.api [None req-c62dbb04-bda2-4f0b-9942-c38a36376853 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735197, 'name': Rename_Task} progress is 6%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 684.739780] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1003c868-12fe-4a41-9f35-cb5dbcc02563 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.747619] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca4866ca-ea5d-4acc-a0fe-4f0b75d5c4c2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.779123] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c8f9d1b-3ded-42cf-b21e-803b944d78e9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.786610] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b881467-0b87-4c56-9c9d-52cb69dbda32 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.802218] env[63028]: DEBUG nova.compute.provider_tree [None req-6749eca0-c477-497a-9500-20fbd646e0a7 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Updating inventory in ProviderTree for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 684.867628] env[63028]: DEBUG nova.compute.manager [None 
req-a8d45a3e-08b3-4b12-b769-c8ee0aba7c03 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] [instance: 8c7c8713-d5d7-490e-aba5-25d98bfbfaa0] Start spawning the instance on the hypervisor. {{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 684.868273] env[63028]: DEBUG nova.virt.hardware [None req-a8d45a3e-08b3-4b12-b769-c8ee0aba7c03 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 684.868550] env[63028]: DEBUG nova.virt.hardware [None req-a8d45a3e-08b3-4b12-b769-c8ee0aba7c03 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 684.868816] env[63028]: DEBUG nova.virt.hardware [None req-a8d45a3e-08b3-4b12-b769-c8ee0aba7c03 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 684.869038] env[63028]: DEBUG nova.virt.hardware [None req-a8d45a3e-08b3-4b12-b769-c8ee0aba7c03 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 684.869194] env[63028]: DEBUG nova.virt.hardware [None req-a8d45a3e-08b3-4b12-b769-c8ee0aba7c03 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 684.869345] env[63028]: DEBUG nova.virt.hardware [None req-a8d45a3e-08b3-4b12-b769-c8ee0aba7c03 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 684.869552] env[63028]: DEBUG nova.virt.hardware [None req-a8d45a3e-08b3-4b12-b769-c8ee0aba7c03 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 684.869711] env[63028]: DEBUG nova.virt.hardware [None req-a8d45a3e-08b3-4b12-b769-c8ee0aba7c03 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 684.869881] env[63028]: DEBUG nova.virt.hardware [None req-a8d45a3e-08b3-4b12-b769-c8ee0aba7c03 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 684.870216] env[63028]: DEBUG nova.virt.hardware [None req-a8d45a3e-08b3-4b12-b769-c8ee0aba7c03 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 684.870466] env[63028]: DEBUG nova.virt.hardware [None req-a8d45a3e-08b3-4b12-b769-c8ee0aba7c03 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 684.871417] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e031d3a5-48f4-49bc-b4a6-72003eff7ac4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.880365] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57e5eaf4-e469-462b-b258-19841c5c7440 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.900066] env[63028]: DEBUG nova.network.neutron [None req-a8d45a3e-08b3-4b12-b769-c8ee0aba7c03 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] [instance: 8c7c8713-d5d7-490e-aba5-25d98bfbfaa0] Instance cache missing network info. {{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 684.911856] env[63028]: DEBUG oslo_vmware.api [None req-cb9a34be-b0fe-4e26-a404-5da53e8a940a tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]525f4358-0235-a0f0-4dad-f7e5fff67253, 'name': SearchDatastore_Task, 'duration_secs': 0.010102} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 684.912738] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9d44d4f5-5762-411d-a349-a196838732eb {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.919989] env[63028]: DEBUG oslo_vmware.api [None req-cb9a34be-b0fe-4e26-a404-5da53e8a940a tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] Waiting for the task: (returnval){ [ 684.919989] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52869022-1a0d-4aab-f7a9-729bd3fe3747" [ 684.919989] env[63028]: _type = "Task" [ 684.919989] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 684.928447] env[63028]: DEBUG oslo_vmware.api [None req-cb9a34be-b0fe-4e26-a404-5da53e8a940a tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52869022-1a0d-4aab-f7a9-729bd3fe3747, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.126542] env[63028]: DEBUG nova.network.neutron [None req-a8d45a3e-08b3-4b12-b769-c8ee0aba7c03 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] [instance: 8c7c8713-d5d7-490e-aba5-25d98bfbfaa0] Updating instance_info_cache with network_info: [{"id": "9197f89b-957f-4d27-a314-ca95bd44a77d", "address": "fa:16:3e:8b:13:88", "network": {"id": "81ce2836-38cd-4178-acfe-248cc85d0cb8", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1238287143-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "18767e9ea2fb48a186d76bcb28d00acb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "685b4083-b748-41fb-a68a-273b1073fa28", "external-id": "nsx-vlan-transportzone-312", "segmentation_id": 312, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9197f89b-95", "ovs_interfaceid": "9197f89b-957f-4d27-a314-ca95bd44a77d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 685.208973] env[63028]: DEBUG oslo_vmware.api [None req-c62dbb04-bda2-4f0b-9942-c38a36376853 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735197, 'name': Rename_Task, 'duration_secs': 0.133168} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 685.209260] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-c62dbb04-bda2-4f0b-9942-c38a36376853 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: b9db75ba-6832-45e8-8faf-d1cdaa7dabdd] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 685.209543] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-622c1a6f-133b-4b4a-a509-5d8c1531bcdc {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.216270] env[63028]: DEBUG oslo_vmware.api [None req-c62dbb04-bda2-4f0b-9942-c38a36376853 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Waiting for the task: (returnval){ [ 685.216270] env[63028]: value = "task-2735198" [ 685.216270] env[63028]: _type = "Task" [ 685.216270] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 685.223832] env[63028]: DEBUG oslo_vmware.api [None req-c62dbb04-bda2-4f0b-9942-c38a36376853 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735198, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.353310] env[63028]: DEBUG nova.scheduler.client.report [None req-6749eca0-c477-497a-9500-20fbd646e0a7 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Updated inventory for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 with generation 62 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 685.354404] env[63028]: DEBUG nova.compute.provider_tree [None req-6749eca0-c477-497a-9500-20fbd646e0a7 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Updating resource provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 generation from 62 to 63 during operation: update_inventory {{(pid=63028) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 685.356830] env[63028]: DEBUG nova.compute.provider_tree [None req-6749eca0-c477-497a-9500-20fbd646e0a7 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Updating inventory in ProviderTree for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 685.435350] env[63028]: DEBUG oslo_vmware.api [None req-cb9a34be-b0fe-4e26-a404-5da53e8a940a tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52869022-1a0d-4aab-f7a9-729bd3fe3747, 'name': SearchDatastore_Task, 'duration_secs': 0.009618} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 685.435629] env[63028]: DEBUG oslo_concurrency.lockutils [None req-cb9a34be-b0fe-4e26-a404-5da53e8a940a tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 685.435881] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb9a34be-b0fe-4e26-a404-5da53e8a940a tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] 8a09beac-4b54-4fbb-9bac-3dcfe4c21fb3/8a09beac-4b54-4fbb-9bac-3dcfe4c21fb3.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 685.436163] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c88abf3c-4495-4fce-947f-c524c4225633 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.443835] env[63028]: DEBUG oslo_vmware.api [None req-cb9a34be-b0fe-4e26-a404-5da53e8a940a tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] Waiting for the task: (returnval){ [ 685.443835] env[63028]: value = "task-2735199" [ 685.443835] env[63028]: _type = "Task" [ 685.443835] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 685.453560] env[63028]: DEBUG oslo_vmware.api [None req-cb9a34be-b0fe-4e26-a404-5da53e8a940a tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] Task: {'id': task-2735199, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.632243] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a8d45a3e-08b3-4b12-b769-c8ee0aba7c03 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] Releasing lock "refresh_cache-8c7c8713-d5d7-490e-aba5-25d98bfbfaa0" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 685.632684] env[63028]: DEBUG nova.compute.manager [None req-a8d45a3e-08b3-4b12-b769-c8ee0aba7c03 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] [instance: 8c7c8713-d5d7-490e-aba5-25d98bfbfaa0] Instance network_info: |[{"id": "9197f89b-957f-4d27-a314-ca95bd44a77d", "address": "fa:16:3e:8b:13:88", "network": {"id": "81ce2836-38cd-4178-acfe-248cc85d0cb8", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1238287143-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "18767e9ea2fb48a186d76bcb28d00acb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "685b4083-b748-41fb-a68a-273b1073fa28", "external-id": "nsx-vlan-transportzone-312", "segmentation_id": 312, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9197f89b-95", "ovs_interfaceid": "9197f89b-957f-4d27-a314-ca95bd44a77d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 685.633244] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-a8d45a3e-08b3-4b12-b769-c8ee0aba7c03 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] [instance: 8c7c8713-d5d7-490e-aba5-25d98bfbfaa0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8b:13:88', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '685b4083-b748-41fb-a68a-273b1073fa28', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9197f89b-957f-4d27-a314-ca95bd44a77d', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 685.641846] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8d45a3e-08b3-4b12-b769-c8ee0aba7c03 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] Creating folder: Project (18767e9ea2fb48a186d76bcb28d00acb). Parent ref: group-v550570. {{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 685.642674] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fd8fdcce-f503-411a-8146-91835f0fb67f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.663434] env[63028]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. 
[ 685.663644] env[63028]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=63028) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 685.664069] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8d45a3e-08b3-4b12-b769-c8ee0aba7c03 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] Folder already exists: Project (18767e9ea2fb48a186d76bcb28d00acb). Parent ref: group-v550570. {{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1609}} [ 685.664313] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8d45a3e-08b3-4b12-b769-c8ee0aba7c03 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] Creating folder: Instances. Parent ref: group-v550615. {{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 685.664850] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-731e8595-277c-4deb-a587-86149d46fd19 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.677215] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-a8d45a3e-08b3-4b12-b769-c8ee0aba7c03 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] Created folder: Instances in parent group-v550615. [ 685.677321] env[63028]: DEBUG oslo.service.loopingcall [None req-a8d45a3e-08b3-4b12-b769-c8ee0aba7c03 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 685.677575] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8c7c8713-d5d7-490e-aba5-25d98bfbfaa0] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 685.678183] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d92835b1-4778-4a70-8aac-a91135205fb4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.699416] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 685.699416] env[63028]: value = "task-2735202" [ 685.699416] env[63028]: _type = "Task" [ 685.699416] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 685.716775] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735202, 'name': CreateVM_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.734084] env[63028]: DEBUG oslo_vmware.api [None req-c62dbb04-bda2-4f0b-9942-c38a36376853 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735198, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.739487] env[63028]: DEBUG nova.compute.manager [req-64f1af0d-cc3f-411c-bd8b-6f36ac6b13d6 req-ce45cd09-e61c-4343-bb0e-a309f0d00611 service nova] [instance: 8c7c8713-d5d7-490e-aba5-25d98bfbfaa0] Received event network-changed-9197f89b-957f-4d27-a314-ca95bd44a77d {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 685.739992] env[63028]: DEBUG nova.compute.manager [req-64f1af0d-cc3f-411c-bd8b-6f36ac6b13d6 req-ce45cd09-e61c-4343-bb0e-a309f0d00611 service nova] [instance: 8c7c8713-d5d7-490e-aba5-25d98bfbfaa0] Refreshing instance network info cache due to event network-changed-9197f89b-957f-4d27-a314-ca95bd44a77d. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 685.740371] env[63028]: DEBUG oslo_concurrency.lockutils [req-64f1af0d-cc3f-411c-bd8b-6f36ac6b13d6 req-ce45cd09-e61c-4343-bb0e-a309f0d00611 service nova] Acquiring lock "refresh_cache-8c7c8713-d5d7-490e-aba5-25d98bfbfaa0" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 685.740479] env[63028]: DEBUG oslo_concurrency.lockutils [req-64f1af0d-cc3f-411c-bd8b-6f36ac6b13d6 req-ce45cd09-e61c-4343-bb0e-a309f0d00611 service nova] Acquired lock "refresh_cache-8c7c8713-d5d7-490e-aba5-25d98bfbfaa0" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 685.740669] env[63028]: DEBUG nova.network.neutron [req-64f1af0d-cc3f-411c-bd8b-6f36ac6b13d6 req-ce45cd09-e61c-4343-bb0e-a309f0d00611 service nova] [instance: 8c7c8713-d5d7-490e-aba5-25d98bfbfaa0] Refreshing network info cache for port 9197f89b-957f-4d27-a314-ca95bd44a77d {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 685.861633] env[63028]: DEBUG oslo_concurrency.lockutils [None req-6749eca0-c477-497a-9500-20fbd646e0a7 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.670s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 685.864021] env[63028]: DEBUG nova.compute.manager [None req-6749eca0-c477-497a-9500-20fbd646e0a7 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: c3014718-1064-4ab9-9600-86490489ee4b] Start building networks asynchronously for instance. 
{{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 685.865534] env[63028]: DEBUG oslo_concurrency.lockutils [None req-3d4a8458-0f79-4648-8e42-1868292216b3 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 39.416s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 685.867690] env[63028]: INFO nova.compute.claims [None req-3d4a8458-0f79-4648-8e42-1868292216b3 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] [instance: c0db2b2a-9c06-409c-b48b-a0d5c127f2dc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 685.954515] env[63028]: DEBUG oslo_vmware.api [None req-cb9a34be-b0fe-4e26-a404-5da53e8a940a tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] Task: {'id': task-2735199, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.208984] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735202, 'name': CreateVM_Task, 'duration_secs': 0.505664} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 686.209044] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8c7c8713-d5d7-490e-aba5-25d98bfbfaa0] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 686.209696] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-a8d45a3e-08b3-4b12-b769-c8ee0aba7c03 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] [instance: 8c7c8713-d5d7-490e-aba5-25d98bfbfaa0] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'delete_on_termination': True, 'guest_format': None, 'disk_bus': None, 'mount_device': '/dev/sda', 'attachment_id': '1a55ad23-45a7-40ae-94c2-35cff7ed90a8', 'boot_index': 0, 'device_type': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-550626', 'volume_id': '39116d21-d007-4c27-9ce1-9f92bb99f75c', 'name': 'volume-39116d21-d007-4c27-9ce1-9f92bb99f75c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '8c7c8713-d5d7-490e-aba5-25d98bfbfaa0', 'attached_at': '', 'detached_at': '', 'volume_id': '39116d21-d007-4c27-9ce1-9f92bb99f75c', 'serial': '39116d21-d007-4c27-9ce1-9f92bb99f75c'}, 'volume_type': None}], 'swap': None} {{(pid=63028) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 686.209930] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-a8d45a3e-08b3-4b12-b769-c8ee0aba7c03 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] [instance: 8c7c8713-d5d7-490e-aba5-25d98bfbfaa0] Root volume attach. 
Driver type: vmdk {{(pid=63028) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 686.210697] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8783cda9-4f63-474e-bfd7-153327ecf995 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.221384] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6e4cddf-66d1-472c-a486-ec5dc650a520 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.228535] env[63028]: DEBUG oslo_vmware.api [None req-c62dbb04-bda2-4f0b-9942-c38a36376853 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735198, 'name': PowerOnVM_Task, 'duration_secs': 0.55525} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 686.230131] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-c62dbb04-bda2-4f0b-9942-c38a36376853 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: b9db75ba-6832-45e8-8faf-d1cdaa7dabdd] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 686.230350] env[63028]: INFO nova.compute.manager [None req-c62dbb04-bda2-4f0b-9942-c38a36376853 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: b9db75ba-6832-45e8-8faf-d1cdaa7dabdd] Took 8.42 seconds to spawn the instance on the hypervisor. [ 686.230531] env[63028]: DEBUG nova.compute.manager [None req-c62dbb04-bda2-4f0b-9942-c38a36376853 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: b9db75ba-6832-45e8-8faf-d1cdaa7dabdd] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 686.231308] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a49f09eb-9c98-409d-a83b-c3c3d5c306d1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.234259] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4f35490-4ce3-4f89-82aa-eddee937a547 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.241737] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-48d58be9-c627-4257-9433-132074735764 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.255078] env[63028]: DEBUG oslo_vmware.api [None req-a8d45a3e-08b3-4b12-b769-c8ee0aba7c03 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] Waiting for the task: (returnval){ [ 686.255078] env[63028]: value = "task-2735203" [ 686.255078] env[63028]: _type = "Task" [ 686.255078] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 686.262177] env[63028]: DEBUG oslo_vmware.api [None req-a8d45a3e-08b3-4b12-b769-c8ee0aba7c03 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] Task: {'id': task-2735203, 'name': RelocateVM_Task} progress is 5%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.375289] env[63028]: DEBUG nova.compute.utils [None req-6749eca0-c477-497a-9500-20fbd646e0a7 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 686.382949] env[63028]: DEBUG nova.compute.manager [None req-6749eca0-c477-497a-9500-20fbd646e0a7 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: c3014718-1064-4ab9-9600-86490489ee4b] Allocating IP information in the background. {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 686.382949] env[63028]: DEBUG nova.network.neutron [None req-6749eca0-c477-497a-9500-20fbd646e0a7 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: c3014718-1064-4ab9-9600-86490489ee4b] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 686.456812] env[63028]: DEBUG oslo_vmware.api [None req-cb9a34be-b0fe-4e26-a404-5da53e8a940a tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] Task: {'id': task-2735199, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.539371} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 686.457142] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb9a34be-b0fe-4e26-a404-5da53e8a940a tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] 8a09beac-4b54-4fbb-9bac-3dcfe4c21fb3/8a09beac-4b54-4fbb-9bac-3dcfe4c21fb3.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 686.457369] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-cb9a34be-b0fe-4e26-a404-5da53e8a940a tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] [instance: 8a09beac-4b54-4fbb-9bac-3dcfe4c21fb3] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 686.457677] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e71fdea3-bbd9-4f80-bad4-41551e7ec9f0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.463695] env[63028]: DEBUG oslo_vmware.api [None req-cb9a34be-b0fe-4e26-a404-5da53e8a940a tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] Waiting for the task: (returnval){ [ 686.463695] env[63028]: value = "task-2735204" [ 686.463695] env[63028]: _type = "Task" [ 686.463695] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 686.472183] env[63028]: DEBUG oslo_vmware.api [None req-cb9a34be-b0fe-4e26-a404-5da53e8a940a tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] Task: {'id': task-2735204, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.484159] env[63028]: DEBUG nova.policy [None req-6749eca0-c477-497a-9500-20fbd646e0a7 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f67c0caec73f4a8bb333737416142677', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b3d1798e23e64325a3b6f699cd27d98f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 686.596250] env[63028]: DEBUG nova.network.neutron [req-64f1af0d-cc3f-411c-bd8b-6f36ac6b13d6 req-ce45cd09-e61c-4343-bb0e-a309f0d00611 service nova] [instance: 8c7c8713-d5d7-490e-aba5-25d98bfbfaa0] Updated VIF entry in instance network info cache for port 9197f89b-957f-4d27-a314-ca95bd44a77d. {{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 686.596633] env[63028]: DEBUG nova.network.neutron [req-64f1af0d-cc3f-411c-bd8b-6f36ac6b13d6 req-ce45cd09-e61c-4343-bb0e-a309f0d00611 service nova] [instance: 8c7c8713-d5d7-490e-aba5-25d98bfbfaa0] Updating instance_info_cache with network_info: [{"id": "9197f89b-957f-4d27-a314-ca95bd44a77d", "address": "fa:16:3e:8b:13:88", "network": {"id": "81ce2836-38cd-4178-acfe-248cc85d0cb8", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1238287143-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "18767e9ea2fb48a186d76bcb28d00acb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "685b4083-b748-41fb-a68a-273b1073fa28", "external-id": "nsx-vlan-transportzone-312", "segmentation_id": 312, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9197f89b-95", "ovs_interfaceid": "9197f89b-957f-4d27-a314-ca95bd44a77d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 686.763868] env[63028]: INFO nova.compute.manager [None req-c62dbb04-bda2-4f0b-9942-c38a36376853 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: b9db75ba-6832-45e8-8faf-d1cdaa7dabdd] Took 52.54 seconds to build instance. [ 686.778887] env[63028]: DEBUG oslo_vmware.api [None req-a8d45a3e-08b3-4b12-b769-c8ee0aba7c03 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] Task: {'id': task-2735203, 'name': RelocateVM_Task} progress is 20%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.886353] env[63028]: DEBUG nova.compute.manager [None req-6749eca0-c477-497a-9500-20fbd646e0a7 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: c3014718-1064-4ab9-9600-86490489ee4b] Start building block device mappings for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 686.973935] env[63028]: DEBUG oslo_vmware.api [None req-cb9a34be-b0fe-4e26-a404-5da53e8a940a tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] Task: {'id': task-2735204, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06695} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 686.974381] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-cb9a34be-b0fe-4e26-a404-5da53e8a940a tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] [instance: 8a09beac-4b54-4fbb-9bac-3dcfe4c21fb3] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 686.975398] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67df9032-4c64-4e61-a03c-ce18250162d7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.002464] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-cb9a34be-b0fe-4e26-a404-5da53e8a940a tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] [instance: 8a09beac-4b54-4fbb-9bac-3dcfe4c21fb3] Reconfiguring VM instance instance-0000001f to attach disk [datastore1] 8a09beac-4b54-4fbb-9bac-3dcfe4c21fb3/8a09beac-4b54-4fbb-9bac-3dcfe4c21fb3.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 687.008843] env[63028]: DEBUG nova.network.neutron [None req-6749eca0-c477-497a-9500-20fbd646e0a7 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: c3014718-1064-4ab9-9600-86490489ee4b] Successfully created port: 66d0224f-a46e-422c-8658-e35f39636307 {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 687.011493] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7de2ba0a-0a16-40d2-a716-84f909cf5d23 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.031930] env[63028]: DEBUG oslo_vmware.api [None req-cb9a34be-b0fe-4e26-a404-5da53e8a940a tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] Waiting for the task: (returnval){ [ 687.031930] env[63028]: value = "task-2735205" [ 687.031930] env[63028]: _type = "Task" [ 687.031930] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 687.040392] env[63028]: DEBUG oslo_vmware.api [None req-cb9a34be-b0fe-4e26-a404-5da53e8a940a tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] Task: {'id': task-2735205, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.100563] env[63028]: DEBUG oslo_concurrency.lockutils [req-64f1af0d-cc3f-411c-bd8b-6f36ac6b13d6 req-ce45cd09-e61c-4343-bb0e-a309f0d00611 service nova] Releasing lock "refresh_cache-8c7c8713-d5d7-490e-aba5-25d98bfbfaa0" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 687.271360] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c62dbb04-bda2-4f0b-9942-c38a36376853 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Lock "b9db75ba-6832-45e8-8faf-d1cdaa7dabdd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 72.050s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 687.271598] env[63028]: DEBUG oslo_vmware.api [None req-a8d45a3e-08b3-4b12-b769-c8ee0aba7c03 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] Task: {'id': task-2735203, 'name': RelocateVM_Task, 'duration_secs': 0.596451} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 687.276344] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-a8d45a3e-08b3-4b12-b769-c8ee0aba7c03 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] [instance: 8c7c8713-d5d7-490e-aba5-25d98bfbfaa0] Volume attach. Driver type: vmdk {{(pid=63028) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 687.276344] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-a8d45a3e-08b3-4b12-b769-c8ee0aba7c03 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] [instance: 8c7c8713-d5d7-490e-aba5-25d98bfbfaa0] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-550626', 'volume_id': '39116d21-d007-4c27-9ce1-9f92bb99f75c', 'name': 'volume-39116d21-d007-4c27-9ce1-9f92bb99f75c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '8c7c8713-d5d7-490e-aba5-25d98bfbfaa0', 'attached_at': '', 'detached_at': '', 'volume_id': '39116d21-d007-4c27-9ce1-9f92bb99f75c', 'serial': '39116d21-d007-4c27-9ce1-9f92bb99f75c'} {{(pid=63028) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 687.276344] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fd691f8-b91e-4765-9736-92a04c61a106 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.295372] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e69d740-bc2f-490a-b5ed-fc1f9f7122df {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.321273] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-a8d45a3e-08b3-4b12-b769-c8ee0aba7c03 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] [instance: 8c7c8713-d5d7-490e-aba5-25d98bfbfaa0] Reconfiguring VM instance instance-00000020 to attach disk [datastore1] volume-39116d21-d007-4c27-9ce1-9f92bb99f75c/volume-39116d21-d007-4c27-9ce1-9f92bb99f75c.vmdk or device None with 
type thin {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 687.324462] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-45a2bbb0-a697-491e-9024-9e158801f328 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.344276] env[63028]: DEBUG oslo_vmware.api [None req-a8d45a3e-08b3-4b12-b769-c8ee0aba7c03 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] Waiting for the task: (returnval){ [ 687.344276] env[63028]: value = "task-2735206" [ 687.344276] env[63028]: _type = "Task" [ 687.344276] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 687.352336] env[63028]: DEBUG oslo_vmware.api [None req-a8d45a3e-08b3-4b12-b769-c8ee0aba7c03 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] Task: {'id': task-2735206, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.474453] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef87124d-896e-4ad2-96d8-07f561eb2ec8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.484361] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-173b9bfc-9d80-4e28-b965-7ad9c078eca5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.518258] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71bb8eed-a8f1-423e-925f-a39d8140b217 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.525535] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d52f6d22-a782-4f32-bda2-058f2ce5f184 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.540409] env[63028]: DEBUG nova.compute.provider_tree [None req-3d4a8458-0f79-4648-8e42-1868292216b3 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 687.552935] env[63028]: DEBUG oslo_vmware.api [None req-cb9a34be-b0fe-4e26-a404-5da53e8a940a tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] Task: {'id': task-2735205, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.746966] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4af54509-fef6-43be-872f-ed2cf32e7672 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.754161] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-93493dc0-4590-4292-a5cb-2eaf68afd8fe tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: b9db75ba-6832-45e8-8faf-d1cdaa7dabdd] Suspending the VM {{(pid=63028) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 687.754397] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-1bdd8b39-a5b0-4a5d-ae1c-9befe79dda60 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.759788] env[63028]: DEBUG oslo_vmware.api [None req-93493dc0-4590-4292-a5cb-2eaf68afd8fe tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Waiting for the task: (returnval){ [ 687.759788] env[63028]: value = "task-2735207" [ 687.759788] env[63028]: _type = "Task" [ 687.759788] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 687.772029] env[63028]: DEBUG oslo_vmware.api [None req-93493dc0-4590-4292-a5cb-2eaf68afd8fe tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735207, 'name': SuspendVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.774944] env[63028]: DEBUG nova.compute.manager [None req-3ea37b67-73e0-4219-b828-a126fbc237e2 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] [instance: e2d39c43-6666-4fda-b8e2-485399c59e46] Starting instance... {{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 687.855049] env[63028]: DEBUG oslo_vmware.api [None req-a8d45a3e-08b3-4b12-b769-c8ee0aba7c03 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] Task: {'id': task-2735206, 'name': ReconfigVM_Task, 'duration_secs': 0.348579} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 687.855152] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-a8d45a3e-08b3-4b12-b769-c8ee0aba7c03 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] [instance: 8c7c8713-d5d7-490e-aba5-25d98bfbfaa0] Reconfigured VM instance instance-00000020 to attach disk [datastore1] volume-39116d21-d007-4c27-9ce1-9f92bb99f75c/volume-39116d21-d007-4c27-9ce1-9f92bb99f75c.vmdk or device None with type thin {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 687.859896] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2737624e-5faa-4dee-b0fe-cafde900ed4b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.875476] env[63028]: DEBUG oslo_vmware.api [None req-a8d45a3e-08b3-4b12-b769-c8ee0aba7c03 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] Waiting for the task: (returnval){ [ 687.875476] env[63028]: value = "task-2735208" [ 687.875476] env[63028]: _type = "Task" [ 687.875476] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 687.886382] env[63028]: DEBUG oslo_vmware.api [None req-a8d45a3e-08b3-4b12-b769-c8ee0aba7c03 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] Task: {'id': task-2735208, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.899903] env[63028]: DEBUG nova.compute.manager [None req-6749eca0-c477-497a-9500-20fbd646e0a7 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: c3014718-1064-4ab9-9600-86490489ee4b] Start spawning the instance on the hypervisor. 
{{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 687.927809] env[63028]: DEBUG nova.virt.hardware [None req-6749eca0-c477-497a-9500-20fbd646e0a7 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 687.928104] env[63028]: DEBUG nova.virt.hardware [None req-6749eca0-c477-497a-9500-20fbd646e0a7 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 687.928273] env[63028]: DEBUG nova.virt.hardware [None req-6749eca0-c477-497a-9500-20fbd646e0a7 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 687.928453] env[63028]: DEBUG nova.virt.hardware [None req-6749eca0-c477-497a-9500-20fbd646e0a7 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 687.928624] env[63028]: DEBUG nova.virt.hardware [None req-6749eca0-c477-497a-9500-20fbd646e0a7 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 687.928890] env[63028]: DEBUG nova.virt.hardware [None req-6749eca0-c477-497a-9500-20fbd646e0a7 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 687.929188] env[63028]: DEBUG nova.virt.hardware [None req-6749eca0-c477-497a-9500-20fbd646e0a7 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 687.929374] env[63028]: DEBUG nova.virt.hardware [None req-6749eca0-c477-497a-9500-20fbd646e0a7 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 687.929549] env[63028]: DEBUG nova.virt.hardware [None 
req-6749eca0-c477-497a-9500-20fbd646e0a7 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 687.929712] env[63028]: DEBUG nova.virt.hardware [None req-6749eca0-c477-497a-9500-20fbd646e0a7 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 687.930244] env[63028]: DEBUG nova.virt.hardware [None req-6749eca0-c477-497a-9500-20fbd646e0a7 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 687.931261] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a72c000-f5f4-4943-83f5-2bb30b4a0047 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.939804] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e16373f-a9e5-4b2d-87f4-7cc8763b392a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.047888] env[63028]: DEBUG nova.scheduler.client.report [None req-3d4a8458-0f79-4648-8e42-1868292216b3 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 688.063886] env[63028]: DEBUG oslo_vmware.api [None req-cb9a34be-b0fe-4e26-a404-5da53e8a940a tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] Task: {'id': task-2735205, 'name': ReconfigVM_Task, 'duration_secs': 0.685964} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 688.064465] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-cb9a34be-b0fe-4e26-a404-5da53e8a940a tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] [instance: 8a09beac-4b54-4fbb-9bac-3dcfe4c21fb3] Reconfigured VM instance instance-0000001f to attach disk [datastore1] 8a09beac-4b54-4fbb-9bac-3dcfe4c21fb3/8a09beac-4b54-4fbb-9bac-3dcfe4c21fb3.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 688.065120] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-803bda6b-6557-4366-a024-561299664cce {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.072206] env[63028]: DEBUG oslo_vmware.api [None req-cb9a34be-b0fe-4e26-a404-5da53e8a940a tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] Waiting for the task: (returnval){ [ 688.072206] env[63028]: value = "task-2735209" [ 688.072206] env[63028]: _type = "Task" [ 688.072206] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 688.088025] env[63028]: DEBUG oslo_vmware.api [None req-cb9a34be-b0fe-4e26-a404-5da53e8a940a tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] Task: {'id': task-2735209, 'name': Rename_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.269785] env[63028]: DEBUG oslo_vmware.api [None req-93493dc0-4590-4292-a5cb-2eaf68afd8fe tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735207, 'name': SuspendVM_Task} progress is 66%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.313557] env[63028]: DEBUG oslo_concurrency.lockutils [None req-3ea37b67-73e0-4219-b828-a126fbc237e2 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 688.385715] env[63028]: DEBUG oslo_vmware.api [None req-a8d45a3e-08b3-4b12-b769-c8ee0aba7c03 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] Task: {'id': task-2735208, 'name': ReconfigVM_Task, 'duration_secs': 0.194241} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 688.386080] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-a8d45a3e-08b3-4b12-b769-c8ee0aba7c03 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] [instance: 8c7c8713-d5d7-490e-aba5-25d98bfbfaa0] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-550626', 'volume_id': '39116d21-d007-4c27-9ce1-9f92bb99f75c', 'name': 'volume-39116d21-d007-4c27-9ce1-9f92bb99f75c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '8c7c8713-d5d7-490e-aba5-25d98bfbfaa0', 'attached_at': '', 'detached_at': '', 'volume_id': '39116d21-d007-4c27-9ce1-9f92bb99f75c', 'serial': '39116d21-d007-4c27-9ce1-9f92bb99f75c'} {{(pid=63028) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 688.386832] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d4555bc5-3995-46d0-a9b1-dcd4363fe26c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.394392] env[63028]: DEBUG oslo_vmware.api [None req-a8d45a3e-08b3-4b12-b769-c8ee0aba7c03 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] Waiting for the task: (returnval){ [ 688.394392] env[63028]: value = "task-2735210" [ 688.394392] env[63028]: _type = "Task" [ 688.394392] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 688.405243] env[63028]: DEBUG oslo_vmware.api [None req-a8d45a3e-08b3-4b12-b769-c8ee0aba7c03 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] Task: {'id': task-2735210, 'name': Rename_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.565664] env[63028]: DEBUG oslo_concurrency.lockutils [None req-3d4a8458-0f79-4648-8e42-1868292216b3 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.700s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 688.567072] env[63028]: DEBUG nova.compute.manager [None req-3d4a8458-0f79-4648-8e42-1868292216b3 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] [instance: c0db2b2a-9c06-409c-b48b-a0d5c127f2dc] Start building networks asynchronously for instance. 
{{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 688.572640] env[63028]: DEBUG oslo_concurrency.lockutils [None req-30e7e053-37a2-402d-a298-8045eae5748c tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 40.962s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 688.573585] env[63028]: DEBUG nova.objects.instance [None req-30e7e053-37a2-402d-a298-8045eae5748c tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] [instance: 2ae111f7-4eaa-46c2-ab97-907daa913834] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63028) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 688.587238] env[63028]: DEBUG oslo_vmware.api [None req-cb9a34be-b0fe-4e26-a404-5da53e8a940a tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] Task: {'id': task-2735209, 'name': Rename_Task, 'duration_secs': 0.176264} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 688.587238] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb9a34be-b0fe-4e26-a404-5da53e8a940a tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] [instance: 8a09beac-4b54-4fbb-9bac-3dcfe4c21fb3] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 688.587418] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-680b409e-511f-488d-93ce-c602f8a56358 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.596507] env[63028]: DEBUG oslo_vmware.api [None req-cb9a34be-b0fe-4e26-a404-5da53e8a940a tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] Waiting for the task: (returnval){ [ 688.596507] env[63028]: value = "task-2735211" [ 688.596507] env[63028]: _type = "Task" [ 688.596507] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 688.606650] env[63028]: DEBUG oslo_vmware.api [None req-cb9a34be-b0fe-4e26-a404-5da53e8a940a tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] Task: {'id': task-2735211, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.773771] env[63028]: DEBUG oslo_vmware.api [None req-93493dc0-4590-4292-a5cb-2eaf68afd8fe tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735207, 'name': SuspendVM_Task, 'duration_secs': 0.632956} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 688.774067] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-93493dc0-4590-4292-a5cb-2eaf68afd8fe tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: b9db75ba-6832-45e8-8faf-d1cdaa7dabdd] Suspended the VM {{(pid=63028) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 688.774239] env[63028]: DEBUG nova.compute.manager [None req-93493dc0-4590-4292-a5cb-2eaf68afd8fe tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: b9db75ba-6832-45e8-8faf-d1cdaa7dabdd] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 688.775272] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15419dcb-48a7-4b5c-8ff2-a5620ca04c7b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.857642] env[63028]: DEBUG nova.compute.manager [req-d9668bb0-17f5-42b6-9c58-671ccf83f742 req-dd64be85-5e75-428d-9894-6484f0f6a12d service nova] [instance: c3014718-1064-4ab9-9600-86490489ee4b] Received event network-vif-plugged-66d0224f-a46e-422c-8658-e35f39636307 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 688.857642] env[63028]: DEBUG oslo_concurrency.lockutils [req-d9668bb0-17f5-42b6-9c58-671ccf83f742 req-dd64be85-5e75-428d-9894-6484f0f6a12d service nova] Acquiring lock "c3014718-1064-4ab9-9600-86490489ee4b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 688.857974] env[63028]: DEBUG oslo_concurrency.lockutils [req-d9668bb0-17f5-42b6-9c58-671ccf83f742 req-dd64be85-5e75-428d-9894-6484f0f6a12d service nova] Lock "c3014718-1064-4ab9-9600-86490489ee4b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 688.859205] env[63028]: DEBUG oslo_concurrency.lockutils [req-d9668bb0-17f5-42b6-9c58-671ccf83f742 req-dd64be85-5e75-428d-9894-6484f0f6a12d service nova] Lock "c3014718-1064-4ab9-9600-86490489ee4b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 688.859205] env[63028]: DEBUG nova.compute.manager [req-d9668bb0-17f5-42b6-9c58-671ccf83f742 req-dd64be85-5e75-428d-9894-6484f0f6a12d service nova] [instance: c3014718-1064-4ab9-9600-86490489ee4b] No waiting events found dispatching network-vif-plugged-66d0224f-a46e-422c-8658-e35f39636307 {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 688.859205] env[63028]: WARNING nova.compute.manager [req-d9668bb0-17f5-42b6-9c58-671ccf83f742 req-dd64be85-5e75-428d-9894-6484f0f6a12d service nova] [instance: c3014718-1064-4ab9-9600-86490489ee4b] Received unexpected event network-vif-plugged-66d0224f-a46e-422c-8658-e35f39636307 for instance with vm_state building and task_state spawning. 
[ 688.906661] env[63028]: DEBUG oslo_vmware.api [None req-a8d45a3e-08b3-4b12-b769-c8ee0aba7c03 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] Task: {'id': task-2735210, 'name': Rename_Task, 'duration_secs': 0.132512} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 688.907317] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8d45a3e-08b3-4b12-b769-c8ee0aba7c03 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] [instance: 8c7c8713-d5d7-490e-aba5-25d98bfbfaa0] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 688.907317] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b27263f1-7ed8-4db4-b118-d40fcf283a46 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.914222] env[63028]: DEBUG oslo_vmware.api [None req-a8d45a3e-08b3-4b12-b769-c8ee0aba7c03 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] Waiting for the task: (returnval){ [ 688.914222] env[63028]: value = "task-2735212" [ 688.914222] env[63028]: _type = "Task" [ 688.914222] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 688.923399] env[63028]: DEBUG oslo_vmware.api [None req-a8d45a3e-08b3-4b12-b769-c8ee0aba7c03 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] Task: {'id': task-2735212, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.076133] env[63028]: DEBUG nova.compute.utils [None req-3d4a8458-0f79-4648-8e42-1868292216b3 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 689.081025] env[63028]: DEBUG nova.compute.manager [None req-3d4a8458-0f79-4648-8e42-1868292216b3 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] [instance: c0db2b2a-9c06-409c-b48b-a0d5c127f2dc] Allocating IP information in the background. {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 689.081025] env[63028]: DEBUG nova.network.neutron [None req-3d4a8458-0f79-4648-8e42-1868292216b3 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] [instance: c0db2b2a-9c06-409c-b48b-a0d5c127f2dc] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 689.113374] env[63028]: DEBUG oslo_vmware.api [None req-cb9a34be-b0fe-4e26-a404-5da53e8a940a tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] Task: {'id': task-2735211, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.130739] env[63028]: DEBUG nova.network.neutron [None req-6749eca0-c477-497a-9500-20fbd646e0a7 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: c3014718-1064-4ab9-9600-86490489ee4b] Successfully updated port: 66d0224f-a46e-422c-8658-e35f39636307 {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 689.173552] env[63028]: DEBUG nova.policy [None req-3d4a8458-0f79-4648-8e42-1868292216b3 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2dc38196ef6943ada7cadbff1cb8cf5e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '87c0a41206824be6a0c74211e80e7181', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 689.423856] env[63028]: DEBUG oslo_vmware.api [None req-a8d45a3e-08b3-4b12-b769-c8ee0aba7c03 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] Task: {'id': task-2735212, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.586929] env[63028]: DEBUG nova.compute.manager [None req-3d4a8458-0f79-4648-8e42-1868292216b3 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] [instance: c0db2b2a-9c06-409c-b48b-a0d5c127f2dc] Start building block device mappings for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 689.591412] env[63028]: DEBUG oslo_concurrency.lockutils [None req-30e7e053-37a2-402d-a298-8045eae5748c tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.019s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 689.592640] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5fa19bdb-228b-42ec-83d1-3d3179548869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 39.684s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 689.598915] env[63028]: INFO nova.compute.claims [None req-5fa19bdb-228b-42ec-83d1-3d3179548869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: d6137c80-0c09-4655-b264-472753b4fa9c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 689.612887] env[63028]: DEBUG oslo_vmware.api [None req-cb9a34be-b0fe-4e26-a404-5da53e8a940a tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] Task: {'id': task-2735211, 'name': PowerOnVM_Task, 'duration_secs': 0.643368} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 689.614315] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb9a34be-b0fe-4e26-a404-5da53e8a940a tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] [instance: 8a09beac-4b54-4fbb-9bac-3dcfe4c21fb3] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 689.614526] env[63028]: INFO nova.compute.manager [None req-cb9a34be-b0fe-4e26-a404-5da53e8a940a tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] [instance: 8a09beac-4b54-4fbb-9bac-3dcfe4c21fb3] Took 9.15 seconds to spawn the instance on the hypervisor. [ 689.614713] env[63028]: DEBUG nova.compute.manager [None req-cb9a34be-b0fe-4e26-a404-5da53e8a940a tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] [instance: 8a09beac-4b54-4fbb-9bac-3dcfe4c21fb3] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 689.615850] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd631544-0660-4f22-a6a7-ec99b667331c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.633382] env[63028]: DEBUG oslo_concurrency.lockutils [None req-6749eca0-c477-497a-9500-20fbd646e0a7 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Acquiring lock "refresh_cache-c3014718-1064-4ab9-9600-86490489ee4b" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 689.633756] env[63028]: DEBUG oslo_concurrency.lockutils [None req-6749eca0-c477-497a-9500-20fbd646e0a7 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Acquired lock "refresh_cache-c3014718-1064-4ab9-9600-86490489ee4b" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 689.633932] env[63028]: DEBUG nova.network.neutron [None req-6749eca0-c477-497a-9500-20fbd646e0a7 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: c3014718-1064-4ab9-9600-86490489ee4b] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 689.690632] env[63028]: DEBUG nova.network.neutron [None req-3d4a8458-0f79-4648-8e42-1868292216b3 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] [instance: c0db2b2a-9c06-409c-b48b-a0d5c127f2dc] Successfully created port: a5a7398c-2688-4f83-abb5-933faad7d16c {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 689.926671] env[63028]: DEBUG oslo_vmware.api [None req-a8d45a3e-08b3-4b12-b769-c8ee0aba7c03 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] Task: {'id': task-2735212, 'name': PowerOnVM_Task, 'duration_secs': 0.5474} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 689.926671] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8d45a3e-08b3-4b12-b769-c8ee0aba7c03 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] [instance: 8c7c8713-d5d7-490e-aba5-25d98bfbfaa0] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 689.926671] env[63028]: INFO nova.compute.manager [None req-a8d45a3e-08b3-4b12-b769-c8ee0aba7c03 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] [instance: 8c7c8713-d5d7-490e-aba5-25d98bfbfaa0] Took 5.06 seconds to spawn the instance on the hypervisor. [ 689.926671] env[63028]: DEBUG nova.compute.manager [None req-a8d45a3e-08b3-4b12-b769-c8ee0aba7c03 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] [instance: 8c7c8713-d5d7-490e-aba5-25d98bfbfaa0] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 689.926671] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1e1f43f-cb8c-4fbe-8978-7f5413dded17 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.140282] env[63028]: INFO nova.compute.manager [None req-cb9a34be-b0fe-4e26-a404-5da53e8a940a tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] [instance: 8a09beac-4b54-4fbb-9bac-3dcfe4c21fb3] Took 54.75 seconds to build instance. [ 690.172943] env[63028]: DEBUG nova.network.neutron [None req-6749eca0-c477-497a-9500-20fbd646e0a7 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: c3014718-1064-4ab9-9600-86490489ee4b] Instance cache missing network info. 
{{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 690.302139] env[63028]: DEBUG nova.network.neutron [None req-6749eca0-c477-497a-9500-20fbd646e0a7 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: c3014718-1064-4ab9-9600-86490489ee4b] Updating instance_info_cache with network_info: [{"id": "66d0224f-a46e-422c-8658-e35f39636307", "address": "fa:16:3e:1d:a5:fe", "network": {"id": "350f4b14-d211-48c8-b1dd-06a0dd5805d1", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-987689362-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "b3d1798e23e64325a3b6f699cd27d98f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "60567ee6-01d0-4b16-9c7a-4a896827d6eb", "external-id": "nsx-vlan-transportzone-28", "segmentation_id": 28, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap66d0224f-a4", "ovs_interfaceid": "66d0224f-a46e-422c-8658-e35f39636307", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 690.453094] env[63028]: INFO nova.compute.manager [None req-a8d45a3e-08b3-4b12-b769-c8ee0aba7c03 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] [instance: 8c7c8713-d5d7-490e-aba5-25d98bfbfaa0] Took 53.92 seconds to build instance. [ 690.602633] env[63028]: DEBUG nova.compute.manager [None req-3d4a8458-0f79-4648-8e42-1868292216b3 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] [instance: c0db2b2a-9c06-409c-b48b-a0d5c127f2dc] Start spawning the instance on the hypervisor. 
{{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 690.626400] env[63028]: DEBUG nova.virt.hardware [None req-3d4a8458-0f79-4648-8e42-1868292216b3 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 690.626667] env[63028]: DEBUG nova.virt.hardware [None req-3d4a8458-0f79-4648-8e42-1868292216b3 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 690.626825] env[63028]: DEBUG nova.virt.hardware [None req-3d4a8458-0f79-4648-8e42-1868292216b3 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 690.627010] env[63028]: DEBUG nova.virt.hardware [None req-3d4a8458-0f79-4648-8e42-1868292216b3 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 690.627165] env[63028]: DEBUG nova.virt.hardware [None req-3d4a8458-0f79-4648-8e42-1868292216b3 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 690.627308] env[63028]: DEBUG nova.virt.hardware [None req-3d4a8458-0f79-4648-8e42-1868292216b3 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 690.627511] env[63028]: DEBUG nova.virt.hardware [None req-3d4a8458-0f79-4648-8e42-1868292216b3 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 690.627690] env[63028]: DEBUG nova.virt.hardware [None req-3d4a8458-0f79-4648-8e42-1868292216b3 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 690.627860] env[63028]: DEBUG 
nova.virt.hardware [None req-3d4a8458-0f79-4648-8e42-1868292216b3 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 690.628799] env[63028]: DEBUG nova.virt.hardware [None req-3d4a8458-0f79-4648-8e42-1868292216b3 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 690.628799] env[63028]: DEBUG nova.virt.hardware [None req-3d4a8458-0f79-4648-8e42-1868292216b3 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 690.629596] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-873865f2-d050-40c9-91aa-376968d10b68 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.641285] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b8e6dda-bc73-48d1-8cec-5a91b7ababb8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.646884] env[63028]: DEBUG oslo_concurrency.lockutils [None req-cb9a34be-b0fe-4e26-a404-5da53e8a940a tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] Lock "8a09beac-4b54-4fbb-9bac-3dcfe4c21fb3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 74.214s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 690.758765] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5e5cf0a0-0a56-464b-9790-52f94aa8fe18 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Acquiring lock "af87f1a5-b413-4b26-be91-474ad1f73df8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 690.758765] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5e5cf0a0-0a56-464b-9790-52f94aa8fe18 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Lock "af87f1a5-b413-4b26-be91-474ad1f73df8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 690.810294] env[63028]: DEBUG oslo_concurrency.lockutils [None req-6749eca0-c477-497a-9500-20fbd646e0a7 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Releasing lock "refresh_cache-c3014718-1064-4ab9-9600-86490489ee4b" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 690.810294] env[63028]: DEBUG nova.compute.manager [None req-6749eca0-c477-497a-9500-20fbd646e0a7 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: 
c3014718-1064-4ab9-9600-86490489ee4b] Instance network_info: |[{"id": "66d0224f-a46e-422c-8658-e35f39636307", "address": "fa:16:3e:1d:a5:fe", "network": {"id": "350f4b14-d211-48c8-b1dd-06a0dd5805d1", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-987689362-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "b3d1798e23e64325a3b6f699cd27d98f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "60567ee6-01d0-4b16-9c7a-4a896827d6eb", "external-id": "nsx-vlan-transportzone-28", "segmentation_id": 28, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap66d0224f-a4", "ovs_interfaceid": "66d0224f-a46e-422c-8658-e35f39636307", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 690.810490] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-6749eca0-c477-497a-9500-20fbd646e0a7 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: c3014718-1064-4ab9-9600-86490489ee4b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1d:a5:fe', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '60567ee6-01d0-4b16-9c7a-4a896827d6eb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '66d0224f-a46e-422c-8658-e35f39636307', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 690.819383] env[63028]: DEBUG oslo.service.loopingcall [None req-6749eca0-c477-497a-9500-20fbd646e0a7 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 690.826319] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c3014718-1064-4ab9-9600-86490489ee4b] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 690.827700] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f9c426d2-8205-410c-8ff1-09849cc49fd7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.849782] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 690.849782] env[63028]: value = "task-2735213" [ 690.849782] env[63028]: _type = "Task" [ 690.849782] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 690.861167] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735213, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.955038] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a8d45a3e-08b3-4b12-b769-c8ee0aba7c03 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] Lock "8c7c8713-d5d7-490e-aba5-25d98bfbfaa0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 71.553s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 691.022642] env[63028]: DEBUG nova.compute.manager [req-52474d2c-819b-46a8-aee3-c5517c932823 req-66c9a21f-c74a-4f71-9fc4-24a08b837a3d service nova] [instance: c3014718-1064-4ab9-9600-86490489ee4b] Received event network-changed-66d0224f-a46e-422c-8658-e35f39636307 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 691.022859] env[63028]: DEBUG nova.compute.manager [req-52474d2c-819b-46a8-aee3-c5517c932823 req-66c9a21f-c74a-4f71-9fc4-24a08b837a3d service nova] [instance: c3014718-1064-4ab9-9600-86490489ee4b] Refreshing instance network info cache due to event network-changed-66d0224f-a46e-422c-8658-e35f39636307. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 691.023177] env[63028]: DEBUG oslo_concurrency.lockutils [req-52474d2c-819b-46a8-aee3-c5517c932823 req-66c9a21f-c74a-4f71-9fc4-24a08b837a3d service nova] Acquiring lock "refresh_cache-c3014718-1064-4ab9-9600-86490489ee4b" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 691.023269] env[63028]: DEBUG oslo_concurrency.lockutils [req-52474d2c-819b-46a8-aee3-c5517c932823 req-66c9a21f-c74a-4f71-9fc4-24a08b837a3d service nova] Acquired lock "refresh_cache-c3014718-1064-4ab9-9600-86490489ee4b" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 691.023438] env[63028]: DEBUG nova.network.neutron [req-52474d2c-819b-46a8-aee3-c5517c932823 req-66c9a21f-c74a-4f71-9fc4-24a08b837a3d service nova] [instance: c3014718-1064-4ab9-9600-86490489ee4b] Refreshing network info cache for port 66d0224f-a46e-422c-8658-e35f39636307 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 691.153276] env[63028]: DEBUG nova.compute.manager [None req-6e3a9737-fb71-42ad-adca-40f4f56c8c52 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Starting instance... 
{{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 691.226166] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c5ef4a7-ee29-439e-800b-892a9ca18cf6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.234834] env[63028]: DEBUG nova.compute.manager [req-4f7d897a-b629-48ce-919a-79a5f9dd9a7f req-c2e5c871-9f53-4e6a-b711-d330b973fe0c service nova] [instance: 8c7c8713-d5d7-490e-aba5-25d98bfbfaa0] Received event network-changed-9197f89b-957f-4d27-a314-ca95bd44a77d {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 691.235158] env[63028]: DEBUG nova.compute.manager [req-4f7d897a-b629-48ce-919a-79a5f9dd9a7f req-c2e5c871-9f53-4e6a-b711-d330b973fe0c service nova] [instance: 8c7c8713-d5d7-490e-aba5-25d98bfbfaa0] Refreshing instance network info cache due to event network-changed-9197f89b-957f-4d27-a314-ca95bd44a77d. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 691.235373] env[63028]: DEBUG oslo_concurrency.lockutils [req-4f7d897a-b629-48ce-919a-79a5f9dd9a7f req-c2e5c871-9f53-4e6a-b711-d330b973fe0c service nova] Acquiring lock "refresh_cache-8c7c8713-d5d7-490e-aba5-25d98bfbfaa0" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 691.235547] env[63028]: DEBUG oslo_concurrency.lockutils [req-4f7d897a-b629-48ce-919a-79a5f9dd9a7f req-c2e5c871-9f53-4e6a-b711-d330b973fe0c service nova] Acquired lock "refresh_cache-8c7c8713-d5d7-490e-aba5-25d98bfbfaa0" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 691.235735] env[63028]: DEBUG nova.network.neutron [req-4f7d897a-b629-48ce-919a-79a5f9dd9a7f req-c2e5c871-9f53-4e6a-b711-d330b973fe0c service nova] [instance: 8c7c8713-d5d7-490e-aba5-25d98bfbfaa0] Refreshing network info cache for port 9197f89b-957f-4d27-a314-ca95bd44a77d {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 691.242604] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0997a684-7354-4b04-9034-1db44ca1768b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.275403] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-223fbcbd-a88b-4e5e-9f58-4d3e5092460b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.283278] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e6ae202-91c4-4778-9423-9eaae37f32c8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.297846] env[63028]: DEBUG nova.compute.provider_tree [None req-5fa19bdb-228b-42ec-83d1-3d3179548869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 691.307356] env[63028]: DEBUG nova.compute.manager [None req-dbb99f7d-3b35-4de5-9bb1-7d10dfbf7abf tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: b9db75ba-6832-45e8-8faf-d1cdaa7dabdd] 
Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 691.308294] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29eca1ee-620b-4692-9b9d-07e605530c28 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.317692] env[63028]: DEBUG nova.network.neutron [None req-3d4a8458-0f79-4648-8e42-1868292216b3 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] [instance: c0db2b2a-9c06-409c-b48b-a0d5c127f2dc] Successfully updated port: a5a7398c-2688-4f83-abb5-933faad7d16c {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 691.358840] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735213, 'name': CreateVM_Task} progress is 99%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.463045] env[63028]: DEBUG nova.compute.manager [None req-767adebe-0612-4cb5-9387-d45867077656 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] [instance: 352ac7c3-17a8-4d7e-a66f-47ea7614892c] Starting instance... {{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 691.677951] env[63028]: DEBUG oslo_concurrency.lockutils [None req-6e3a9737-fb71-42ad-adca-40f4f56c8c52 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 691.803102] env[63028]: DEBUG nova.scheduler.client.report [None req-5fa19bdb-228b-42ec-83d1-3d3179548869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 691.820081] env[63028]: INFO nova.compute.manager [None req-dbb99f7d-3b35-4de5-9bb1-7d10dfbf7abf tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: b9db75ba-6832-45e8-8faf-d1cdaa7dabdd] instance snapshotting [ 691.820255] env[63028]: WARNING nova.compute.manager [None req-dbb99f7d-3b35-4de5-9bb1-7d10dfbf7abf tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: b9db75ba-6832-45e8-8faf-d1cdaa7dabdd] trying to snapshot a non-running instance: (state: 7 expected: 1) [ 691.822608] env[63028]: DEBUG oslo_concurrency.lockutils [None req-3d4a8458-0f79-4648-8e42-1868292216b3 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] Acquiring lock "refresh_cache-c0db2b2a-9c06-409c-b48b-a0d5c127f2dc" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 691.822748] env[63028]: DEBUG oslo_concurrency.lockutils [None 
req-3d4a8458-0f79-4648-8e42-1868292216b3 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] Acquired lock "refresh_cache-c0db2b2a-9c06-409c-b48b-a0d5c127f2dc" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 691.822890] env[63028]: DEBUG nova.network.neutron [None req-3d4a8458-0f79-4648-8e42-1868292216b3 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] [instance: c0db2b2a-9c06-409c-b48b-a0d5c127f2dc] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 691.824421] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21762eb9-8503-453e-a489-dededdbf190a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.848850] env[63028]: DEBUG nova.network.neutron [req-52474d2c-819b-46a8-aee3-c5517c932823 req-66c9a21f-c74a-4f71-9fc4-24a08b837a3d service nova] [instance: c3014718-1064-4ab9-9600-86490489ee4b] Updated VIF entry in instance network info cache for port 66d0224f-a46e-422c-8658-e35f39636307. {{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 691.849300] env[63028]: DEBUG nova.network.neutron [req-52474d2c-819b-46a8-aee3-c5517c932823 req-66c9a21f-c74a-4f71-9fc4-24a08b837a3d service nova] [instance: c3014718-1064-4ab9-9600-86490489ee4b] Updating instance_info_cache with network_info: [{"id": "66d0224f-a46e-422c-8658-e35f39636307", "address": "fa:16:3e:1d:a5:fe", "network": {"id": "350f4b14-d211-48c8-b1dd-06a0dd5805d1", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-987689362-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "b3d1798e23e64325a3b6f699cd27d98f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "60567ee6-01d0-4b16-9c7a-4a896827d6eb", "external-id": "nsx-vlan-transportzone-28", "segmentation_id": 28, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap66d0224f-a4", "ovs_interfaceid": "66d0224f-a46e-422c-8658-e35f39636307", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 691.850927] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-585bfbcb-369e-4ee5-9f77-d327330bf03a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.873890] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735213, 'name': CreateVM_Task, 'duration_secs': 0.552427} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 691.874849] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c3014718-1064-4ab9-9600-86490489ee4b] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 691.875922] env[63028]: DEBUG oslo_concurrency.lockutils [None req-6749eca0-c477-497a-9500-20fbd646e0a7 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 691.877060] env[63028]: DEBUG oslo_concurrency.lockutils [None req-6749eca0-c477-497a-9500-20fbd646e0a7 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 691.877485] env[63028]: DEBUG oslo_concurrency.lockutils [None req-6749eca0-c477-497a-9500-20fbd646e0a7 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 691.878273] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2c227e58-f447-4cb7-bd21-30bdb8d50a62 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.883824] env[63028]: DEBUG oslo_vmware.api [None req-6749eca0-c477-497a-9500-20fbd646e0a7 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Waiting for the task: (returnval){ [ 691.883824] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52998f34-11df-f6fe-2dfe-f6bf125baf8c" [ 691.883824] env[63028]: _type = "Task" [ 691.883824] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 691.892589] env[63028]: DEBUG oslo_vmware.api [None req-6749eca0-c477-497a-9500-20fbd646e0a7 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52998f34-11df-f6fe-2dfe-f6bf125baf8c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.913637] env[63028]: DEBUG oslo_concurrency.lockutils [None req-0f3678b7-8afb-4ff2-9687-0585f2f9d289 tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] Acquiring lock "8a09beac-4b54-4fbb-9bac-3dcfe4c21fb3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 691.913917] env[63028]: DEBUG oslo_concurrency.lockutils [None req-0f3678b7-8afb-4ff2-9687-0585f2f9d289 tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] Lock "8a09beac-4b54-4fbb-9bac-3dcfe4c21fb3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 691.914167] env[63028]: DEBUG oslo_concurrency.lockutils [None req-0f3678b7-8afb-4ff2-9687-0585f2f9d289 tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] Acquiring lock "8a09beac-4b54-4fbb-9bac-3dcfe4c21fb3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 691.914343] env[63028]: DEBUG oslo_concurrency.lockutils [None req-0f3678b7-8afb-4ff2-9687-0585f2f9d289 tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] Lock "8a09beac-4b54-4fbb-9bac-3dcfe4c21fb3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 691.914511] env[63028]: DEBUG oslo_concurrency.lockutils [None req-0f3678b7-8afb-4ff2-9687-0585f2f9d289 tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] Lock "8a09beac-4b54-4fbb-9bac-3dcfe4c21fb3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 691.916791] env[63028]: INFO nova.compute.manager [None req-0f3678b7-8afb-4ff2-9687-0585f2f9d289 tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] [instance: 8a09beac-4b54-4fbb-9bac-3dcfe4c21fb3] Terminating instance [ 691.987973] env[63028]: DEBUG oslo_concurrency.lockutils [None req-767adebe-0612-4cb5-9387-d45867077656 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 692.111428] env[63028]: DEBUG nova.network.neutron [req-4f7d897a-b629-48ce-919a-79a5f9dd9a7f req-c2e5c871-9f53-4e6a-b711-d330b973fe0c service nova] [instance: 8c7c8713-d5d7-490e-aba5-25d98bfbfaa0] Updated VIF entry in instance network info cache for port 9197f89b-957f-4d27-a314-ca95bd44a77d. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 692.111428] env[63028]: DEBUG nova.network.neutron [req-4f7d897a-b629-48ce-919a-79a5f9dd9a7f req-c2e5c871-9f53-4e6a-b711-d330b973fe0c service nova] [instance: 8c7c8713-d5d7-490e-aba5-25d98bfbfaa0] Updating instance_info_cache with network_info: [{"id": "9197f89b-957f-4d27-a314-ca95bd44a77d", "address": "fa:16:3e:8b:13:88", "network": {"id": "81ce2836-38cd-4178-acfe-248cc85d0cb8", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1238287143-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.200", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "18767e9ea2fb48a186d76bcb28d00acb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "685b4083-b748-41fb-a68a-273b1073fa28", "external-id": "nsx-vlan-transportzone-312", "segmentation_id": 312, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9197f89b-95", "ovs_interfaceid": "9197f89b-957f-4d27-a314-ca95bd44a77d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 692.308053] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5fa19bdb-228b-42ec-83d1-3d3179548869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.715s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 692.308682] env[63028]: DEBUG nova.compute.manager [None req-5fa19bdb-228b-42ec-83d1-3d3179548869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: d6137c80-0c09-4655-b264-472753b4fa9c] Start building networks asynchronously for instance. 
{{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 692.311565] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7c131164-be90-408a-908b-02c8df7872e2 tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 39.381s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 692.311832] env[63028]: DEBUG nova.objects.instance [None req-7c131164-be90-408a-908b-02c8df7872e2 tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Lazy-loading 'resources' on Instance uuid 2ae111f7-4eaa-46c2-ab97-907daa913834 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 692.356680] env[63028]: DEBUG oslo_concurrency.lockutils [req-52474d2c-819b-46a8-aee3-c5517c932823 req-66c9a21f-c74a-4f71-9fc4-24a08b837a3d service nova] Releasing lock "refresh_cache-c3014718-1064-4ab9-9600-86490489ee4b" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 692.358135] env[63028]: DEBUG nova.network.neutron [None req-3d4a8458-0f79-4648-8e42-1868292216b3 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] [instance: c0db2b2a-9c06-409c-b48b-a0d5c127f2dc] Instance cache missing network info. {{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 692.375885] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-dbb99f7d-3b35-4de5-9bb1-7d10dfbf7abf tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: b9db75ba-6832-45e8-8faf-d1cdaa7dabdd] Creating Snapshot of the VM instance {{(pid=63028) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 692.376235] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-7846e821-521b-492c-82ed-d2d5dc9b43ec {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.389971] env[63028]: DEBUG oslo_vmware.api [None req-dbb99f7d-3b35-4de5-9bb1-7d10dfbf7abf tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Waiting for the task: (returnval){ [ 692.389971] env[63028]: value = "task-2735214" [ 692.389971] env[63028]: _type = "Task" [ 692.389971] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 692.397351] env[63028]: DEBUG oslo_vmware.api [None req-6749eca0-c477-497a-9500-20fbd646e0a7 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52998f34-11df-f6fe-2dfe-f6bf125baf8c, 'name': SearchDatastore_Task, 'duration_secs': 0.012416} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 692.398044] env[63028]: DEBUG oslo_concurrency.lockutils [None req-6749eca0-c477-497a-9500-20fbd646e0a7 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 692.398294] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-6749eca0-c477-497a-9500-20fbd646e0a7 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: c3014718-1064-4ab9-9600-86490489ee4b] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 692.398654] env[63028]: DEBUG oslo_concurrency.lockutils [None req-6749eca0-c477-497a-9500-20fbd646e0a7 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 692.398853] env[63028]: DEBUG oslo_concurrency.lockutils [None req-6749eca0-c477-497a-9500-20fbd646e0a7 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 692.399060] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-6749eca0-c477-497a-9500-20fbd646e0a7 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 692.402124] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-411a6804-547b-4a98-a843-41fcb45ddaae {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.403874] env[63028]: DEBUG oslo_vmware.api [None req-dbb99f7d-3b35-4de5-9bb1-7d10dfbf7abf tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735214, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.410443] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-6749eca0-c477-497a-9500-20fbd646e0a7 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 692.410620] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-6749eca0-c477-497a-9500-20fbd646e0a7 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 692.411336] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3b598c01-218a-4257-8cc8-00c4f0060bb1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.416921] env[63028]: DEBUG oslo_vmware.api [None req-6749eca0-c477-497a-9500-20fbd646e0a7 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Waiting for the task: (returnval){ [ 692.416921] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52a20282-491b-b4eb-e980-664e2490d84a" [ 692.416921] env[63028]: _type = "Task" [ 692.416921] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 692.424646] env[63028]: DEBUG nova.compute.manager [None req-0f3678b7-8afb-4ff2-9687-0585f2f9d289 tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] [instance: 8a09beac-4b54-4fbb-9bac-3dcfe4c21fb3] Start destroying the instance on the hypervisor. {{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 692.424871] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-0f3678b7-8afb-4ff2-9687-0585f2f9d289 tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] [instance: 8a09beac-4b54-4fbb-9bac-3dcfe4c21fb3] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 692.425715] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bf0a56d-89b2-4dd5-b304-067bd294b5e8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.433850] env[63028]: DEBUG oslo_vmware.api [None req-6749eca0-c477-497a-9500-20fbd646e0a7 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52a20282-491b-b4eb-e980-664e2490d84a, 'name': SearchDatastore_Task, 'duration_secs': 0.009695} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 692.436335] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f3678b7-8afb-4ff2-9687-0585f2f9d289 tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] [instance: 8a09beac-4b54-4fbb-9bac-3dcfe4c21fb3] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 692.436563] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bb5b60ea-b689-4b46-9154-b89dca533e49 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.438808] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9fb9266e-eb9f-4b27-84f3-d24ec2256c52 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.442662] env[63028]: DEBUG oslo_vmware.api [None req-6749eca0-c477-497a-9500-20fbd646e0a7 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Waiting for the task: (returnval){ [ 692.442662] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5231071a-0434-0203-18a8-8b7785aadc8b" [ 692.442662] env[63028]: _type = "Task" [ 692.442662] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 692.446987] env[63028]: DEBUG oslo_vmware.api [None req-0f3678b7-8afb-4ff2-9687-0585f2f9d289 tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] Waiting for the task: (returnval){ [ 692.446987] env[63028]: value = "task-2735215" [ 692.446987] env[63028]: _type = "Task" [ 692.446987] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 692.453217] env[63028]: DEBUG oslo_vmware.api [None req-6749eca0-c477-497a-9500-20fbd646e0a7 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5231071a-0434-0203-18a8-8b7785aadc8b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.457693] env[63028]: DEBUG oslo_vmware.api [None req-0f3678b7-8afb-4ff2-9687-0585f2f9d289 tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] Task: {'id': task-2735215, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.537496] env[63028]: DEBUG nova.network.neutron [None req-3d4a8458-0f79-4648-8e42-1868292216b3 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] [instance: c0db2b2a-9c06-409c-b48b-a0d5c127f2dc] Updating instance_info_cache with network_info: [{"id": "a5a7398c-2688-4f83-abb5-933faad7d16c", "address": "fa:16:3e:74:23:47", "network": {"id": "6f2eed6a-7f4d-4c5e-89fb-f23c7837ab0e", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1905115091-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "87c0a41206824be6a0c74211e80e7181", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98f447de-d71e-41ef-bc37-ed97b4a1f58f", "external-id": "nsx-vlan-transportzone-904", "segmentation_id": 904, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa5a7398c-26", "ovs_interfaceid": "a5a7398c-2688-4f83-abb5-933faad7d16c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 692.606873] env[63028]: DEBUG oslo_concurrency.lockutils [req-4f7d897a-b629-48ce-919a-79a5f9dd9a7f req-c2e5c871-9f53-4e6a-b711-d330b973fe0c service nova] Releasing lock "refresh_cache-8c7c8713-d5d7-490e-aba5-25d98bfbfaa0" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 692.817713] env[63028]: DEBUG nova.compute.utils [None req-5fa19bdb-228b-42ec-83d1-3d3179548869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 692.821194] env[63028]: DEBUG nova.compute.manager [None req-5fa19bdb-228b-42ec-83d1-3d3179548869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: d6137c80-0c09-4655-b264-472753b4fa9c] Allocating IP information in the background. 
{{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 692.821194] env[63028]: DEBUG nova.network.neutron [None req-5fa19bdb-228b-42ec-83d1-3d3179548869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: d6137c80-0c09-4655-b264-472753b4fa9c] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 692.875512] env[63028]: DEBUG nova.policy [None req-5fa19bdb-228b-42ec-83d1-3d3179548869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a3ed8f5b3d7b4be99d3b4649e156af58', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '847e89af959a4266ab55c1d2106ba8fe', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 692.902668] env[63028]: DEBUG oslo_vmware.api [None req-dbb99f7d-3b35-4de5-9bb1-7d10dfbf7abf tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735214, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.958591] env[63028]: DEBUG oslo_vmware.api [None req-6749eca0-c477-497a-9500-20fbd646e0a7 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5231071a-0434-0203-18a8-8b7785aadc8b, 'name': SearchDatastore_Task, 'duration_secs': 0.011024} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 692.959472] env[63028]: DEBUG oslo_concurrency.lockutils [None req-6749eca0-c477-497a-9500-20fbd646e0a7 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 692.959768] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-6749eca0-c477-497a-9500-20fbd646e0a7 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] c3014718-1064-4ab9-9600-86490489ee4b/c3014718-1064-4ab9-9600-86490489ee4b.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 692.960047] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c88fc2d6-b86f-4876-a1fb-39bc4d0f4ba6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.965405] env[63028]: DEBUG oslo_vmware.api [None req-0f3678b7-8afb-4ff2-9687-0585f2f9d289 tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] Task: {'id': task-2735215, 'name': PowerOffVM_Task, 'duration_secs': 0.209382} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 692.966032] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f3678b7-8afb-4ff2-9687-0585f2f9d289 tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] [instance: 8a09beac-4b54-4fbb-9bac-3dcfe4c21fb3] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 692.966230] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-0f3678b7-8afb-4ff2-9687-0585f2f9d289 tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] [instance: 8a09beac-4b54-4fbb-9bac-3dcfe4c21fb3] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 692.966801] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3588b480-2351-43b6-8d24-bcd42080e17d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.972050] env[63028]: DEBUG oslo_vmware.api [None req-6749eca0-c477-497a-9500-20fbd646e0a7 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Waiting for the task: (returnval){ [ 692.972050] env[63028]: value = "task-2735216" [ 692.972050] env[63028]: _type = "Task" [ 692.972050] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 692.979846] env[63028]: DEBUG oslo_vmware.api [None req-6749eca0-c477-497a-9500-20fbd646e0a7 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Task: {'id': task-2735216, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.032863] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-0f3678b7-8afb-4ff2-9687-0585f2f9d289 tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] [instance: 8a09beac-4b54-4fbb-9bac-3dcfe4c21fb3] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 693.033408] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-0f3678b7-8afb-4ff2-9687-0585f2f9d289 tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] [instance: 8a09beac-4b54-4fbb-9bac-3dcfe4c21fb3] Deleting contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 693.033408] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-0f3678b7-8afb-4ff2-9687-0585f2f9d289 tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] Deleting the datastore file [datastore1] 8a09beac-4b54-4fbb-9bac-3dcfe4c21fb3 {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 693.033587] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5d1c7a69-ede5-4a78-9b1d-ab7796249f4c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.041412] env[63028]: DEBUG oslo_concurrency.lockutils [None req-3d4a8458-0f79-4648-8e42-1868292216b3 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] Releasing lock "refresh_cache-c0db2b2a-9c06-409c-b48b-a0d5c127f2dc" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 693.041412] env[63028]: DEBUG nova.compute.manager [None req-3d4a8458-0f79-4648-8e42-1868292216b3 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] [instance: c0db2b2a-9c06-409c-b48b-a0d5c127f2dc] Instance network_info: |[{"id": "a5a7398c-2688-4f83-abb5-933faad7d16c", "address": "fa:16:3e:74:23:47", "network": {"id": "6f2eed6a-7f4d-4c5e-89fb-f23c7837ab0e", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1905115091-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "87c0a41206824be6a0c74211e80e7181", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98f447de-d71e-41ef-bc37-ed97b4a1f58f", "external-id": "nsx-vlan-transportzone-904", "segmentation_id": 904, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa5a7398c-26", "ovs_interfaceid": "a5a7398c-2688-4f83-abb5-933faad7d16c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 693.041537] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-3d4a8458-0f79-4648-8e42-1868292216b3 tempest-ServersTestFqdnHostnames-804115136 
tempest-ServersTestFqdnHostnames-804115136-project-member] [instance: c0db2b2a-9c06-409c-b48b-a0d5c127f2dc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:74:23:47', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '98f447de-d71e-41ef-bc37-ed97b4a1f58f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a5a7398c-2688-4f83-abb5-933faad7d16c', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 693.048900] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d4a8458-0f79-4648-8e42-1868292216b3 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] Creating folder: Project (87c0a41206824be6a0c74211e80e7181). Parent ref: group-v550570. {{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 693.052863] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-58989921-4d39-417f-b5d1-32059b06d849 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.054684] env[63028]: DEBUG oslo_vmware.api [None req-0f3678b7-8afb-4ff2-9687-0585f2f9d289 tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] Waiting for the task: (returnval){ [ 693.054684] env[63028]: value = "task-2735218" [ 693.054684] env[63028]: _type = "Task" [ 693.054684] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 693.063270] env[63028]: DEBUG oslo_vmware.api [None req-0f3678b7-8afb-4ff2-9687-0585f2f9d289 tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] Task: {'id': task-2735218, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.064467] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-3d4a8458-0f79-4648-8e42-1868292216b3 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] Created folder: Project (87c0a41206824be6a0c74211e80e7181) in parent group-v550570. [ 693.064647] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d4a8458-0f79-4648-8e42-1868292216b3 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] Creating folder: Instances. Parent ref: group-v550679. {{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 693.064882] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6ea05f23-6804-443b-ac5c-4df7eac6af8c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.073995] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-3d4a8458-0f79-4648-8e42-1868292216b3 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] Created folder: Instances in parent group-v550679. [ 693.074373] env[63028]: DEBUG oslo.service.loopingcall [None req-3d4a8458-0f79-4648-8e42-1868292216b3 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 693.074562] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c0db2b2a-9c06-409c-b48b-a0d5c127f2dc] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 693.075230] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-15cc88c2-a9b4-495b-98b0-111e9b6ee051 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.097859] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 693.097859] env[63028]: value = "task-2735221" [ 693.097859] env[63028]: _type = "Task" [ 693.097859] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 693.105125] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735221, 'name': CreateVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.155350] env[63028]: DEBUG nova.compute.manager [req-2119a50d-d80f-4a98-95e0-2cd3e2350bea req-21f4e39a-df51-4fb6-8080-defff53cf64f service nova] [instance: c0db2b2a-9c06-409c-b48b-a0d5c127f2dc] Received event network-vif-plugged-a5a7398c-2688-4f83-abb5-933faad7d16c {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 693.155591] env[63028]: DEBUG oslo_concurrency.lockutils [req-2119a50d-d80f-4a98-95e0-2cd3e2350bea req-21f4e39a-df51-4fb6-8080-defff53cf64f service nova] Acquiring lock "c0db2b2a-9c06-409c-b48b-a0d5c127f2dc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 693.155836] env[63028]: DEBUG oslo_concurrency.lockutils [req-2119a50d-d80f-4a98-95e0-2cd3e2350bea req-21f4e39a-df51-4fb6-8080-defff53cf64f service nova] Lock "c0db2b2a-9c06-409c-b48b-a0d5c127f2dc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 693.156058] env[63028]: DEBUG oslo_concurrency.lockutils [req-2119a50d-d80f-4a98-95e0-2cd3e2350bea req-21f4e39a-df51-4fb6-8080-defff53cf64f service nova] Lock "c0db2b2a-9c06-409c-b48b-a0d5c127f2dc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 693.156248] env[63028]: DEBUG nova.compute.manager [req-2119a50d-d80f-4a98-95e0-2cd3e2350bea req-21f4e39a-df51-4fb6-8080-defff53cf64f service nova] [instance: c0db2b2a-9c06-409c-b48b-a0d5c127f2dc] No waiting events found dispatching network-vif-plugged-a5a7398c-2688-4f83-abb5-933faad7d16c {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 693.156472] env[63028]: WARNING nova.compute.manager [req-2119a50d-d80f-4a98-95e0-2cd3e2350bea req-21f4e39a-df51-4fb6-8080-defff53cf64f service nova] [instance: c0db2b2a-9c06-409c-b48b-a0d5c127f2dc] Received unexpected event network-vif-plugged-a5a7398c-2688-4f83-abb5-933faad7d16c for instance with vm_state building and task_state spawning. 
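The records just above trace Nova's external-instance-event handling for port a5a7398c-2688-4f83-abb5-933faad7d16c: Neutron reports network-vif-plugged, the compute manager takes the per-instance "-events" lock, finds no registered waiter ("No waiting events found dispatching ..."), and emits the "Received unexpected event" warning because the instance is still building. The following is a minimal, hypothetical Python sketch of that expect/dispatch pattern, included only to clarify what the lock and the warning correspond to; the class and method names are illustrative and are not Nova's actual InstanceEvents implementation.

import threading
from collections import defaultdict

class InstanceEventWaiter:
    """Illustrative stand-in for the expected-event pattern seen in the log:
    a spawning thread registers the events it expects (e.g. network-vif-plugged),
    and the external-event handler either wakes that waiter or, when nobody is
    waiting, falls through to a warning. Hypothetical helper, not Nova code."""

    def __init__(self):
        self._lock = threading.Lock()          # plays the role of the "<uuid>-events" lock
        self._waiters = defaultdict(dict)      # instance_uuid -> {event_name: threading.Event}

    def prepare(self, instance_uuid, event_name):
        """Called by the build thread before it triggers the action that raises the event."""
        with self._lock:
            ev = threading.Event()
            self._waiters[instance_uuid][event_name] = ev
            return ev

    def pop(self, instance_uuid, event_name):
        """Called by the external-event handler when Neutron reports the event."""
        with self._lock:
            return self._waiters.get(instance_uuid, {}).pop(event_name, None)

    def dispatch(self, instance_uuid, event_name):
        waiter = self.pop(instance_uuid, event_name)
        if waiter is None:
            # Corresponds to the WARNING above: the event arrived but no one registered for it.
            print(f"WARNING: unexpected event {event_name} for instance {instance_uuid}")
        else:
            waiter.set()

# Usage (sketch): the build thread would call
#   ev = waiter.prepare(uuid, "network-vif-plugged-<port-id>")
#   ... plug the VIF ...
#   ev.wait(timeout=300)
# while the Neutron-driven handler calls waiter.dispatch(uuid, "network-vif-plugged-<port-id>").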
[ 693.156598] env[63028]: DEBUG nova.compute.manager [req-2119a50d-d80f-4a98-95e0-2cd3e2350bea req-21f4e39a-df51-4fb6-8080-defff53cf64f service nova] [instance: c0db2b2a-9c06-409c-b48b-a0d5c127f2dc] Received event network-changed-a5a7398c-2688-4f83-abb5-933faad7d16c {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 693.156869] env[63028]: DEBUG nova.compute.manager [req-2119a50d-d80f-4a98-95e0-2cd3e2350bea req-21f4e39a-df51-4fb6-8080-defff53cf64f service nova] [instance: c0db2b2a-9c06-409c-b48b-a0d5c127f2dc] Refreshing instance network info cache due to event network-changed-a5a7398c-2688-4f83-abb5-933faad7d16c. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 693.157156] env[63028]: DEBUG oslo_concurrency.lockutils [req-2119a50d-d80f-4a98-95e0-2cd3e2350bea req-21f4e39a-df51-4fb6-8080-defff53cf64f service nova] Acquiring lock "refresh_cache-c0db2b2a-9c06-409c-b48b-a0d5c127f2dc" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 693.157453] env[63028]: DEBUG oslo_concurrency.lockutils [req-2119a50d-d80f-4a98-95e0-2cd3e2350bea req-21f4e39a-df51-4fb6-8080-defff53cf64f service nova] Acquired lock "refresh_cache-c0db2b2a-9c06-409c-b48b-a0d5c127f2dc" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 693.157745] env[63028]: DEBUG nova.network.neutron [req-2119a50d-d80f-4a98-95e0-2cd3e2350bea req-21f4e39a-df51-4fb6-8080-defff53cf64f service nova] [instance: c0db2b2a-9c06-409c-b48b-a0d5c127f2dc] Refreshing network info cache for port a5a7398c-2688-4f83-abb5-933faad7d16c {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 693.291419] env[63028]: DEBUG nova.network.neutron [None req-5fa19bdb-228b-42ec-83d1-3d3179548869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: d6137c80-0c09-4655-b264-472753b4fa9c] Successfully created port: 7d007428-6d28-49a8-aa26-6b6ec99613c2 {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 693.324560] env[63028]: DEBUG nova.compute.manager [None req-5fa19bdb-228b-42ec-83d1-3d3179548869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: d6137c80-0c09-4655-b264-472753b4fa9c] Start building block device mappings for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 693.404660] env[63028]: DEBUG oslo_vmware.api [None req-dbb99f7d-3b35-4de5-9bb1-7d10dfbf7abf tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735214, 'name': CreateSnapshot_Task, 'duration_secs': 0.67885} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 693.407951] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-dbb99f7d-3b35-4de5-9bb1-7d10dfbf7abf tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: b9db75ba-6832-45e8-8faf-d1cdaa7dabdd] Created Snapshot of the VM instance {{(pid=63028) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 693.411350] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a091e0fd-9e70-4f15-81fb-f038c11ec49a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.438327] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87b1b5d4-6b48-4e82-9116-b28582bbf106 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.445452] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c9fdd88-75a8-4d62-9592-9926519e2494 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.483294] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd0e0159-d7d9-4ea9-ae63-6d6fd461f172 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.491380] env[63028]: DEBUG oslo_vmware.api [None req-6749eca0-c477-497a-9500-20fbd646e0a7 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Task: {'id': task-2735216, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.494696] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9243a9f2-8f48-45fa-b141-b575c43c8c28 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.508515] env[63028]: DEBUG nova.compute.provider_tree [None req-7c131164-be90-408a-908b-02c8df7872e2 tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 693.574971] env[63028]: DEBUG oslo_vmware.api [None req-0f3678b7-8afb-4ff2-9687-0585f2f9d289 tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] Task: {'id': task-2735218, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.197524} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 693.575274] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-0f3678b7-8afb-4ff2-9687-0585f2f9d289 tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 693.575461] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-0f3678b7-8afb-4ff2-9687-0585f2f9d289 tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] [instance: 8a09beac-4b54-4fbb-9bac-3dcfe4c21fb3] Deleted contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 693.575640] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-0f3678b7-8afb-4ff2-9687-0585f2f9d289 tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] [instance: 8a09beac-4b54-4fbb-9bac-3dcfe4c21fb3] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 693.576069] env[63028]: INFO nova.compute.manager [None req-0f3678b7-8afb-4ff2-9687-0585f2f9d289 tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] [instance: 8a09beac-4b54-4fbb-9bac-3dcfe4c21fb3] Took 1.15 seconds to destroy the instance on the hypervisor. [ 693.576348] env[63028]: DEBUG oslo.service.loopingcall [None req-0f3678b7-8afb-4ff2-9687-0585f2f9d289 tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 693.576545] env[63028]: DEBUG nova.compute.manager [-] [instance: 8a09beac-4b54-4fbb-9bac-3dcfe4c21fb3] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 693.576637] env[63028]: DEBUG nova.network.neutron [-] [instance: 8a09beac-4b54-4fbb-9bac-3dcfe4c21fb3] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 693.606649] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735221, 'name': CreateVM_Task} progress is 99%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.927316] env[63028]: DEBUG nova.network.neutron [req-2119a50d-d80f-4a98-95e0-2cd3e2350bea req-21f4e39a-df51-4fb6-8080-defff53cf64f service nova] [instance: c0db2b2a-9c06-409c-b48b-a0d5c127f2dc] Updated VIF entry in instance network info cache for port a5a7398c-2688-4f83-abb5-933faad7d16c. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 693.927316] env[63028]: DEBUG nova.network.neutron [req-2119a50d-d80f-4a98-95e0-2cd3e2350bea req-21f4e39a-df51-4fb6-8080-defff53cf64f service nova] [instance: c0db2b2a-9c06-409c-b48b-a0d5c127f2dc] Updating instance_info_cache with network_info: [{"id": "a5a7398c-2688-4f83-abb5-933faad7d16c", "address": "fa:16:3e:74:23:47", "network": {"id": "6f2eed6a-7f4d-4c5e-89fb-f23c7837ab0e", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1905115091-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "87c0a41206824be6a0c74211e80e7181", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98f447de-d71e-41ef-bc37-ed97b4a1f58f", "external-id": "nsx-vlan-transportzone-904", "segmentation_id": 904, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa5a7398c-26", "ovs_interfaceid": "a5a7398c-2688-4f83-abb5-933faad7d16c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 693.940535] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-dbb99f7d-3b35-4de5-9bb1-7d10dfbf7abf tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: b9db75ba-6832-45e8-8faf-d1cdaa7dabdd] Creating linked-clone VM from snapshot {{(pid=63028) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 693.941505] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-5c4834bc-2e8e-42b3-9641-28c3f9bae44e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.954128] env[63028]: DEBUG oslo_vmware.api [None req-dbb99f7d-3b35-4de5-9bb1-7d10dfbf7abf tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Waiting for the task: (returnval){ [ 693.954128] env[63028]: value = "task-2735222" [ 693.954128] env[63028]: _type = "Task" [ 693.954128] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 693.963869] env[63028]: DEBUG oslo_vmware.api [None req-dbb99f7d-3b35-4de5-9bb1-7d10dfbf7abf tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735222, 'name': CloneVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.992632] env[63028]: DEBUG oslo_vmware.api [None req-6749eca0-c477-497a-9500-20fbd646e0a7 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Task: {'id': task-2735216, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.561231} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 693.992901] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-6749eca0-c477-497a-9500-20fbd646e0a7 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] c3014718-1064-4ab9-9600-86490489ee4b/c3014718-1064-4ab9-9600-86490489ee4b.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 693.993135] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-6749eca0-c477-497a-9500-20fbd646e0a7 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: c3014718-1064-4ab9-9600-86490489ee4b] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 693.993388] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2813f084-4730-432a-980e-bb12619e00b1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.999452] env[63028]: DEBUG oslo_vmware.api [None req-6749eca0-c477-497a-9500-20fbd646e0a7 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Waiting for the task: (returnval){ [ 693.999452] env[63028]: value = "task-2735223" [ 693.999452] env[63028]: _type = "Task" [ 693.999452] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 694.008240] env[63028]: DEBUG oslo_vmware.api [None req-6749eca0-c477-497a-9500-20fbd646e0a7 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Task: {'id': task-2735223, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.012814] env[63028]: DEBUG nova.scheduler.client.report [None req-7c131164-be90-408a-908b-02c8df7872e2 tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 694.110960] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735221, 'name': CreateVM_Task, 'duration_secs': 0.566889} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 694.111637] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c0db2b2a-9c06-409c-b48b-a0d5c127f2dc] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 694.112218] env[63028]: DEBUG oslo_concurrency.lockutils [None req-3d4a8458-0f79-4648-8e42-1868292216b3 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 694.112485] env[63028]: DEBUG oslo_concurrency.lockutils [None req-3d4a8458-0f79-4648-8e42-1868292216b3 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 694.112941] env[63028]: DEBUG oslo_concurrency.lockutils [None req-3d4a8458-0f79-4648-8e42-1868292216b3 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 694.113361] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f66455bd-0d58-4b50-b97c-2caf251a2627 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.119593] env[63028]: DEBUG oslo_vmware.api [None req-3d4a8458-0f79-4648-8e42-1868292216b3 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] Waiting for the task: (returnval){ [ 694.119593] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52574727-e4cc-5bdf-a801-30d22117d942" [ 694.119593] env[63028]: _type = "Task" [ 694.119593] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 694.132430] env[63028]: DEBUG oslo_vmware.api [None req-3d4a8458-0f79-4648-8e42-1868292216b3 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52574727-e4cc-5bdf-a801-30d22117d942, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.338171] env[63028]: DEBUG nova.compute.manager [None req-5fa19bdb-228b-42ec-83d1-3d3179548869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: d6137c80-0c09-4655-b264-472753b4fa9c] Start spawning the instance on the hypervisor. 
{{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 694.359179] env[63028]: DEBUG nova.virt.hardware [None req-5fa19bdb-228b-42ec-83d1-3d3179548869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 694.359438] env[63028]: DEBUG nova.virt.hardware [None req-5fa19bdb-228b-42ec-83d1-3d3179548869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 694.359595] env[63028]: DEBUG nova.virt.hardware [None req-5fa19bdb-228b-42ec-83d1-3d3179548869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 694.359773] env[63028]: DEBUG nova.virt.hardware [None req-5fa19bdb-228b-42ec-83d1-3d3179548869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 694.360047] env[63028]: DEBUG nova.virt.hardware [None req-5fa19bdb-228b-42ec-83d1-3d3179548869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 694.360219] env[63028]: DEBUG nova.virt.hardware [None req-5fa19bdb-228b-42ec-83d1-3d3179548869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 694.360436] env[63028]: DEBUG nova.virt.hardware [None req-5fa19bdb-228b-42ec-83d1-3d3179548869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 694.360596] env[63028]: DEBUG nova.virt.hardware [None req-5fa19bdb-228b-42ec-83d1-3d3179548869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 694.360762] env[63028]: DEBUG nova.virt.hardware [None 
req-5fa19bdb-228b-42ec-83d1-3d3179548869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 694.360940] env[63028]: DEBUG nova.virt.hardware [None req-5fa19bdb-228b-42ec-83d1-3d3179548869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 694.361127] env[63028]: DEBUG nova.virt.hardware [None req-5fa19bdb-228b-42ec-83d1-3d3179548869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 694.361981] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c43186e9-4def-49fc-bbea-e6b7e001786f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.364709] env[63028]: DEBUG nova.network.neutron [-] [instance: 8a09beac-4b54-4fbb-9bac-3dcfe4c21fb3] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 694.371617] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5826b91e-2dcf-4cb5-b05c-b5424da314bd {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.442920] env[63028]: DEBUG oslo_concurrency.lockutils [req-2119a50d-d80f-4a98-95e0-2cd3e2350bea req-21f4e39a-df51-4fb6-8080-defff53cf64f service nova] Releasing lock "refresh_cache-c0db2b2a-9c06-409c-b48b-a0d5c127f2dc" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 694.464401] env[63028]: DEBUG oslo_vmware.api [None req-dbb99f7d-3b35-4de5-9bb1-7d10dfbf7abf tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735222, 'name': CloneVM_Task} progress is 94%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.508775] env[63028]: DEBUG oslo_vmware.api [None req-6749eca0-c477-497a-9500-20fbd646e0a7 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Task: {'id': task-2735223, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.250918} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 694.509102] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-6749eca0-c477-497a-9500-20fbd646e0a7 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: c3014718-1064-4ab9-9600-86490489ee4b] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 694.509775] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8b8f15b-4fa9-469c-898e-930d0b313c1c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.525065] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7c131164-be90-408a-908b-02c8df7872e2 tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.213s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 694.536158] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-6749eca0-c477-497a-9500-20fbd646e0a7 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: c3014718-1064-4ab9-9600-86490489ee4b] Reconfiguring VM instance instance-00000021 to attach disk [datastore2] c3014718-1064-4ab9-9600-86490489ee4b/c3014718-1064-4ab9-9600-86490489ee4b.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 694.536916] env[63028]: DEBUG oslo_concurrency.lockutils [None req-41141995-e826-4e05-8236-cab93d9f99c2 tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 40.226s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 694.537404] env[63028]: DEBUG nova.objects.instance [None req-41141995-e826-4e05-8236-cab93d9f99c2 tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Lazy-loading 'resources' on Instance uuid 99886410-ec47-46ad-9d07-ee3593006997 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 694.538570] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-de2538e2-a6cd-4ddc-8192-05ecc76e9f42 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.557578] env[63028]: INFO nova.scheduler.client.report [None req-7c131164-be90-408a-908b-02c8df7872e2 tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Deleted allocations for instance 2ae111f7-4eaa-46c2-ab97-907daa913834 [ 694.563870] env[63028]: DEBUG oslo_vmware.api [None req-6749eca0-c477-497a-9500-20fbd646e0a7 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Waiting for the task: (returnval){ [ 694.563870] env[63028]: value = "task-2735224" [ 694.563870] env[63028]: _type = "Task" [ 694.563870] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 694.572485] env[63028]: DEBUG oslo_vmware.api [None req-6749eca0-c477-497a-9500-20fbd646e0a7 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Task: {'id': task-2735224, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.631714] env[63028]: DEBUG oslo_vmware.api [None req-3d4a8458-0f79-4648-8e42-1868292216b3 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52574727-e4cc-5bdf-a801-30d22117d942, 'name': SearchDatastore_Task, 'duration_secs': 0.032408} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 694.631991] env[63028]: DEBUG oslo_concurrency.lockutils [None req-3d4a8458-0f79-4648-8e42-1868292216b3 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 694.632228] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-3d4a8458-0f79-4648-8e42-1868292216b3 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] [instance: c0db2b2a-9c06-409c-b48b-a0d5c127f2dc] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 694.632453] env[63028]: DEBUG oslo_concurrency.lockutils [None req-3d4a8458-0f79-4648-8e42-1868292216b3 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 694.632592] env[63028]: DEBUG oslo_concurrency.lockutils [None req-3d4a8458-0f79-4648-8e42-1868292216b3 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 694.633394] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-3d4a8458-0f79-4648-8e42-1868292216b3 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 694.633394] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3be76f86-0c9b-47af-b774-c882778d5af5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.641247] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-3d4a8458-0f79-4648-8e42-1868292216b3 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 694.641419] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-3d4a8458-0f79-4648-8e42-1868292216b3 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 694.642131] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a6621936-4080-493c-8a2d-79c6c96d8a72 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.646845] env[63028]: DEBUG oslo_vmware.api [None req-3d4a8458-0f79-4648-8e42-1868292216b3 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] Waiting for the task: (returnval){ [ 694.646845] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]529b23e3-f0fe-aa89-fd79-81ce95038864" [ 694.646845] env[63028]: _type = "Task" [ 694.646845] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 694.657236] env[63028]: DEBUG oslo_vmware.api [None req-3d4a8458-0f79-4648-8e42-1868292216b3 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]529b23e3-f0fe-aa89-fd79-81ce95038864, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.868443] env[63028]: INFO nova.compute.manager [-] [instance: 8a09beac-4b54-4fbb-9bac-3dcfe4c21fb3] Took 1.29 seconds to deallocate network for instance. [ 694.945539] env[63028]: DEBUG nova.network.neutron [None req-5fa19bdb-228b-42ec-83d1-3d3179548869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: d6137c80-0c09-4655-b264-472753b4fa9c] Successfully updated port: 7d007428-6d28-49a8-aa26-6b6ec99613c2 {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 694.965893] env[63028]: DEBUG oslo_vmware.api [None req-dbb99f7d-3b35-4de5-9bb1-7d10dfbf7abf tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735222, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.042696] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ec16372-815a-44cb-bdd6-cc75975cd450 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.051742] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a896da0-380e-452f-9d90-3a10b4ce39f0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.089775] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7c131164-be90-408a-908b-02c8df7872e2 tempest-ServerShowV257Test-343532561 tempest-ServerShowV257Test-343532561-project-member] Lock "2ae111f7-4eaa-46c2-ab97-907daa913834" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 46.600s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 695.094389] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38975b9c-f48c-432c-929c-6963cd6208b4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.105526] env[63028]: DEBUG oslo_vmware.api [None req-6749eca0-c477-497a-9500-20fbd646e0a7 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Task: {'id': task-2735224, 'name': ReconfigVM_Task, 'duration_secs': 0.384431} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 695.106873] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-6749eca0-c477-497a-9500-20fbd646e0a7 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: c3014718-1064-4ab9-9600-86490489ee4b] Reconfigured VM instance instance-00000021 to attach disk [datastore2] c3014718-1064-4ab9-9600-86490489ee4b/c3014718-1064-4ab9-9600-86490489ee4b.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 695.108544] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-349d9c32-d8d6-470a-b122-7c46f852f74f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.111019] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8d7f58a-1d97-4bb3-a9a4-3e363a7ca712 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.130249] env[63028]: DEBUG oslo_vmware.api [None req-6749eca0-c477-497a-9500-20fbd646e0a7 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Waiting for the task: (returnval){ [ 695.130249] env[63028]: value = "task-2735225" [ 695.130249] env[63028]: _type = "Task" [ 695.130249] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 695.130913] env[63028]: DEBUG nova.compute.provider_tree [None req-41141995-e826-4e05-8236-cab93d9f99c2 tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 695.140569] env[63028]: DEBUG oslo_vmware.api [None req-6749eca0-c477-497a-9500-20fbd646e0a7 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Task: {'id': task-2735225, 'name': Rename_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.158083] env[63028]: DEBUG oslo_vmware.api [None req-3d4a8458-0f79-4648-8e42-1868292216b3 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]529b23e3-f0fe-aa89-fd79-81ce95038864, 'name': SearchDatastore_Task, 'duration_secs': 0.008893} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 695.158952] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-30cb0706-f6f6-456b-85c1-612a0ed7d2d2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.164381] env[63028]: DEBUG oslo_vmware.api [None req-3d4a8458-0f79-4648-8e42-1868292216b3 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] Waiting for the task: (returnval){ [ 695.164381] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52a1c4e8-177c-0c0a-4e6e-ae9e4873ddb7" [ 695.164381] env[63028]: _type = "Task" [ 695.164381] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 695.172354] env[63028]: DEBUG oslo_vmware.api [None req-3d4a8458-0f79-4648-8e42-1868292216b3 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52a1c4e8-177c-0c0a-4e6e-ae9e4873ddb7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.188400] env[63028]: DEBUG nova.compute.manager [req-98498424-0f5e-4124-94a4-19b07c0af368 req-a3fa6be2-fd48-4a1d-a10d-70d5bfc9a56e service nova] [instance: 8a09beac-4b54-4fbb-9bac-3dcfe4c21fb3] Received event network-vif-deleted-4c75ac24-25df-4c2f-8ce9-e0b2ec182139 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 695.188599] env[63028]: DEBUG nova.compute.manager [req-98498424-0f5e-4124-94a4-19b07c0af368 req-a3fa6be2-fd48-4a1d-a10d-70d5bfc9a56e service nova] [instance: d6137c80-0c09-4655-b264-472753b4fa9c] Received event network-vif-plugged-7d007428-6d28-49a8-aa26-6b6ec99613c2 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 695.188804] env[63028]: DEBUG oslo_concurrency.lockutils [req-98498424-0f5e-4124-94a4-19b07c0af368 req-a3fa6be2-fd48-4a1d-a10d-70d5bfc9a56e service nova] Acquiring lock "d6137c80-0c09-4655-b264-472753b4fa9c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 695.189009] env[63028]: DEBUG oslo_concurrency.lockutils [req-98498424-0f5e-4124-94a4-19b07c0af368 req-a3fa6be2-fd48-4a1d-a10d-70d5bfc9a56e service nova] Lock "d6137c80-0c09-4655-b264-472753b4fa9c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 695.189195] env[63028]: DEBUG oslo_concurrency.lockutils [req-98498424-0f5e-4124-94a4-19b07c0af368 req-a3fa6be2-fd48-4a1d-a10d-70d5bfc9a56e service nova] Lock "d6137c80-0c09-4655-b264-472753b4fa9c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 695.189421] env[63028]: DEBUG nova.compute.manager [req-98498424-0f5e-4124-94a4-19b07c0af368 req-a3fa6be2-fd48-4a1d-a10d-70d5bfc9a56e service nova] [instance: d6137c80-0c09-4655-b264-472753b4fa9c] No waiting events found dispatching network-vif-plugged-7d007428-6d28-49a8-aa26-6b6ec99613c2 {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 695.189558] env[63028]: WARNING nova.compute.manager [req-98498424-0f5e-4124-94a4-19b07c0af368 req-a3fa6be2-fd48-4a1d-a10d-70d5bfc9a56e service nova] [instance: d6137c80-0c09-4655-b264-472753b4fa9c] Received unexpected event network-vif-plugged-7d007428-6d28-49a8-aa26-6b6ec99613c2 for instance with vm_state building and task_state spawning. [ 695.189708] env[63028]: DEBUG nova.compute.manager [req-98498424-0f5e-4124-94a4-19b07c0af368 req-a3fa6be2-fd48-4a1d-a10d-70d5bfc9a56e service nova] [instance: d6137c80-0c09-4655-b264-472753b4fa9c] Received event network-changed-7d007428-6d28-49a8-aa26-6b6ec99613c2 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 695.189817] env[63028]: DEBUG nova.compute.manager [req-98498424-0f5e-4124-94a4-19b07c0af368 req-a3fa6be2-fd48-4a1d-a10d-70d5bfc9a56e service nova] [instance: d6137c80-0c09-4655-b264-472753b4fa9c] Refreshing instance network info cache due to event network-changed-7d007428-6d28-49a8-aa26-6b6ec99613c2. 
{{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 695.190014] env[63028]: DEBUG oslo_concurrency.lockutils [req-98498424-0f5e-4124-94a4-19b07c0af368 req-a3fa6be2-fd48-4a1d-a10d-70d5bfc9a56e service nova] Acquiring lock "refresh_cache-d6137c80-0c09-4655-b264-472753b4fa9c" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 695.190269] env[63028]: DEBUG oslo_concurrency.lockutils [req-98498424-0f5e-4124-94a4-19b07c0af368 req-a3fa6be2-fd48-4a1d-a10d-70d5bfc9a56e service nova] Acquired lock "refresh_cache-d6137c80-0c09-4655-b264-472753b4fa9c" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 695.190333] env[63028]: DEBUG nova.network.neutron [req-98498424-0f5e-4124-94a4-19b07c0af368 req-a3fa6be2-fd48-4a1d-a10d-70d5bfc9a56e service nova] [instance: d6137c80-0c09-4655-b264-472753b4fa9c] Refreshing network info cache for port 7d007428-6d28-49a8-aa26-6b6ec99613c2 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 695.377474] env[63028]: DEBUG oslo_concurrency.lockutils [None req-0f3678b7-8afb-4ff2-9687-0585f2f9d289 tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 695.448752] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5fa19bdb-228b-42ec-83d1-3d3179548869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Acquiring lock "refresh_cache-d6137c80-0c09-4655-b264-472753b4fa9c" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 695.466217] env[63028]: DEBUG oslo_vmware.api [None req-dbb99f7d-3b35-4de5-9bb1-7d10dfbf7abf tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735222, 'name': CloneVM_Task, 'duration_secs': 1.324437} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 695.466217] env[63028]: INFO nova.virt.vmwareapi.vmops [None req-dbb99f7d-3b35-4de5-9bb1-7d10dfbf7abf tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: b9db75ba-6832-45e8-8faf-d1cdaa7dabdd] Created linked-clone VM from snapshot [ 695.466743] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48d46b75-e677-4ad4-8333-6b20c1455947 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.476588] env[63028]: DEBUG nova.virt.vmwareapi.images [None req-dbb99f7d-3b35-4de5-9bb1-7d10dfbf7abf tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: b9db75ba-6832-45e8-8faf-d1cdaa7dabdd] Uploading image de8a25c2-f41a-4e1f-8b5d-6ab5bcd87f55 {{(pid=63028) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 695.507568] env[63028]: DEBUG oslo_vmware.rw_handles [None req-dbb99f7d-3b35-4de5-9bb1-7d10dfbf7abf tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 695.507568] env[63028]: value = "vm-550682" [ 695.507568] env[63028]: _type = "VirtualMachine" [ 695.507568] env[63028]: }. {{(pid=63028) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 695.507913] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-b67cfcf2-7dd9-403e-b297-fe716f6b3e64 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.517137] env[63028]: DEBUG oslo_vmware.rw_handles [None req-dbb99f7d-3b35-4de5-9bb1-7d10dfbf7abf tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Lease: (returnval){ [ 695.517137] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]520fa61c-e3cb-1efd-e448-b9720a4fc30d" [ 695.517137] env[63028]: _type = "HttpNfcLease" [ 695.517137] env[63028]: } obtained for exporting VM: (result){ [ 695.517137] env[63028]: value = "vm-550682" [ 695.517137] env[63028]: _type = "VirtualMachine" [ 695.517137] env[63028]: }. {{(pid=63028) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 695.517391] env[63028]: DEBUG oslo_vmware.api [None req-dbb99f7d-3b35-4de5-9bb1-7d10dfbf7abf tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Waiting for the lease: (returnval){ [ 695.517391] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]520fa61c-e3cb-1efd-e448-b9720a4fc30d" [ 695.517391] env[63028]: _type = "HttpNfcLease" [ 695.517391] env[63028]: } to be ready. {{(pid=63028) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 695.524345] env[63028]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 695.524345] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]520fa61c-e3cb-1efd-e448-b9720a4fc30d" [ 695.524345] env[63028]: _type = "HttpNfcLease" [ 695.524345] env[63028]: } is initializing. 
{{(pid=63028) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 695.637417] env[63028]: DEBUG nova.scheduler.client.report [None req-41141995-e826-4e05-8236-cab93d9f99c2 tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 695.645944] env[63028]: DEBUG oslo_vmware.api [None req-6749eca0-c477-497a-9500-20fbd646e0a7 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Task: {'id': task-2735225, 'name': Rename_Task, 'duration_secs': 0.140853} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 695.646227] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-6749eca0-c477-497a-9500-20fbd646e0a7 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: c3014718-1064-4ab9-9600-86490489ee4b] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 695.646422] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2d9f46f1-40e7-4bb1-9fc9-1a8448e81c22 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.652713] env[63028]: DEBUG oslo_vmware.api [None req-6749eca0-c477-497a-9500-20fbd646e0a7 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Waiting for the task: (returnval){ [ 695.652713] env[63028]: value = "task-2735227" [ 695.652713] env[63028]: _type = "Task" [ 695.652713] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 695.661662] env[63028]: DEBUG oslo_vmware.api [None req-6749eca0-c477-497a-9500-20fbd646e0a7 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Task: {'id': task-2735227, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.674898] env[63028]: DEBUG oslo_vmware.api [None req-3d4a8458-0f79-4648-8e42-1868292216b3 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52a1c4e8-177c-0c0a-4e6e-ae9e4873ddb7, 'name': SearchDatastore_Task, 'duration_secs': 0.010199} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 695.675361] env[63028]: DEBUG oslo_concurrency.lockutils [None req-3d4a8458-0f79-4648-8e42-1868292216b3 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 695.675428] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d4a8458-0f79-4648-8e42-1868292216b3 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] c0db2b2a-9c06-409c-b48b-a0d5c127f2dc/c0db2b2a-9c06-409c-b48b-a0d5c127f2dc.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 695.675678] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b9802297-e703-45cd-bd08-1d21885fb9a2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.682517] env[63028]: DEBUG oslo_vmware.api [None req-3d4a8458-0f79-4648-8e42-1868292216b3 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] Waiting for the task: (returnval){ [ 695.682517] env[63028]: value = "task-2735228" [ 695.682517] env[63028]: _type = "Task" [ 695.682517] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 695.690443] env[63028]: DEBUG oslo_vmware.api [None req-3d4a8458-0f79-4648-8e42-1868292216b3 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] Task: {'id': task-2735228, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.723993] env[63028]: DEBUG nova.network.neutron [req-98498424-0f5e-4124-94a4-19b07c0af368 req-a3fa6be2-fd48-4a1d-a10d-70d5bfc9a56e service nova] [instance: d6137c80-0c09-4655-b264-472753b4fa9c] Instance cache missing network info. {{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 695.816900] env[63028]: DEBUG nova.network.neutron [req-98498424-0f5e-4124-94a4-19b07c0af368 req-a3fa6be2-fd48-4a1d-a10d-70d5bfc9a56e service nova] [instance: d6137c80-0c09-4655-b264-472753b4fa9c] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 696.025406] env[63028]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 696.025406] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]520fa61c-e3cb-1efd-e448-b9720a4fc30d" [ 696.025406] env[63028]: _type = "HttpNfcLease" [ 696.025406] env[63028]: } is ready. 
{{(pid=63028) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 696.025753] env[63028]: DEBUG oslo_vmware.rw_handles [None req-dbb99f7d-3b35-4de5-9bb1-7d10dfbf7abf tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 696.025753] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]520fa61c-e3cb-1efd-e448-b9720a4fc30d" [ 696.025753] env[63028]: _type = "HttpNfcLease" [ 696.025753] env[63028]: }. {{(pid=63028) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 696.026555] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e6aac46-7528-4db1-b661-385ff6b4bebd {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.034309] env[63028]: DEBUG oslo_vmware.rw_handles [None req-dbb99f7d-3b35-4de5-9bb1-7d10dfbf7abf tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52896259-cb0d-b23b-e3ea-45a4d8334fc5/disk-0.vmdk from lease info. {{(pid=63028) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 696.034489] env[63028]: DEBUG oslo_vmware.rw_handles [None req-dbb99f7d-3b35-4de5-9bb1-7d10dfbf7abf tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52896259-cb0d-b23b-e3ea-45a4d8334fc5/disk-0.vmdk for reading. {{(pid=63028) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 696.142424] env[63028]: DEBUG oslo_concurrency.lockutils [None req-41141995-e826-4e05-8236-cab93d9f99c2 tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.605s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 696.144882] env[63028]: DEBUG oslo_concurrency.lockutils [None req-3778db47-461e-4bfc-8c37-f51d5973e5b9 tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 39.836s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 696.145138] env[63028]: DEBUG nova.objects.instance [None req-3778db47-461e-4bfc-8c37-f51d5973e5b9 tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Lazy-loading 'resources' on Instance uuid c9cc1ac7-06c6-415b-86ce-daf4849bfc05 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 696.161941] env[63028]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-2574c6c0-2620-44ae-acf0-666a9c6ee8a5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.167933] env[63028]: DEBUG oslo_vmware.api [None req-6749eca0-c477-497a-9500-20fbd646e0a7 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Task: {'id': task-2735227, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.170155] env[63028]: INFO nova.scheduler.client.report [None req-41141995-e826-4e05-8236-cab93d9f99c2 tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Deleted allocations for instance 99886410-ec47-46ad-9d07-ee3593006997 [ 696.193209] env[63028]: DEBUG oslo_vmware.api [None req-3d4a8458-0f79-4648-8e42-1868292216b3 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] Task: {'id': task-2735228, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.487017} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 696.198814] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d4a8458-0f79-4648-8e42-1868292216b3 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] c0db2b2a-9c06-409c-b48b-a0d5c127f2dc/c0db2b2a-9c06-409c-b48b-a0d5c127f2dc.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 696.199275] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-3d4a8458-0f79-4648-8e42-1868292216b3 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] [instance: c0db2b2a-9c06-409c-b48b-a0d5c127f2dc] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 696.199899] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cbec1600-45af-46ae-b0c1-6e834901e2d5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.207059] env[63028]: DEBUG oslo_vmware.api [None req-3d4a8458-0f79-4648-8e42-1868292216b3 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] Waiting for the task: (returnval){ [ 696.207059] env[63028]: value = "task-2735229" [ 696.207059] env[63028]: _type = "Task" [ 696.207059] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 696.215606] env[63028]: DEBUG oslo_vmware.api [None req-3d4a8458-0f79-4648-8e42-1868292216b3 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] Task: {'id': task-2735229, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.319936] env[63028]: DEBUG oslo_concurrency.lockutils [req-98498424-0f5e-4124-94a4-19b07c0af368 req-a3fa6be2-fd48-4a1d-a10d-70d5bfc9a56e service nova] Releasing lock "refresh_cache-d6137c80-0c09-4655-b264-472753b4fa9c" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 696.319936] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5fa19bdb-228b-42ec-83d1-3d3179548869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Acquired lock "refresh_cache-d6137c80-0c09-4655-b264-472753b4fa9c" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 696.319936] env[63028]: DEBUG nova.network.neutron [None req-5fa19bdb-228b-42ec-83d1-3d3179548869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: d6137c80-0c09-4655-b264-472753b4fa9c] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 696.667691] env[63028]: DEBUG oslo_vmware.api [None req-6749eca0-c477-497a-9500-20fbd646e0a7 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Task: {'id': task-2735227, 'name': PowerOnVM_Task, 'duration_secs': 0.645999} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 696.668344] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-6749eca0-c477-497a-9500-20fbd646e0a7 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: c3014718-1064-4ab9-9600-86490489ee4b] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 696.668756] env[63028]: INFO nova.compute.manager [None req-6749eca0-c477-497a-9500-20fbd646e0a7 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: c3014718-1064-4ab9-9600-86490489ee4b] Took 8.77 seconds to spawn the instance on the hypervisor. 
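[editor's note] The PowerOnVM_Task records above (task invoked, periodic "progress is N%" polls from _poll_task, then "completed successfully") follow oslo.vmware's generic invoke-and-wait pattern. The snippet below is a minimal illustrative sketch of that pattern only: the vCenter host, credentials, and managed-object reference are placeholders, and Nova itself drives these calls through its nova.virt.vmwareapi session and vm_util wrappers rather than using oslo.vmware directly like this.

    # Minimal sketch, assuming placeholder vCenter host/credentials and a
    # placeholder moref value; not Nova's actual code path.
    from oslo_vmware import api
    from oslo_vmware import vim_util

    session = api.VMwareAPISession(
        'vc1.example.test',              # vCenter host (placeholder)
        'administrator@vsphere.local',   # user (placeholder)
        'secret',                        # password (placeholder)
        api_retry_count=10,
        task_poll_interval=0.5,          # drives the periodic "progress is N%" polls
    )

    # Build a managed object reference for the VM (placeholder value).
    vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')

    # Start an asynchronous vCenter task, as in the
    # "Invoking VirtualMachine.PowerOnVM_Task ..." records above.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

    # Block until the task finishes; oslo.vmware polls task.info and logs
    # "Task: {'id': ...} progress is N%" until it reaches success or error.
    task_info = session.wait_for_task(task)
    print(task_info.state)

The same invoke/wait sequence underlies the CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task and CreateVM_Task entries that follow.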
[ 696.673659] env[63028]: DEBUG nova.compute.manager [None req-6749eca0-c477-497a-9500-20fbd646e0a7 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: c3014718-1064-4ab9-9600-86490489ee4b] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 696.673659] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9874802-f7a0-4e87-8aae-c9eac9ffbb08 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.685167] env[63028]: DEBUG oslo_concurrency.lockutils [None req-41141995-e826-4e05-8236-cab93d9f99c2 tempest-ServerMetadataTestJSON-1491167938 tempest-ServerMetadataTestJSON-1491167938-project-member] Lock "99886410-ec47-46ad-9d07-ee3593006997" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 45.963s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 696.718650] env[63028]: DEBUG oslo_vmware.api [None req-3d4a8458-0f79-4648-8e42-1868292216b3 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] Task: {'id': task-2735229, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063716} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 696.718650] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-3d4a8458-0f79-4648-8e42-1868292216b3 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] [instance: c0db2b2a-9c06-409c-b48b-a0d5c127f2dc] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 696.722481] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc52f6aa-0671-4093-9700-955349e66530 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.748833] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-3d4a8458-0f79-4648-8e42-1868292216b3 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] [instance: c0db2b2a-9c06-409c-b48b-a0d5c127f2dc] Reconfiguring VM instance instance-00000022 to attach disk [datastore2] c0db2b2a-9c06-409c-b48b-a0d5c127f2dc/c0db2b2a-9c06-409c-b48b-a0d5c127f2dc.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 696.752513] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0ab613ca-b695-4373-bbd5-336e552ad8be {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.774161] env[63028]: DEBUG oslo_vmware.api [None req-3d4a8458-0f79-4648-8e42-1868292216b3 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] Waiting for the task: (returnval){ [ 696.774161] env[63028]: value = "task-2735230" [ 696.774161] env[63028]: _type = "Task" [ 696.774161] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 696.783175] env[63028]: DEBUG oslo_vmware.api [None req-3d4a8458-0f79-4648-8e42-1868292216b3 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] Task: {'id': task-2735230, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.882824] env[63028]: DEBUG nova.network.neutron [None req-5fa19bdb-228b-42ec-83d1-3d3179548869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: d6137c80-0c09-4655-b264-472753b4fa9c] Instance cache missing network info. {{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 697.209935] env[63028]: INFO nova.compute.manager [None req-6749eca0-c477-497a-9500-20fbd646e0a7 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: c3014718-1064-4ab9-9600-86490489ee4b] Took 50.91 seconds to build instance. [ 697.271065] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3048c2e5-6c63-4d96-8971-d17e667547da {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.282147] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1975fa78-379f-4d70-bdad-2ec97c722c50 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.288497] env[63028]: DEBUG oslo_vmware.api [None req-3d4a8458-0f79-4648-8e42-1868292216b3 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] Task: {'id': task-2735230, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 697.328319] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06d8a58c-a719-4019-84a0-d5ee55a6a85c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.343721] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f59618b7-08a8-4cd4-b925-01f426f4861b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.364028] env[63028]: DEBUG nova.compute.provider_tree [None req-3778db47-461e-4bfc-8c37-f51d5973e5b9 tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 697.439625] env[63028]: DEBUG nova.network.neutron [None req-5fa19bdb-228b-42ec-83d1-3d3179548869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: d6137c80-0c09-4655-b264-472753b4fa9c] Updating instance_info_cache with network_info: [{"id": "7d007428-6d28-49a8-aa26-6b6ec99613c2", "address": "fa:16:3e:11:ca:39", "network": {"id": "a3d9dede-4b44-4172-9541-24cc42ad633f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-595575190-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "847e89af959a4266ab55c1d2106ba8fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8459aaf-d6a8-46fb-ad14-464ac3104695", "external-id": "nsx-vlan-transportzone-46", "segmentation_id": 46, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7d007428-6d", "ovs_interfaceid": "7d007428-6d28-49a8-aa26-6b6ec99613c2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 697.712241] env[63028]: DEBUG oslo_concurrency.lockutils [None req-6749eca0-c477-497a-9500-20fbd646e0a7 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Lock "c3014718-1064-4ab9-9600-86490489ee4b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 77.328s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 697.788851] env[63028]: DEBUG oslo_vmware.api [None req-3d4a8458-0f79-4648-8e42-1868292216b3 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] Task: {'id': task-2735230, 'name': ReconfigVM_Task, 'duration_secs': 0.674149} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 697.789354] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-3d4a8458-0f79-4648-8e42-1868292216b3 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] [instance: c0db2b2a-9c06-409c-b48b-a0d5c127f2dc] Reconfigured VM instance instance-00000022 to attach disk [datastore2] c0db2b2a-9c06-409c-b48b-a0d5c127f2dc/c0db2b2a-9c06-409c-b48b-a0d5c127f2dc.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 697.790117] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-faf55a13-2e0d-429d-b457-22d1028c9e30 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.796484] env[63028]: DEBUG oslo_vmware.api [None req-3d4a8458-0f79-4648-8e42-1868292216b3 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] Waiting for the task: (returnval){ [ 697.796484] env[63028]: value = "task-2735231" [ 697.796484] env[63028]: _type = "Task" [ 697.796484] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 697.806390] env[63028]: DEBUG oslo_vmware.api [None req-3d4a8458-0f79-4648-8e42-1868292216b3 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] Task: {'id': task-2735231, 'name': Rename_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 697.866354] env[63028]: DEBUG nova.scheduler.client.report [None req-3778db47-461e-4bfc-8c37-f51d5973e5b9 tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 697.943032] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5fa19bdb-228b-42ec-83d1-3d3179548869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Releasing lock "refresh_cache-d6137c80-0c09-4655-b264-472753b4fa9c" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 697.944535] env[63028]: DEBUG nova.compute.manager [None req-5fa19bdb-228b-42ec-83d1-3d3179548869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: d6137c80-0c09-4655-b264-472753b4fa9c] Instance network_info: |[{"id": "7d007428-6d28-49a8-aa26-6b6ec99613c2", "address": "fa:16:3e:11:ca:39", "network": {"id": "a3d9dede-4b44-4172-9541-24cc42ad633f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-595575190-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": 
{}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "847e89af959a4266ab55c1d2106ba8fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8459aaf-d6a8-46fb-ad14-464ac3104695", "external-id": "nsx-vlan-transportzone-46", "segmentation_id": 46, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7d007428-6d", "ovs_interfaceid": "7d007428-6d28-49a8-aa26-6b6ec99613c2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 697.945038] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-5fa19bdb-228b-42ec-83d1-3d3179548869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: d6137c80-0c09-4655-b264-472753b4fa9c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:11:ca:39', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c8459aaf-d6a8-46fb-ad14-464ac3104695', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7d007428-6d28-49a8-aa26-6b6ec99613c2', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 697.955402] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-5fa19bdb-228b-42ec-83d1-3d3179548869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Creating folder: Project (847e89af959a4266ab55c1d2106ba8fe). Parent ref: group-v550570. 
{{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 697.955740] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0e089805-a763-47cb-9962-93979ae3819c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.972485] env[63028]: INFO nova.compute.manager [None req-8ae0309b-8eab-4869-a224-a9cea9fc068e tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: c3014718-1064-4ab9-9600-86490489ee4b] Rescuing [ 697.972943] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8ae0309b-8eab-4869-a224-a9cea9fc068e tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Acquiring lock "refresh_cache-c3014718-1064-4ab9-9600-86490489ee4b" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 697.973124] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8ae0309b-8eab-4869-a224-a9cea9fc068e tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Acquired lock "refresh_cache-c3014718-1064-4ab9-9600-86490489ee4b" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 697.973369] env[63028]: DEBUG nova.network.neutron [None req-8ae0309b-8eab-4869-a224-a9cea9fc068e tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: c3014718-1064-4ab9-9600-86490489ee4b] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 697.978022] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-5fa19bdb-228b-42ec-83d1-3d3179548869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Created folder: Project (847e89af959a4266ab55c1d2106ba8fe) in parent group-v550570. [ 697.978022] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-5fa19bdb-228b-42ec-83d1-3d3179548869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Creating folder: Instances. Parent ref: group-v550683. {{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 697.978022] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a00e9bc2-6a94-4e36-ad88-63607b0b2829 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.985939] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-5fa19bdb-228b-42ec-83d1-3d3179548869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Created folder: Instances in parent group-v550683. [ 697.986244] env[63028]: DEBUG oslo.service.loopingcall [None req-5fa19bdb-228b-42ec-83d1-3d3179548869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 697.986444] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d6137c80-0c09-4655-b264-472753b4fa9c] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 697.987900] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1c63e76e-b6d3-401d-939d-cb5ec2d86be5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.008231] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 698.008231] env[63028]: value = "task-2735234" [ 698.008231] env[63028]: _type = "Task" [ 698.008231] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 698.016506] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735234, 'name': CreateVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 698.216419] env[63028]: DEBUG nova.compute.manager [None req-a6e1cdda-f79c-4f40-b760-2ed696cbf7f7 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 50e4934b-b9b1-4887-b5d1-95a37fbf4c41] Starting instance... {{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 698.306332] env[63028]: DEBUG oslo_vmware.api [None req-3d4a8458-0f79-4648-8e42-1868292216b3 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] Task: {'id': task-2735231, 'name': Rename_Task, 'duration_secs': 0.175486} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 698.306618] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d4a8458-0f79-4648-8e42-1868292216b3 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] [instance: c0db2b2a-9c06-409c-b48b-a0d5c127f2dc] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 698.306864] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ada5a6df-68cc-45b1-a0f8-6b42a7859df1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.313460] env[63028]: DEBUG oslo_vmware.api [None req-3d4a8458-0f79-4648-8e42-1868292216b3 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] Waiting for the task: (returnval){ [ 698.313460] env[63028]: value = "task-2735235" [ 698.313460] env[63028]: _type = "Task" [ 698.313460] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 698.322406] env[63028]: DEBUG oslo_vmware.api [None req-3d4a8458-0f79-4648-8e42-1868292216b3 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] Task: {'id': task-2735235, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 698.377310] env[63028]: DEBUG oslo_concurrency.lockutils [None req-3778db47-461e-4bfc-8c37-f51d5973e5b9 tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.232s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 698.380291] env[63028]: DEBUG oslo_concurrency.lockutils [None req-404d9a50-0510-4bb1-afa5-ba09e7496ecd tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 39.999s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 698.382009] env[63028]: INFO nova.compute.claims [None req-404d9a50-0510-4bb1-afa5-ba09e7496ecd tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 6e0959ac-8fca-47eb-b501-b50a3e9f025a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 698.413518] env[63028]: INFO nova.scheduler.client.report [None req-3778db47-461e-4bfc-8c37-f51d5973e5b9 tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Deleted allocations for instance c9cc1ac7-06c6-415b-86ce-daf4849bfc05 [ 698.520595] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735234, 'name': CreateVM_Task} progress is 99%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 698.750337] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a6e1cdda-f79c-4f40-b760-2ed696cbf7f7 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 698.770300] env[63028]: DEBUG nova.network.neutron [None req-8ae0309b-8eab-4869-a224-a9cea9fc068e tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: c3014718-1064-4ab9-9600-86490489ee4b] Updating instance_info_cache with network_info: [{"id": "66d0224f-a46e-422c-8658-e35f39636307", "address": "fa:16:3e:1d:a5:fe", "network": {"id": "350f4b14-d211-48c8-b1dd-06a0dd5805d1", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-987689362-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "b3d1798e23e64325a3b6f699cd27d98f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "60567ee6-01d0-4b16-9c7a-4a896827d6eb", "external-id": "nsx-vlan-transportzone-28", "segmentation_id": 28, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap66d0224f-a4", "ovs_interfaceid": "66d0224f-a46e-422c-8658-e35f39636307", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": 
"normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 698.825210] env[63028]: DEBUG oslo_vmware.api [None req-3d4a8458-0f79-4648-8e42-1868292216b3 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] Task: {'id': task-2735235, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 698.928502] env[63028]: DEBUG oslo_concurrency.lockutils [None req-3778db47-461e-4bfc-8c37-f51d5973e5b9 tempest-ImagesOneServerTestJSON-699423091 tempest-ImagesOneServerTestJSON-699423091-project-member] Lock "c9cc1ac7-06c6-415b-86ce-daf4849bfc05" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 46.155s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 699.022144] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735234, 'name': CreateVM_Task} progress is 99%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 699.275642] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8ae0309b-8eab-4869-a224-a9cea9fc068e tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Releasing lock "refresh_cache-c3014718-1064-4ab9-9600-86490489ee4b" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 699.329338] env[63028]: DEBUG oslo_vmware.api [None req-3d4a8458-0f79-4648-8e42-1868292216b3 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] Task: {'id': task-2735235, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 699.537279] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735234, 'name': CreateVM_Task, 'duration_secs': 1.395566} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 699.537598] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d6137c80-0c09-4655-b264-472753b4fa9c] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 699.539796] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5fa19bdb-228b-42ec-83d1-3d3179548869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 699.539976] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5fa19bdb-228b-42ec-83d1-3d3179548869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 699.540327] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5fa19bdb-228b-42ec-83d1-3d3179548869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 699.541065] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d8c482cd-7464-4e40-b5f7-00be96dd9557 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.551535] env[63028]: DEBUG oslo_vmware.api [None req-5fa19bdb-228b-42ec-83d1-3d3179548869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Waiting for the task: (returnval){ [ 699.551535] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52537346-6710-5ac8-5788-2be7c3e7e1db" [ 699.551535] env[63028]: _type = "Task" [ 699.551535] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 699.562954] env[63028]: DEBUG oslo_vmware.api [None req-5fa19bdb-228b-42ec-83d1-3d3179548869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52537346-6710-5ac8-5788-2be7c3e7e1db, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 699.825675] env[63028]: DEBUG oslo_vmware.api [None req-3d4a8458-0f79-4648-8e42-1868292216b3 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] Task: {'id': task-2735235, 'name': PowerOnVM_Task, 'duration_secs': 1.248144} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 699.833029] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d4a8458-0f79-4648-8e42-1868292216b3 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] [instance: c0db2b2a-9c06-409c-b48b-a0d5c127f2dc] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 699.833029] env[63028]: INFO nova.compute.manager [None req-3d4a8458-0f79-4648-8e42-1868292216b3 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] [instance: c0db2b2a-9c06-409c-b48b-a0d5c127f2dc] Took 9.23 seconds to spawn the instance on the hypervisor. [ 699.833029] env[63028]: DEBUG nova.compute.manager [None req-3d4a8458-0f79-4648-8e42-1868292216b3 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] [instance: c0db2b2a-9c06-409c-b48b-a0d5c127f2dc] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 699.833606] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ccf41b1-06bc-4468-a330-767e92b51ce5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.973749] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b61b9c94-a4f1-43b0-bd18-2bfbebc4ed81 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.982394] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81a7b2e7-6085-4d9a-a3a7-1202e1548a9f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.017509] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ddb75ed-87c5-4a0c-8032-7062f447b9f5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.025952] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e286685-7c8a-4583-9cdb-b69f430a2075 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.039790] env[63028]: DEBUG nova.compute.provider_tree [None req-404d9a50-0510-4bb1-afa5-ba09e7496ecd tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 700.062878] env[63028]: DEBUG oslo_vmware.api [None req-5fa19bdb-228b-42ec-83d1-3d3179548869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52537346-6710-5ac8-5788-2be7c3e7e1db, 'name': SearchDatastore_Task, 'duration_secs': 0.012977} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 700.063229] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5fa19bdb-228b-42ec-83d1-3d3179548869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 700.063484] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-5fa19bdb-228b-42ec-83d1-3d3179548869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: d6137c80-0c09-4655-b264-472753b4fa9c] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 700.063720] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5fa19bdb-228b-42ec-83d1-3d3179548869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 700.063895] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5fa19bdb-228b-42ec-83d1-3d3179548869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 700.064178] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-5fa19bdb-228b-42ec-83d1-3d3179548869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 700.064475] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-413b976c-b1e5-405d-8f85-0f364b6fe20d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.073721] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-5fa19bdb-228b-42ec-83d1-3d3179548869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 700.073932] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-5fa19bdb-228b-42ec-83d1-3d3179548869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 700.074719] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0da61d27-4218-48e6-b164-8bc1a578c4d0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.081668] env[63028]: DEBUG oslo_vmware.api [None req-5fa19bdb-228b-42ec-83d1-3d3179548869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Waiting for the task: (returnval){ [ 700.081668] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52709daf-ea6f-726c-7adf-577ee04dcced" [ 700.081668] env[63028]: _type = "Task" [ 700.081668] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 700.090594] env[63028]: DEBUG oslo_vmware.api [None req-5fa19bdb-228b-42ec-83d1-3d3179548869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52709daf-ea6f-726c-7adf-577ee04dcced, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 700.368702] env[63028]: INFO nova.compute.manager [None req-3d4a8458-0f79-4648-8e42-1868292216b3 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] [instance: c0db2b2a-9c06-409c-b48b-a0d5c127f2dc] Took 53.94 seconds to build instance. [ 700.544280] env[63028]: DEBUG nova.scheduler.client.report [None req-404d9a50-0510-4bb1-afa5-ba09e7496ecd tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 700.591549] env[63028]: DEBUG oslo_vmware.api [None req-5fa19bdb-228b-42ec-83d1-3d3179548869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52709daf-ea6f-726c-7adf-577ee04dcced, 'name': SearchDatastore_Task, 'duration_secs': 0.012532} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 700.592357] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0ee73815-54f9-4d8b-a5c8-02851c9ff9fe {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.598222] env[63028]: DEBUG oslo_vmware.api [None req-5fa19bdb-228b-42ec-83d1-3d3179548869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Waiting for the task: (returnval){ [ 700.598222] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52b221b9-669c-756c-eb52-35a88aa04bcd" [ 700.598222] env[63028]: _type = "Task" [ 700.598222] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 700.610305] env[63028]: DEBUG oslo_vmware.api [None req-5fa19bdb-228b-42ec-83d1-3d3179548869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52b221b9-669c-756c-eb52-35a88aa04bcd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 700.819736] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ae0309b-8eab-4869-a224-a9cea9fc068e tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: c3014718-1064-4ab9-9600-86490489ee4b] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 700.820149] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ec9f74bd-aaf3-44e9-a1ea-e15c9d92f687 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.828194] env[63028]: DEBUG oslo_vmware.api [None req-8ae0309b-8eab-4869-a224-a9cea9fc068e tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Waiting for the task: (returnval){ [ 700.828194] env[63028]: value = "task-2735236" [ 700.828194] env[63028]: _type = "Task" [ 700.828194] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 700.838556] env[63028]: DEBUG oslo_vmware.api [None req-8ae0309b-8eab-4869-a224-a9cea9fc068e tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Task: {'id': task-2735236, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 700.875589] env[63028]: DEBUG oslo_concurrency.lockutils [None req-3d4a8458-0f79-4648-8e42-1868292216b3 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] Lock "c0db2b2a-9c06-409c-b48b-a0d5c127f2dc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 80.226s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 701.051662] env[63028]: DEBUG oslo_concurrency.lockutils [None req-404d9a50-0510-4bb1-afa5-ba09e7496ecd tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.671s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 701.053352] env[63028]: DEBUG nova.compute.manager [None req-404d9a50-0510-4bb1-afa5-ba09e7496ecd tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 6e0959ac-8fca-47eb-b501-b50a3e9f025a] Start building networks asynchronously for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 701.063889] env[63028]: DEBUG oslo_concurrency.lockutils [None req-eabfacb7-034a-4283-9259-08f5af8f4706 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 37.235s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 701.065492] env[63028]: INFO nova.compute.claims [None req-eabfacb7-034a-4283-9259-08f5af8f4706 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 1af19279-e75b-4ec5-91f1-a0a101b229b2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 701.111382] env[63028]: DEBUG oslo_vmware.api [None req-5fa19bdb-228b-42ec-83d1-3d3179548869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52b221b9-669c-756c-eb52-35a88aa04bcd, 'name': SearchDatastore_Task, 'duration_secs': 0.01387} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 701.112167] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5fa19bdb-228b-42ec-83d1-3d3179548869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 701.112167] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-5fa19bdb-228b-42ec-83d1-3d3179548869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] d6137c80-0c09-4655-b264-472753b4fa9c/d6137c80-0c09-4655-b264-472753b4fa9c.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 701.112167] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6cc68229-d62f-48f9-905e-aad0de59ea6c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.120363] env[63028]: DEBUG oslo_vmware.api [None req-5fa19bdb-228b-42ec-83d1-3d3179548869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Waiting for the task: (returnval){ [ 701.120363] env[63028]: value = "task-2735237" [ 701.120363] env[63028]: _type = "Task" [ 701.120363] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 701.132463] env[63028]: DEBUG oslo_vmware.api [None req-5fa19bdb-228b-42ec-83d1-3d3179548869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2735237, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 701.337729] env[63028]: DEBUG oslo_vmware.api [None req-8ae0309b-8eab-4869-a224-a9cea9fc068e tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Task: {'id': task-2735236, 'name': PowerOffVM_Task, 'duration_secs': 0.249564} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 701.338075] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ae0309b-8eab-4869-a224-a9cea9fc068e tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: c3014718-1064-4ab9-9600-86490489ee4b] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 701.341707] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb045072-2a96-4baf-9dcf-d9b143bff083 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.363998] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e434e495-f37a-41a4-b7f1-549f58d82515 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.378309] env[63028]: DEBUG nova.compute.manager [None req-66952caa-4d08-4457-8103-f1724319cde7 tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] Starting instance... {{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 701.417685] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ae0309b-8eab-4869-a224-a9cea9fc068e tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: c3014718-1064-4ab9-9600-86490489ee4b] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 701.418073] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7e27ffb4-c318-4645-82f1-b4348e8bc153 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.429322] env[63028]: DEBUG oslo_vmware.api [None req-8ae0309b-8eab-4869-a224-a9cea9fc068e tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Waiting for the task: (returnval){ [ 701.429322] env[63028]: value = "task-2735238" [ 701.429322] env[63028]: _type = "Task" [ 701.429322] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 701.440739] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ae0309b-8eab-4869-a224-a9cea9fc068e tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: c3014718-1064-4ab9-9600-86490489ee4b] VM already powered off {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 701.441379] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-8ae0309b-8eab-4869-a224-a9cea9fc068e tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: c3014718-1064-4ab9-9600-86490489ee4b] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 701.441379] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8ae0309b-8eab-4869-a224-a9cea9fc068e tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 701.441522] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8ae0309b-8eab-4869-a224-a9cea9fc068e tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 701.441571] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-8ae0309b-8eab-4869-a224-a9cea9fc068e tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 701.441841] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-beddac10-80d0-4ec3-8611-ce4c0414cfaf {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.454351] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-8ae0309b-8eab-4869-a224-a9cea9fc068e tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 701.454498] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-8ae0309b-8eab-4869-a224-a9cea9fc068e tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 701.455260] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cc8f1712-4e81-4368-8277-1cdd84305068 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.464514] env[63028]: DEBUG oslo_vmware.api [None req-8ae0309b-8eab-4869-a224-a9cea9fc068e tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Waiting for the task: (returnval){ [ 701.464514] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52c0cce7-9e26-d1c0-1ff1-804cf2778529" [ 701.464514] env[63028]: _type = "Task" [ 701.464514] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 701.474489] env[63028]: DEBUG oslo_vmware.api [None req-8ae0309b-8eab-4869-a224-a9cea9fc068e tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52c0cce7-9e26-d1c0-1ff1-804cf2778529, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 701.570981] env[63028]: DEBUG nova.compute.utils [None req-404d9a50-0510-4bb1-afa5-ba09e7496ecd tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 701.576340] env[63028]: DEBUG nova.compute.manager [None req-404d9a50-0510-4bb1-afa5-ba09e7496ecd tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 6e0959ac-8fca-47eb-b501-b50a3e9f025a] Allocating IP information in the background. {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 701.576549] env[63028]: DEBUG nova.network.neutron [None req-404d9a50-0510-4bb1-afa5-ba09e7496ecd tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 6e0959ac-8fca-47eb-b501-b50a3e9f025a] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 701.635645] env[63028]: DEBUG oslo_vmware.api [None req-5fa19bdb-228b-42ec-83d1-3d3179548869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2735237, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 701.651764] env[63028]: DEBUG nova.policy [None req-404d9a50-0510-4bb1-afa5-ba09e7496ecd tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6a91edc39f874b408ba4ab84ea388846', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a1a2ecd6338148e6a90a71bf1fc5c778', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 701.909139] env[63028]: DEBUG oslo_concurrency.lockutils [None req-66952caa-4d08-4457-8103-f1724319cde7 tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 701.978097] env[63028]: DEBUG oslo_vmware.api [None req-8ae0309b-8eab-4869-a224-a9cea9fc068e tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52c0cce7-9e26-d1c0-1ff1-804cf2778529, 'name': SearchDatastore_Task, 'duration_secs': 0.017216} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 701.978933] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b5353ef0-0e90-48ba-90ef-76539ae23b4f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.985094] env[63028]: DEBUG oslo_vmware.api [None req-8ae0309b-8eab-4869-a224-a9cea9fc068e tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Waiting for the task: (returnval){ [ 701.985094] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52fa6416-1010-7ced-de7d-acb6a2d6f515" [ 701.985094] env[63028]: _type = "Task" [ 701.985094] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 701.998849] env[63028]: DEBUG oslo_vmware.api [None req-8ae0309b-8eab-4869-a224-a9cea9fc068e tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52fa6416-1010-7ced-de7d-acb6a2d6f515, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 702.077764] env[63028]: DEBUG nova.compute.manager [None req-404d9a50-0510-4bb1-afa5-ba09e7496ecd tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 6e0959ac-8fca-47eb-b501-b50a3e9f025a] Start building block device mappings for instance. 
{{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 702.137624] env[63028]: DEBUG oslo_vmware.api [None req-5fa19bdb-228b-42ec-83d1-3d3179548869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2735237, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.60277} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 702.141056] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-5fa19bdb-228b-42ec-83d1-3d3179548869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] d6137c80-0c09-4655-b264-472753b4fa9c/d6137c80-0c09-4655-b264-472753b4fa9c.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 702.141056] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-5fa19bdb-228b-42ec-83d1-3d3179548869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: d6137c80-0c09-4655-b264-472753b4fa9c] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 702.141351] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4feb01c7-f41c-4997-91bb-38f3099a1496 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.152171] env[63028]: DEBUG oslo_vmware.api [None req-5fa19bdb-228b-42ec-83d1-3d3179548869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Waiting for the task: (returnval){ [ 702.152171] env[63028]: value = "task-2735239" [ 702.152171] env[63028]: _type = "Task" [ 702.152171] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 702.169037] env[63028]: DEBUG oslo_vmware.api [None req-5fa19bdb-228b-42ec-83d1-3d3179548869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2735239, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 702.292320] env[63028]: DEBUG nova.compute.manager [req-7524e1a4-666f-4c69-b148-2c7e7584932e req-1b37d834-36ec-41c9-98ba-ea67e3eb9953 service nova] [instance: c0db2b2a-9c06-409c-b48b-a0d5c127f2dc] Received event network-changed-a5a7398c-2688-4f83-abb5-933faad7d16c {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 702.292472] env[63028]: DEBUG nova.compute.manager [req-7524e1a4-666f-4c69-b148-2c7e7584932e req-1b37d834-36ec-41c9-98ba-ea67e3eb9953 service nova] [instance: c0db2b2a-9c06-409c-b48b-a0d5c127f2dc] Refreshing instance network info cache due to event network-changed-a5a7398c-2688-4f83-abb5-933faad7d16c. 
{{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 702.292684] env[63028]: DEBUG oslo_concurrency.lockutils [req-7524e1a4-666f-4c69-b148-2c7e7584932e req-1b37d834-36ec-41c9-98ba-ea67e3eb9953 service nova] Acquiring lock "refresh_cache-c0db2b2a-9c06-409c-b48b-a0d5c127f2dc" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 702.292833] env[63028]: DEBUG oslo_concurrency.lockutils [req-7524e1a4-666f-4c69-b148-2c7e7584932e req-1b37d834-36ec-41c9-98ba-ea67e3eb9953 service nova] Acquired lock "refresh_cache-c0db2b2a-9c06-409c-b48b-a0d5c127f2dc" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 702.293035] env[63028]: DEBUG nova.network.neutron [req-7524e1a4-666f-4c69-b148-2c7e7584932e req-1b37d834-36ec-41c9-98ba-ea67e3eb9953 service nova] [instance: c0db2b2a-9c06-409c-b48b-a0d5c127f2dc] Refreshing network info cache for port a5a7398c-2688-4f83-abb5-933faad7d16c {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 702.313299] env[63028]: DEBUG nova.network.neutron [None req-404d9a50-0510-4bb1-afa5-ba09e7496ecd tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 6e0959ac-8fca-47eb-b501-b50a3e9f025a] Successfully created port: 8f389aaf-a460-47ed-862a-e45d83b3d9e3 {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 702.504984] env[63028]: DEBUG oslo_vmware.api [None req-8ae0309b-8eab-4869-a224-a9cea9fc068e tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52fa6416-1010-7ced-de7d-acb6a2d6f515, 'name': SearchDatastore_Task, 'duration_secs': 0.012162} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 702.505454] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8ae0309b-8eab-4869-a224-a9cea9fc068e tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 702.505454] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-8ae0309b-8eab-4869-a224-a9cea9fc068e tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] c3014718-1064-4ab9-9600-86490489ee4b/f2ba2026-3f4b-431c-97c1-c4ba582a9907-rescue.vmdk. 
{{(pid=63028) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 702.505705] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1688a2b5-2ad5-4e1d-8fb5-9d5f274f312e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.514142] env[63028]: DEBUG oslo_vmware.api [None req-8ae0309b-8eab-4869-a224-a9cea9fc068e tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Waiting for the task: (returnval){ [ 702.514142] env[63028]: value = "task-2735240" [ 702.514142] env[63028]: _type = "Task" [ 702.514142] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 702.531391] env[63028]: DEBUG oslo_vmware.api [None req-8ae0309b-8eab-4869-a224-a9cea9fc068e tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Task: {'id': task-2735240, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 702.673361] env[63028]: DEBUG oslo_vmware.api [None req-5fa19bdb-228b-42ec-83d1-3d3179548869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2735239, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068737} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 702.676097] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-5fa19bdb-228b-42ec-83d1-3d3179548869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: d6137c80-0c09-4655-b264-472753b4fa9c] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 702.676097] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcedd571-1de9-49bf-baed-2fc54737fc04 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.714248] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-5fa19bdb-228b-42ec-83d1-3d3179548869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: d6137c80-0c09-4655-b264-472753b4fa9c] Reconfiguring VM instance instance-00000023 to attach disk [datastore1] d6137c80-0c09-4655-b264-472753b4fa9c/d6137c80-0c09-4655-b264-472753b4fa9c.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 702.721677] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a9984a6a-700f-4859-ba7b-33168b0103b8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.747647] env[63028]: DEBUG oslo_vmware.api [None req-5fa19bdb-228b-42ec-83d1-3d3179548869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Waiting for the task: (returnval){ [ 702.747647] env[63028]: value = "task-2735241" [ 702.747647] env[63028]: _type = "Task" [ 702.747647] env[63028]: } to complete. 
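Note: the records above trace the usual root-disk preparation, a CopyVirtualDisk_Task from the devstack-image-cache_base VMDK into the instance directory, an ExtendVirtualDisk_Task up to the flavor size, then a ReconfigVM_Task to attach the disk. The sketch below compresses the first two steps using the same session helper pattern as earlier; the datacenter reference and paths are parameters, and the attach step is omitted because its VirtualDeviceConfigSpec is built from instance-specific device data.

```python
def prepare_root_disk(session, dc_ref, image_vmdk, root_vmdk, root_kb):
    """Hedged sketch of the copy + extend sequence logged above (not Nova code).

    image_vmdk/root_vmdk are datastore paths like
    '[datastore1] <uuid>/<uuid>.vmdk'; root_kb is the flavor root size in KB.
    """
    disk_mgr = session.vim.service_content.virtualDiskManager

    # CopyVirtualDisk_Task: clone the cached base image into the instance dir.
    task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task', disk_mgr,
                              sourceName=image_vmdk, sourceDatacenter=dc_ref,
                              destName=root_vmdk, destDatacenter=dc_ref)
    session.wait_for_task(task)

    # ExtendVirtualDisk_Task: grow the copied root disk to the flavor size.
    task = session.invoke_api(session.vim, 'ExtendVirtualDisk_Task', disk_mgr,
                              name=root_vmdk, datacenter=dc_ref,
                              newCapacityKb=root_kb, eagerZero=False)
    session.wait_for_task(task)
```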
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 702.761816] env[63028]: DEBUG oslo_vmware.api [None req-5fa19bdb-228b-42ec-83d1-3d3179548869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2735241, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 702.813029] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88965220-5ffe-46c5-999a-4b4cab6a6ef7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.823227] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e38cd1b-6e06-4284-b73a-27b775606466 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.858458] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14e31d00-219f-43fe-bed9-66594263b5c7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.867461] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f2b5d0a-c296-4a8d-9c25-6629215f603a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.888176] env[63028]: DEBUG nova.compute.provider_tree [None req-eabfacb7-034a-4283-9259-08f5af8f4706 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 703.034459] env[63028]: DEBUG oslo_vmware.api [None req-8ae0309b-8eab-4869-a224-a9cea9fc068e tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Task: {'id': task-2735240, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 703.098292] env[63028]: DEBUG nova.compute.manager [None req-404d9a50-0510-4bb1-afa5-ba09e7496ecd tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 6e0959ac-8fca-47eb-b501-b50a3e9f025a] Start spawning the instance on the hypervisor. 
{{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 703.124869] env[63028]: DEBUG nova.virt.hardware [None req-404d9a50-0510-4bb1-afa5-ba09e7496ecd tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 703.125030] env[63028]: DEBUG nova.virt.hardware [None req-404d9a50-0510-4bb1-afa5-ba09e7496ecd tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 703.125344] env[63028]: DEBUG nova.virt.hardware [None req-404d9a50-0510-4bb1-afa5-ba09e7496ecd tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 703.125681] env[63028]: DEBUG nova.virt.hardware [None req-404d9a50-0510-4bb1-afa5-ba09e7496ecd tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 703.125888] env[63028]: DEBUG nova.virt.hardware [None req-404d9a50-0510-4bb1-afa5-ba09e7496ecd tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 703.126062] env[63028]: DEBUG nova.virt.hardware [None req-404d9a50-0510-4bb1-afa5-ba09e7496ecd tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 703.126281] env[63028]: DEBUG nova.virt.hardware [None req-404d9a50-0510-4bb1-afa5-ba09e7496ecd tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 703.126463] env[63028]: DEBUG nova.virt.hardware [None req-404d9a50-0510-4bb1-afa5-ba09e7496ecd tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 
703.126633] env[63028]: DEBUG nova.virt.hardware [None req-404d9a50-0510-4bb1-afa5-ba09e7496ecd tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 703.126812] env[63028]: DEBUG nova.virt.hardware [None req-404d9a50-0510-4bb1-afa5-ba09e7496ecd tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 703.127035] env[63028]: DEBUG nova.virt.hardware [None req-404d9a50-0510-4bb1-afa5-ba09e7496ecd tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 703.127960] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb8fda5e-1aff-470c-a6d6-f53a5ab23063 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.139311] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3244851-b243-4ec9-a631-9078a13cdf3d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.260331] env[63028]: DEBUG oslo_vmware.api [None req-5fa19bdb-228b-42ec-83d1-3d3179548869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2735241, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 703.319508] env[63028]: DEBUG nova.network.neutron [req-7524e1a4-666f-4c69-b148-2c7e7584932e req-1b37d834-36ec-41c9-98ba-ea67e3eb9953 service nova] [instance: c0db2b2a-9c06-409c-b48b-a0d5c127f2dc] Updated VIF entry in instance network info cache for port a5a7398c-2688-4f83-abb5-933faad7d16c. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 703.319508] env[63028]: DEBUG nova.network.neutron [req-7524e1a4-666f-4c69-b148-2c7e7584932e req-1b37d834-36ec-41c9-98ba-ea67e3eb9953 service nova] [instance: c0db2b2a-9c06-409c-b48b-a0d5c127f2dc] Updating instance_info_cache with network_info: [{"id": "a5a7398c-2688-4f83-abb5-933faad7d16c", "address": "fa:16:3e:74:23:47", "network": {"id": "6f2eed6a-7f4d-4c5e-89fb-f23c7837ab0e", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1905115091-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.244", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "87c0a41206824be6a0c74211e80e7181", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98f447de-d71e-41ef-bc37-ed97b4a1f58f", "external-id": "nsx-vlan-transportzone-904", "segmentation_id": 904, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa5a7398c-26", "ovs_interfaceid": "a5a7398c-2688-4f83-abb5-933faad7d16c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 703.390474] env[63028]: DEBUG nova.scheduler.client.report [None req-eabfacb7-034a-4283-9259-08f5af8f4706 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 703.449850] env[63028]: DEBUG oslo_vmware.rw_handles [None req-dbb99f7d-3b35-4de5-9bb1-7d10dfbf7abf tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52896259-cb0d-b23b-e3ea-45a4d8334fc5/disk-0.vmdk. {{(pid=63028) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 703.450827] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d0dc29b-e652-43fc-9c5a-34b9d8e19328 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.456794] env[63028]: DEBUG oslo_vmware.rw_handles [None req-dbb99f7d-3b35-4de5-9bb1-7d10dfbf7abf tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52896259-cb0d-b23b-e3ea-45a4d8334fc5/disk-0.vmdk is in state: ready. 
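Note: the instance_info_cache payload above is the serialized network_info model, a list of VIF dicts, each with a nested network/subnets/ips structure. The following self-contained sketch pulls the fixed and floating addresses out of one such entry; the dict literal is a trimmed copy of the cache entry logged above.

```python
# Hedged sketch: walk one cached VIF entry (structure trimmed from the log above)
# and collect its fixed and floating IPs.
vif = {
    "id": "a5a7398c-2688-4f83-abb5-933faad7d16c",
    "address": "fa:16:3e:74:23:47",
    "network": {
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "ips": [{
                "address": "192.168.128.3", "type": "fixed",
                "floating_ips": [{"address": "10.180.180.244",
                                  "type": "floating"}],
            }],
        }],
    },
}

fixed, floating = [], []
for subnet in vif["network"]["subnets"]:
    for ip in subnet["ips"]:
        fixed.append(ip["address"])
        floating.extend(f["address"] for f in ip.get("floating_ips", []))

print(fixed, floating)   # ['192.168.128.3'] ['10.180.180.244']
```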
{{(pid=63028) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 703.456989] env[63028]: ERROR oslo_vmware.rw_handles [None req-dbb99f7d-3b35-4de5-9bb1-7d10dfbf7abf tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52896259-cb0d-b23b-e3ea-45a4d8334fc5/disk-0.vmdk due to incomplete transfer. [ 703.457195] env[63028]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-c74c6647-eb5c-47fc-9675-d76cbefd79fe {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.463731] env[63028]: DEBUG oslo_vmware.rw_handles [None req-dbb99f7d-3b35-4de5-9bb1-7d10dfbf7abf tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52896259-cb0d-b23b-e3ea-45a4d8334fc5/disk-0.vmdk. {{(pid=63028) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 703.463918] env[63028]: DEBUG nova.virt.vmwareapi.images [None req-dbb99f7d-3b35-4de5-9bb1-7d10dfbf7abf tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: b9db75ba-6832-45e8-8faf-d1cdaa7dabdd] Uploaded image de8a25c2-f41a-4e1f-8b5d-6ab5bcd87f55 to the Glance image server {{(pid=63028) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 703.466093] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-dbb99f7d-3b35-4de5-9bb1-7d10dfbf7abf tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: b9db75ba-6832-45e8-8faf-d1cdaa7dabdd] Destroying the VM {{(pid=63028) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 703.466615] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-5aad0e54-d415-410a-a6b6-b860639203ad {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.471705] env[63028]: DEBUG oslo_vmware.api [None req-dbb99f7d-3b35-4de5-9bb1-7d10dfbf7abf tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Waiting for the task: (returnval){ [ 703.471705] env[63028]: value = "task-2735242" [ 703.471705] env[63028]: _type = "Task" [ 703.471705] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 703.479405] env[63028]: DEBUG oslo_vmware.api [None req-dbb99f7d-3b35-4de5-9bb1-7d10dfbf7abf tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735242, 'name': Destroy_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 703.531608] env[63028]: DEBUG oslo_vmware.api [None req-8ae0309b-8eab-4869-a224-a9cea9fc068e tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Task: {'id': task-2735240, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.542746} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 703.531608] env[63028]: INFO nova.virt.vmwareapi.ds_util [None req-8ae0309b-8eab-4869-a224-a9cea9fc068e tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] c3014718-1064-4ab9-9600-86490489ee4b/f2ba2026-3f4b-431c-97c1-c4ba582a9907-rescue.vmdk. [ 703.531608] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a799f0a-01d2-4668-b0e3-4d6351967f9c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.556900] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-8ae0309b-8eab-4869-a224-a9cea9fc068e tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: c3014718-1064-4ab9-9600-86490489ee4b] Reconfiguring VM instance instance-00000021 to attach disk [datastore2] c3014718-1064-4ab9-9600-86490489ee4b/f2ba2026-3f4b-431c-97c1-c4ba582a9907-rescue.vmdk or device None with type thin {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 703.557411] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b22ae9ae-428e-4cb5-baf4-3952f2c1cc96 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.575599] env[63028]: DEBUG oslo_vmware.api [None req-8ae0309b-8eab-4869-a224-a9cea9fc068e tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Waiting for the task: (returnval){ [ 703.575599] env[63028]: value = "task-2735243" [ 703.575599] env[63028]: _type = "Task" [ 703.575599] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 703.586200] env[63028]: DEBUG oslo_vmware.api [None req-8ae0309b-8eab-4869-a224-a9cea9fc068e tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Task: {'id': task-2735243, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 703.761560] env[63028]: DEBUG oslo_vmware.api [None req-5fa19bdb-228b-42ec-83d1-3d3179548869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2735241, 'name': ReconfigVM_Task, 'duration_secs': 0.543386} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 703.761560] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-5fa19bdb-228b-42ec-83d1-3d3179548869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: d6137c80-0c09-4655-b264-472753b4fa9c] Reconfigured VM instance instance-00000023 to attach disk [datastore1] d6137c80-0c09-4655-b264-472753b4fa9c/d6137c80-0c09-4655-b264-472753b4fa9c.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 703.761929] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b8a137ae-482b-48a9-8995-0a6176c6ffe0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.768894] env[63028]: DEBUG oslo_vmware.api [None req-5fa19bdb-228b-42ec-83d1-3d3179548869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Waiting for the task: (returnval){ [ 703.768894] env[63028]: value = "task-2735244" [ 703.768894] env[63028]: _type = "Task" [ 703.768894] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 703.779507] env[63028]: DEBUG oslo_vmware.api [None req-5fa19bdb-228b-42ec-83d1-3d3179548869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2735244, 'name': Rename_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 703.822860] env[63028]: DEBUG oslo_concurrency.lockutils [req-7524e1a4-666f-4c69-b148-2c7e7584932e req-1b37d834-36ec-41c9-98ba-ea67e3eb9953 service nova] Releasing lock "refresh_cache-c0db2b2a-9c06-409c-b48b-a0d5c127f2dc" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 703.896575] env[63028]: DEBUG oslo_concurrency.lockutils [None req-eabfacb7-034a-4283-9259-08f5af8f4706 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.832s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 703.897863] env[63028]: DEBUG nova.compute.manager [None req-eabfacb7-034a-4283-9259-08f5af8f4706 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 1af19279-e75b-4ec5-91f1-a0a101b229b2] Start building networks asynchronously for instance. 
{{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 703.901318] env[63028]: DEBUG oslo_concurrency.lockutils [None req-10107302-ac52-488d-9e70-d2e14676970a tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 36.805s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 703.901623] env[63028]: DEBUG nova.objects.instance [None req-10107302-ac52-488d-9e70-d2e14676970a tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Lazy-loading 'resources' on Instance uuid 86d5d264-7a7a-434b-a1c4-e9a004c0a034 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 703.982358] env[63028]: DEBUG oslo_vmware.api [None req-dbb99f7d-3b35-4de5-9bb1-7d10dfbf7abf tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735242, 'name': Destroy_Task, 'duration_secs': 0.341733} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 703.982680] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-dbb99f7d-3b35-4de5-9bb1-7d10dfbf7abf tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: b9db75ba-6832-45e8-8faf-d1cdaa7dabdd] Destroyed the VM [ 703.983162] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-dbb99f7d-3b35-4de5-9bb1-7d10dfbf7abf tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: b9db75ba-6832-45e8-8faf-d1cdaa7dabdd] Deleting Snapshot of the VM instance {{(pid=63028) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 703.983600] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-85852990-9365-4012-911b-8471ce6d58f2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.994523] env[63028]: DEBUG oslo_vmware.api [None req-dbb99f7d-3b35-4de5-9bb1-7d10dfbf7abf tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Waiting for the task: (returnval){ [ 703.994523] env[63028]: value = "task-2735245" [ 703.994523] env[63028]: _type = "Task" [ 703.994523] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 704.000536] env[63028]: DEBUG oslo_vmware.api [None req-dbb99f7d-3b35-4de5-9bb1-7d10dfbf7abf tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735245, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 704.088715] env[63028]: DEBUG oslo_vmware.api [None req-8ae0309b-8eab-4869-a224-a9cea9fc068e tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Task: {'id': task-2735243, 'name': ReconfigVM_Task, 'duration_secs': 0.305661} completed successfully. 
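Note: the 'Acquiring/acquired/released lock "compute_resources"' records around here (one caller held the lock 2.832s, the next waited 36.805s for it) come from oslo.concurrency's lock wrapper, which serializes the resource tracker's claim and usage updates and logs the waited/held durations. A minimal sketch of that pattern; the lock name matches the log, the guarded function body is illustrative.

```python
# Hedged sketch of the oslo.concurrency locking pattern behind the
# "compute_resources" lock records above.
from oslo_concurrency import lockutils


@lockutils.synchronized('compute_resources')
def update_usage(instance_uuid):
    # Placeholder body: the real resource tracker mutates in-memory usage here.
    print('updating usage for', instance_uuid)


# Concurrent callers block until the previous holder releases; the wrapper
# emits the DEBUG "waited N.NNNs" / "held N.NNNs" messages seen in the log.
update_usage('86d5d264-7a7a-434b-a1c4-e9a004c0a034')
```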
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 704.089352] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-8ae0309b-8eab-4869-a224-a9cea9fc068e tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: c3014718-1064-4ab9-9600-86490489ee4b] Reconfigured VM instance instance-00000021 to attach disk [datastore2] c3014718-1064-4ab9-9600-86490489ee4b/f2ba2026-3f4b-431c-97c1-c4ba582a9907-rescue.vmdk or device None with type thin {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 704.090504] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7d36c1e-e63e-40d4-a7fb-151a7a9ad656 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.124343] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2f259c0e-bb87-4fd1-807c-f1c187f4058d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.142810] env[63028]: DEBUG oslo_vmware.api [None req-8ae0309b-8eab-4869-a224-a9cea9fc068e tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Waiting for the task: (returnval){ [ 704.142810] env[63028]: value = "task-2735246" [ 704.142810] env[63028]: _type = "Task" [ 704.142810] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 704.152256] env[63028]: DEBUG oslo_vmware.api [None req-8ae0309b-8eab-4869-a224-a9cea9fc068e tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Task: {'id': task-2735246, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 704.281161] env[63028]: DEBUG oslo_vmware.api [None req-5fa19bdb-228b-42ec-83d1-3d3179548869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2735244, 'name': Rename_Task, 'duration_secs': 0.151262} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 704.281528] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-5fa19bdb-228b-42ec-83d1-3d3179548869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: d6137c80-0c09-4655-b264-472753b4fa9c] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 704.281930] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-347c8ef4-ca34-410f-bd38-97d87d448e4d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.288327] env[63028]: DEBUG oslo_vmware.api [None req-5fa19bdb-228b-42ec-83d1-3d3179548869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Waiting for the task: (returnval){ [ 704.288327] env[63028]: value = "task-2735247" [ 704.288327] env[63028]: _type = "Task" [ 704.288327] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 704.303678] env[63028]: DEBUG oslo_vmware.api [None req-5fa19bdb-228b-42ec-83d1-3d3179548869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2735247, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 704.399665] env[63028]: DEBUG nova.network.neutron [None req-404d9a50-0510-4bb1-afa5-ba09e7496ecd tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 6e0959ac-8fca-47eb-b501-b50a3e9f025a] Successfully updated port: 8f389aaf-a460-47ed-862a-e45d83b3d9e3 {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 704.408736] env[63028]: DEBUG nova.compute.utils [None req-eabfacb7-034a-4283-9259-08f5af8f4706 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 704.415292] env[63028]: DEBUG nova.compute.manager [None req-eabfacb7-034a-4283-9259-08f5af8f4706 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 1af19279-e75b-4ec5-91f1-a0a101b229b2] Allocating IP information in the background. {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 704.415549] env[63028]: DEBUG nova.network.neutron [None req-eabfacb7-034a-4283-9259-08f5af8f4706 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 1af19279-e75b-4ec5-91f1-a0a101b229b2] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 704.451163] env[63028]: DEBUG nova.compute.manager [req-3b3e49cb-e804-4c87-b4da-6734d3b8edf9 req-59319e9d-d981-4459-95e9-97cdb33a6fe2 service nova] [instance: 6e0959ac-8fca-47eb-b501-b50a3e9f025a] Received event network-vif-plugged-8f389aaf-a460-47ed-862a-e45d83b3d9e3 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 704.451163] env[63028]: DEBUG oslo_concurrency.lockutils [req-3b3e49cb-e804-4c87-b4da-6734d3b8edf9 req-59319e9d-d981-4459-95e9-97cdb33a6fe2 service nova] Acquiring lock "6e0959ac-8fca-47eb-b501-b50a3e9f025a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 704.451346] env[63028]: DEBUG oslo_concurrency.lockutils [req-3b3e49cb-e804-4c87-b4da-6734d3b8edf9 req-59319e9d-d981-4459-95e9-97cdb33a6fe2 service nova] Lock "6e0959ac-8fca-47eb-b501-b50a3e9f025a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 704.452386] env[63028]: DEBUG oslo_concurrency.lockutils [req-3b3e49cb-e804-4c87-b4da-6734d3b8edf9 req-59319e9d-d981-4459-95e9-97cdb33a6fe2 service nova] Lock "6e0959ac-8fca-47eb-b501-b50a3e9f025a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 704.452386] env[63028]: DEBUG nova.compute.manager 
[req-3b3e49cb-e804-4c87-b4da-6734d3b8edf9 req-59319e9d-d981-4459-95e9-97cdb33a6fe2 service nova] [instance: 6e0959ac-8fca-47eb-b501-b50a3e9f025a] No waiting events found dispatching network-vif-plugged-8f389aaf-a460-47ed-862a-e45d83b3d9e3 {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 704.452386] env[63028]: WARNING nova.compute.manager [req-3b3e49cb-e804-4c87-b4da-6734d3b8edf9 req-59319e9d-d981-4459-95e9-97cdb33a6fe2 service nova] [instance: 6e0959ac-8fca-47eb-b501-b50a3e9f025a] Received unexpected event network-vif-plugged-8f389aaf-a460-47ed-862a-e45d83b3d9e3 for instance with vm_state building and task_state spawning. [ 704.499296] env[63028]: DEBUG nova.policy [None req-eabfacb7-034a-4283-9259-08f5af8f4706 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6a91edc39f874b408ba4ab84ea388846', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a1a2ecd6338148e6a90a71bf1fc5c778', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 704.506903] env[63028]: DEBUG oslo_vmware.api [None req-dbb99f7d-3b35-4de5-9bb1-7d10dfbf7abf tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735245, 'name': RemoveSnapshot_Task, 'duration_secs': 0.452749} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 704.509900] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-dbb99f7d-3b35-4de5-9bb1-7d10dfbf7abf tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: b9db75ba-6832-45e8-8faf-d1cdaa7dabdd] Deleted Snapshot of the VM instance {{(pid=63028) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 704.510221] env[63028]: INFO nova.compute.manager [None req-dbb99f7d-3b35-4de5-9bb1-7d10dfbf7abf tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: b9db75ba-6832-45e8-8faf-d1cdaa7dabdd] Took 12.69 seconds to snapshot the instance on the hypervisor. [ 704.653329] env[63028]: DEBUG oslo_vmware.api [None req-8ae0309b-8eab-4869-a224-a9cea9fc068e tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Task: {'id': task-2735246, 'name': ReconfigVM_Task, 'duration_secs': 0.161744} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 704.654435] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ae0309b-8eab-4869-a224-a9cea9fc068e tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: c3014718-1064-4ab9-9600-86490489ee4b] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 704.654435] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9facc215-5dbf-4fc3-a3ec-77f8a94c87fa {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.659904] env[63028]: DEBUG oslo_vmware.api [None req-8ae0309b-8eab-4869-a224-a9cea9fc068e tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Waiting for the task: (returnval){ [ 704.659904] env[63028]: value = "task-2735248" [ 704.659904] env[63028]: _type = "Task" [ 704.659904] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 704.660360] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8a306740-83e5-417a-bf6b-e5a0c8b16cc8 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Acquiring lock "b9db75ba-6832-45e8-8faf-d1cdaa7dabdd" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 704.660609] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8a306740-83e5-417a-bf6b-e5a0c8b16cc8 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Lock "b9db75ba-6832-45e8-8faf-d1cdaa7dabdd" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 704.660815] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8a306740-83e5-417a-bf6b-e5a0c8b16cc8 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Acquiring lock "b9db75ba-6832-45e8-8faf-d1cdaa7dabdd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 704.660996] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8a306740-83e5-417a-bf6b-e5a0c8b16cc8 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Lock "b9db75ba-6832-45e8-8faf-d1cdaa7dabdd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 704.661176] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8a306740-83e5-417a-bf6b-e5a0c8b16cc8 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Lock "b9db75ba-6832-45e8-8faf-d1cdaa7dabdd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 704.669246] env[63028]: INFO nova.compute.manager [None req-8a306740-83e5-417a-bf6b-e5a0c8b16cc8 tempest-ImagesTestJSON-852617634 
tempest-ImagesTestJSON-852617634-project-member] [instance: b9db75ba-6832-45e8-8faf-d1cdaa7dabdd] Terminating instance [ 704.677618] env[63028]: DEBUG oslo_vmware.api [None req-8ae0309b-8eab-4869-a224-a9cea9fc068e tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Task: {'id': task-2735248, 'name': PowerOnVM_Task} progress is 33%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 704.799408] env[63028]: DEBUG oslo_vmware.api [None req-5fa19bdb-228b-42ec-83d1-3d3179548869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2735247, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 704.899334] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f33d3f0b-c388-40bb-bf9b-780b1fd7657c tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Acquiring lock "da88308f-ce62-40af-adae-e38aa506bdd9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 704.899458] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f33d3f0b-c388-40bb-bf9b-780b1fd7657c tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Lock "da88308f-ce62-40af-adae-e38aa506bdd9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 704.904527] env[63028]: DEBUG oslo_concurrency.lockutils [None req-404d9a50-0510-4bb1-afa5-ba09e7496ecd tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Acquiring lock "refresh_cache-6e0959ac-8fca-47eb-b501-b50a3e9f025a" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 704.904527] env[63028]: DEBUG oslo_concurrency.lockutils [None req-404d9a50-0510-4bb1-afa5-ba09e7496ecd tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Acquired lock "refresh_cache-6e0959ac-8fca-47eb-b501-b50a3e9f025a" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 704.904527] env[63028]: DEBUG nova.network.neutron [None req-404d9a50-0510-4bb1-afa5-ba09e7496ecd tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 6e0959ac-8fca-47eb-b501-b50a3e9f025a] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 704.917386] env[63028]: DEBUG nova.compute.manager [None req-eabfacb7-034a-4283-9259-08f5af8f4706 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 1af19279-e75b-4ec5-91f1-a0a101b229b2] Start building block device mappings for instance. 
{{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 704.921214] env[63028]: DEBUG nova.network.neutron [None req-eabfacb7-034a-4283-9259-08f5af8f4706 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 1af19279-e75b-4ec5-91f1-a0a101b229b2] Successfully created port: 1dbc74bf-5582-4e9b-a07e-8ba016f027e5 {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 705.014618] env[63028]: DEBUG nova.compute.manager [None req-dbb99f7d-3b35-4de5-9bb1-7d10dfbf7abf tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: b9db75ba-6832-45e8-8faf-d1cdaa7dabdd] Instance disappeared during snapshot {{(pid=63028) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4595}} [ 705.035746] env[63028]: DEBUG nova.compute.manager [None req-dbb99f7d-3b35-4de5-9bb1-7d10dfbf7abf tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Image not found during clean up de8a25c2-f41a-4e1f-8b5d-6ab5bcd87f55 {{(pid=63028) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4601}} [ 705.057402] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c1bc71f-4e5b-4c40-9039-de29706e8572 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.065359] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6eff81b7-102a-4a46-86cf-e0bdcb6d5ff3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.107695] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b7c3998-5808-4e7f-89e9-4d45d4c8ba2f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.116376] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a2f6bb5-d87d-43ab-987a-def1ad2f47e4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.129374] env[63028]: DEBUG nova.compute.provider_tree [None req-10107302-ac52-488d-9e70-d2e14676970a tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 705.179746] env[63028]: DEBUG oslo_vmware.api [None req-8ae0309b-8eab-4869-a224-a9cea9fc068e tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Task: {'id': task-2735248, 'name': PowerOnVM_Task, 'duration_secs': 0.4375} completed successfully. 
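Note: the inventory payload reported earlier for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 (VCPU, MEMORY_MB, DISK_GB with total, reserved, allocation_ratio) is what placement uses to size the provider; effective schedulable capacity per resource class is (total - reserved) * allocation_ratio. A small stand-alone sketch of that arithmetic, using the figures from the log.

```python
# Hedged sketch: effective capacity from the inventory data logged above,
# computed as (total - reserved) * allocation_ratio per resource class.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: {capacity:g}")
# VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400
```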
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 705.180022] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ae0309b-8eab-4869-a224-a9cea9fc068e tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: c3014718-1064-4ab9-9600-86490489ee4b] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 705.183337] env[63028]: DEBUG nova.compute.manager [None req-8ae0309b-8eab-4869-a224-a9cea9fc068e tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: c3014718-1064-4ab9-9600-86490489ee4b] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 705.184149] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae41f349-139b-4316-abd1-1811f9e0e071 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.187347] env[63028]: DEBUG nova.compute.manager [None req-8a306740-83e5-417a-bf6b-e5a0c8b16cc8 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: b9db75ba-6832-45e8-8faf-d1cdaa7dabdd] Start destroying the instance on the hypervisor. {{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 705.187561] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-8a306740-83e5-417a-bf6b-e5a0c8b16cc8 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: b9db75ba-6832-45e8-8faf-d1cdaa7dabdd] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 705.189104] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4de9e19-3911-460c-a455-717498be0253 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.198953] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-8a306740-83e5-417a-bf6b-e5a0c8b16cc8 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: b9db75ba-6832-45e8-8faf-d1cdaa7dabdd] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 705.199432] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f0db3a83-ec96-4285-879a-f3305f1b3f78 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.277367] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-8a306740-83e5-417a-bf6b-e5a0c8b16cc8 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: b9db75ba-6832-45e8-8faf-d1cdaa7dabdd] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 705.277367] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-8a306740-83e5-417a-bf6b-e5a0c8b16cc8 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: b9db75ba-6832-45e8-8faf-d1cdaa7dabdd] Deleting contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 705.277367] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-8a306740-83e5-417a-bf6b-e5a0c8b16cc8 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Deleting the 
datastore file [datastore1] b9db75ba-6832-45e8-8faf-d1cdaa7dabdd {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 705.277367] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b837bc4b-7a73-4749-bf4e-fa760ab81f62 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.284458] env[63028]: DEBUG oslo_vmware.api [None req-8a306740-83e5-417a-bf6b-e5a0c8b16cc8 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Waiting for the task: (returnval){ [ 705.284458] env[63028]: value = "task-2735250" [ 705.284458] env[63028]: _type = "Task" [ 705.284458] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 705.292455] env[63028]: DEBUG oslo_vmware.api [None req-8a306740-83e5-417a-bf6b-e5a0c8b16cc8 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735250, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 705.299666] env[63028]: DEBUG oslo_vmware.api [None req-5fa19bdb-228b-42ec-83d1-3d3179548869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2735247, 'name': PowerOnVM_Task, 'duration_secs': 0.544698} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 705.299939] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-5fa19bdb-228b-42ec-83d1-3d3179548869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: d6137c80-0c09-4655-b264-472753b4fa9c] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 705.300588] env[63028]: INFO nova.compute.manager [None req-5fa19bdb-228b-42ec-83d1-3d3179548869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: d6137c80-0c09-4655-b264-472753b4fa9c] Took 10.96 seconds to spawn the instance on the hypervisor. [ 705.300784] env[63028]: DEBUG nova.compute.manager [None req-5fa19bdb-228b-42ec-83d1-3d3179548869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: d6137c80-0c09-4655-b264-472753b4fa9c] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 705.301568] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f317806-eefb-4ee2-b7a0-bdc2a3a74a32 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.474537] env[63028]: DEBUG nova.network.neutron [None req-404d9a50-0510-4bb1-afa5-ba09e7496ecd tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 6e0959ac-8fca-47eb-b501-b50a3e9f025a] Instance cache missing network info. 
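Note: the teardown of instance b9db75ba-6832-45e8-8faf-d1cdaa7dabdd above is UnregisterVM, then a DeleteDatastoreFile_Task on the instance directory, followed by network deallocation handled separately in Nova. Below is a hedged sketch of the vCenter half of that teardown, reusing the session helper pattern from earlier; the datacenter reference, VM reference, and datastore path are parameters, and this is not Nova's actual destroy code.

```python
def destroy_on_datastore(session, dc_ref, vm_ref, instance_dir):
    """Hedged sketch of the unregister + datastore cleanup logged above.

    instance_dir is a datastore path such as '[datastore1] <instance-uuid>'.
    UnregisterVM is synchronous; DeleteDatastoreFile_Task is awaited like any
    other vCenter task. Network deallocation is done elsewhere.
    """
    file_mgr = session.vim.service_content.fileManager
    session.invoke_api(session.vim, 'UnregisterVM', vm_ref)
    task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task', file_mgr,
                              name=instance_dir, datacenter=dc_ref)
    session.wait_for_task(task)
```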
{{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 705.633409] env[63028]: DEBUG nova.scheduler.client.report [None req-10107302-ac52-488d-9e70-d2e14676970a tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 705.656648] env[63028]: DEBUG nova.network.neutron [None req-404d9a50-0510-4bb1-afa5-ba09e7496ecd tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 6e0959ac-8fca-47eb-b501-b50a3e9f025a] Updating instance_info_cache with network_info: [{"id": "8f389aaf-a460-47ed-862a-e45d83b3d9e3", "address": "fa:16:3e:fe:2b:36", "network": {"id": "f8ae7dcc-bddb-41e4-b0eb-ae1fe84858f2", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1381703882-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a1a2ecd6338148e6a90a71bf1fc5c778", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "00a15667-7ca5-4dc9-be92-164750d87988", "external-id": "nsx-vlan-transportzone-933", "segmentation_id": 933, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8f389aaf-a4", "ovs_interfaceid": "8f389aaf-a460-47ed-862a-e45d83b3d9e3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 705.668608] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d602f814-ca27-4ecc-a846-b4d4de9caa30 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Acquiring lock "cd11b318-9158-4f1d-8aa8-1c9d565bb5d5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 705.668889] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d602f814-ca27-4ecc-a846-b4d4de9caa30 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Lock "cd11b318-9158-4f1d-8aa8-1c9d565bb5d5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 705.798930] env[63028]: DEBUG oslo_vmware.api [None req-8a306740-83e5-417a-bf6b-e5a0c8b16cc8 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735250, 'name': 
DeleteDatastoreFile_Task, 'duration_secs': 0.465429} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 705.799671] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-8a306740-83e5-417a-bf6b-e5a0c8b16cc8 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 705.799671] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-8a306740-83e5-417a-bf6b-e5a0c8b16cc8 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: b9db75ba-6832-45e8-8faf-d1cdaa7dabdd] Deleted contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 705.799671] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-8a306740-83e5-417a-bf6b-e5a0c8b16cc8 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: b9db75ba-6832-45e8-8faf-d1cdaa7dabdd] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 705.800000] env[63028]: INFO nova.compute.manager [None req-8a306740-83e5-417a-bf6b-e5a0c8b16cc8 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: b9db75ba-6832-45e8-8faf-d1cdaa7dabdd] Took 0.61 seconds to destroy the instance on the hypervisor. [ 705.800099] env[63028]: DEBUG oslo.service.loopingcall [None req-8a306740-83e5-417a-bf6b-e5a0c8b16cc8 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 705.800349] env[63028]: DEBUG nova.compute.manager [-] [instance: b9db75ba-6832-45e8-8faf-d1cdaa7dabdd] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 705.800472] env[63028]: DEBUG nova.network.neutron [-] [instance: b9db75ba-6832-45e8-8faf-d1cdaa7dabdd] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 705.821062] env[63028]: INFO nova.compute.manager [None req-5fa19bdb-228b-42ec-83d1-3d3179548869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: d6137c80-0c09-4655-b264-472753b4fa9c] Took 55.94 seconds to build instance. [ 705.931509] env[63028]: DEBUG nova.compute.manager [None req-eabfacb7-034a-4283-9259-08f5af8f4706 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 1af19279-e75b-4ec5-91f1-a0a101b229b2] Start spawning the instance on the hypervisor. 
{{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 705.960677] env[63028]: DEBUG nova.virt.hardware [None req-eabfacb7-034a-4283-9259-08f5af8f4706 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 705.960927] env[63028]: DEBUG nova.virt.hardware [None req-eabfacb7-034a-4283-9259-08f5af8f4706 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 705.961107] env[63028]: DEBUG nova.virt.hardware [None req-eabfacb7-034a-4283-9259-08f5af8f4706 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 705.961292] env[63028]: DEBUG nova.virt.hardware [None req-eabfacb7-034a-4283-9259-08f5af8f4706 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 705.961438] env[63028]: DEBUG nova.virt.hardware [None req-eabfacb7-034a-4283-9259-08f5af8f4706 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 705.961585] env[63028]: DEBUG nova.virt.hardware [None req-eabfacb7-034a-4283-9259-08f5af8f4706 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 705.961790] env[63028]: DEBUG nova.virt.hardware [None req-eabfacb7-034a-4283-9259-08f5af8f4706 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 705.962549] env[63028]: DEBUG nova.virt.hardware [None req-eabfacb7-034a-4283-9259-08f5af8f4706 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 
705.962749] env[63028]: DEBUG nova.virt.hardware [None req-eabfacb7-034a-4283-9259-08f5af8f4706 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 705.962925] env[63028]: DEBUG nova.virt.hardware [None req-eabfacb7-034a-4283-9259-08f5af8f4706 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 705.963120] env[63028]: DEBUG nova.virt.hardware [None req-eabfacb7-034a-4283-9259-08f5af8f4706 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 705.964158] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-999973d2-979c-459f-99c2-8015ee1b60c3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.975656] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84d0bf40-1469-4f17-9d19-c478573cff6f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.132214] env[63028]: DEBUG nova.compute.manager [req-6568ca5b-ada1-4c45-8371-3e31e9437b59 req-5f41f36b-a0c0-46e2-bce4-da2aa1ca0b25 service nova] [instance: b9db75ba-6832-45e8-8faf-d1cdaa7dabdd] Received event network-vif-deleted-49500b3e-3a86-4a0a-94ed-762a86a78124 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 706.132435] env[63028]: INFO nova.compute.manager [req-6568ca5b-ada1-4c45-8371-3e31e9437b59 req-5f41f36b-a0c0-46e2-bce4-da2aa1ca0b25 service nova] [instance: b9db75ba-6832-45e8-8faf-d1cdaa7dabdd] Neutron deleted interface 49500b3e-3a86-4a0a-94ed-762a86a78124; detaching it from the instance and deleting it from the info cache [ 706.132612] env[63028]: DEBUG nova.network.neutron [req-6568ca5b-ada1-4c45-8371-3e31e9437b59 req-5f41f36b-a0c0-46e2-bce4-da2aa1ca0b25 service nova] [instance: b9db75ba-6832-45e8-8faf-d1cdaa7dabdd] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 706.138441] env[63028]: DEBUG oslo_concurrency.lockutils [None req-10107302-ac52-488d-9e70-d2e14676970a tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.238s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 706.140807] env[63028]: DEBUG oslo_concurrency.lockutils [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 38.259s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 706.159604] env[63028]: DEBUG oslo_concurrency.lockutils [None req-404d9a50-0510-4bb1-afa5-ba09e7496ecd tempest-ListServerFiltersTestJSON-1339089997 
tempest-ListServerFiltersTestJSON-1339089997-project-member] Releasing lock "refresh_cache-6e0959ac-8fca-47eb-b501-b50a3e9f025a" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 706.159604] env[63028]: DEBUG nova.compute.manager [None req-404d9a50-0510-4bb1-afa5-ba09e7496ecd tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 6e0959ac-8fca-47eb-b501-b50a3e9f025a] Instance network_info: |[{"id": "8f389aaf-a460-47ed-862a-e45d83b3d9e3", "address": "fa:16:3e:fe:2b:36", "network": {"id": "f8ae7dcc-bddb-41e4-b0eb-ae1fe84858f2", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1381703882-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a1a2ecd6338148e6a90a71bf1fc5c778", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "00a15667-7ca5-4dc9-be92-164750d87988", "external-id": "nsx-vlan-transportzone-933", "segmentation_id": 933, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8f389aaf-a4", "ovs_interfaceid": "8f389aaf-a460-47ed-862a-e45d83b3d9e3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 706.159815] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-404d9a50-0510-4bb1-afa5-ba09e7496ecd tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 6e0959ac-8fca-47eb-b501-b50a3e9f025a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fe:2b:36', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '00a15667-7ca5-4dc9-be92-164750d87988', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8f389aaf-a460-47ed-862a-e45d83b3d9e3', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 706.166937] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-404d9a50-0510-4bb1-afa5-ba09e7496ecd tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Creating folder: Project (a1a2ecd6338148e6a90a71bf1fc5c778). Parent ref: group-v550570. 
{{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 706.168141] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-aad3c2e0-925e-482f-989b-4b52e2a56186 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.171019] env[63028]: INFO nova.scheduler.client.report [None req-10107302-ac52-488d-9e70-d2e14676970a tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Deleted allocations for instance 86d5d264-7a7a-434b-a1c4-e9a004c0a034 [ 706.180027] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-404d9a50-0510-4bb1-afa5-ba09e7496ecd tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Created folder: Project (a1a2ecd6338148e6a90a71bf1fc5c778) in parent group-v550570. [ 706.180265] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-404d9a50-0510-4bb1-afa5-ba09e7496ecd tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Creating folder: Instances. Parent ref: group-v550686. {{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 706.180457] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-90d0452a-b2bb-4f9b-8980-8187fe7ea3a5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.189360] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-404d9a50-0510-4bb1-afa5-ba09e7496ecd tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Created folder: Instances in parent group-v550686. [ 706.189600] env[63028]: DEBUG oslo.service.loopingcall [None req-404d9a50-0510-4bb1-afa5-ba09e7496ecd tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 706.189784] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6e0959ac-8fca-47eb-b501-b50a3e9f025a] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 706.189977] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6441a7dc-3769-4607-8f42-e865360830fa {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.209669] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 706.209669] env[63028]: value = "task-2735253" [ 706.209669] env[63028]: _type = "Task" [ 706.209669] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 706.218641] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735253, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 706.322109] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5fa19bdb-228b-42ec-83d1-3d3179548869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Lock "d6137c80-0c09-4655-b264-472753b4fa9c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 77.778s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 706.571460] env[63028]: DEBUG nova.compute.manager [req-0586cd73-2b2e-4869-b2e8-46decabca87b req-1439faf1-a4e9-4f9a-a392-6c9f2e04445d service nova] [instance: 6e0959ac-8fca-47eb-b501-b50a3e9f025a] Received event network-changed-8f389aaf-a460-47ed-862a-e45d83b3d9e3 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 706.571663] env[63028]: DEBUG nova.compute.manager [req-0586cd73-2b2e-4869-b2e8-46decabca87b req-1439faf1-a4e9-4f9a-a392-6c9f2e04445d service nova] [instance: 6e0959ac-8fca-47eb-b501-b50a3e9f025a] Refreshing instance network info cache due to event network-changed-8f389aaf-a460-47ed-862a-e45d83b3d9e3. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 706.571867] env[63028]: DEBUG oslo_concurrency.lockutils [req-0586cd73-2b2e-4869-b2e8-46decabca87b req-1439faf1-a4e9-4f9a-a392-6c9f2e04445d service nova] Acquiring lock "refresh_cache-6e0959ac-8fca-47eb-b501-b50a3e9f025a" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 706.572021] env[63028]: DEBUG oslo_concurrency.lockutils [req-0586cd73-2b2e-4869-b2e8-46decabca87b req-1439faf1-a4e9-4f9a-a392-6c9f2e04445d service nova] Acquired lock "refresh_cache-6e0959ac-8fca-47eb-b501-b50a3e9f025a" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 706.572183] env[63028]: DEBUG nova.network.neutron [req-0586cd73-2b2e-4869-b2e8-46decabca87b req-1439faf1-a4e9-4f9a-a392-6c9f2e04445d service nova] [instance: 6e0959ac-8fca-47eb-b501-b50a3e9f025a] Refreshing network info cache for port 8f389aaf-a460-47ed-862a-e45d83b3d9e3 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 706.587442] env[63028]: DEBUG nova.network.neutron [-] [instance: b9db75ba-6832-45e8-8faf-d1cdaa7dabdd] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 706.636218] env[63028]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-389e4a4b-452b-4401-8c4d-7778ad1335fd {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.656662] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51742cab-dcbf-4e6e-b2bb-0ece75aab41b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.679938] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Applying migration context for instance f3277886-4498-45c6-be68-e71d8293dc00 as it has an incoming, in-progress migration 423e4fa8-4060-4a21-aa81-5c4eef279e90. 
Migration status is confirming {{(pid=63028) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 706.683316] env[63028]: INFO nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: f3277886-4498-45c6-be68-e71d8293dc00] Updating resource usage from migration 423e4fa8-4060-4a21-aa81-5c4eef279e90 [ 706.691094] env[63028]: DEBUG oslo_concurrency.lockutils [None req-10107302-ac52-488d-9e70-d2e14676970a tempest-DeleteServersAdminTestJSON-430766428 tempest-DeleteServersAdminTestJSON-430766428-project-member] Lock "86d5d264-7a7a-434b-a1c4-e9a004c0a034" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 43.115s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 706.717050] env[63028]: DEBUG nova.compute.manager [req-6568ca5b-ada1-4c45-8371-3e31e9437b59 req-5f41f36b-a0c0-46e2-bce4-da2aa1ca0b25 service nova] [instance: b9db75ba-6832-45e8-8faf-d1cdaa7dabdd] Detach interface failed, port_id=49500b3e-3a86-4a0a-94ed-762a86a78124, reason: Instance b9db75ba-6832-45e8-8faf-d1cdaa7dabdd could not be found. {{(pid=63028) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 706.718313] env[63028]: DEBUG nova.network.neutron [None req-eabfacb7-034a-4283-9259-08f5af8f4706 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 1af19279-e75b-4ec5-91f1-a0a101b229b2] Successfully updated port: 1dbc74bf-5582-4e9b-a07e-8ba016f027e5 {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 706.720290] env[63028]: WARNING nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance 413f7fea-452b-463f-b396-cdd29e8ffa91 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 706.720437] env[63028]: WARNING nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance 2dcdb36f-6c2d-4f70-8aed-27f6511ef3e8 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 706.720559] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance d663c2df-ae54-4c50-a70f-e2180700c700 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 706.720692] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance 5a330ed9-c106-49f2-b524-a424e717b5ce actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 706.720806] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance 4a782483-c24e-44db-b697-856c69cc4a13 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 706.720919] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance 9b1cd3c1-5e9a-43b6-9efb-1baf879ae0c0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 706.721102] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance 8f6beda6-0fc6-4d85-9f27-f4248adda8f3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 706.721223] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance 0e07a6cd-8c99-408d-95ba-63f7839c327f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 706.721331] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance 5a340e31-678c-437e-aa4e-07d5d9f4334f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 706.721438] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance ba57ed92-aaef-460c-bd45-d0cbe09e4615 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 706.721544] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance b9d9fe4e-438c-4f68-b011-9eb9e10a381c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 706.721647] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance b9db75ba-6832-45e8-8faf-d1cdaa7dabdd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 706.721781] env[63028]: WARNING nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance 8a09beac-4b54-4fbb-9bac-3dcfe4c21fb3 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 706.721902] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance 8c7c8713-d5d7-490e-aba5-25d98bfbfaa0 actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 706.722017] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance c3014718-1064-4ab9-9600-86490489ee4b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 706.722137] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance c0db2b2a-9c06-409c-b48b-a0d5c127f2dc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 706.722245] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Migration 423e4fa8-4060-4a21-aa81-5c4eef279e90 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 706.722350] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance f3277886-4498-45c6-be68-e71d8293dc00 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 706.722453] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance d6137c80-0c09-4655-b264-472753b4fa9c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 706.722555] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance 6e0959ac-8fca-47eb-b501-b50a3e9f025a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 706.722656] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance 1af19279-e75b-4ec5-91f1-a0a101b229b2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 706.733941] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735253, 'name': CreateVM_Task} progress is 99%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 706.824917] env[63028]: DEBUG nova.compute.manager [None req-c3a09395-4017-42a7-a909-5c9706ea7012 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 1316318e-8dcf-4ac2-b40a-6a3ab6964997] Starting instance... {{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 707.058616] env[63028]: INFO nova.compute.manager [None req-c61784cf-a7ca-46a7-bbac-a6be4b6e7b12 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: c3014718-1064-4ab9-9600-86490489ee4b] Unrescuing [ 707.058889] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c61784cf-a7ca-46a7-bbac-a6be4b6e7b12 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Acquiring lock "refresh_cache-c3014718-1064-4ab9-9600-86490489ee4b" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 707.059088] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c61784cf-a7ca-46a7-bbac-a6be4b6e7b12 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Acquired lock "refresh_cache-c3014718-1064-4ab9-9600-86490489ee4b" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 707.059302] env[63028]: DEBUG nova.network.neutron [None req-c61784cf-a7ca-46a7-bbac-a6be4b6e7b12 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: c3014718-1064-4ab9-9600-86490489ee4b] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 707.090851] env[63028]: INFO nova.compute.manager [-] [instance: b9db75ba-6832-45e8-8faf-d1cdaa7dabdd] Took 1.29 seconds to deallocate network for instance. 
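The task-status entries in this stretch of the log (for example "Task: {'id': task-2735253, 'name': CreateVM_Task} progress is 0%", later "progress is 99%", and finally "completed successfully") come from oslo.vmware's wait_for_task/_poll_task loop, which repeatedly reads the vCenter task state and logs progress until the task reaches a terminal state. Below is a minimal, self-contained sketch of that poll-until-terminal pattern for orientation only; the function and helper names are hypothetical and it does not reproduce the actual oslo.vmware API.

    import time
    from typing import Any, Callable, Dict

    class TaskFailed(Exception):
        """Raised when the polled task reports an error state."""

    def wait_for_task_sketch(poll: Callable[[], Dict[str, Any]],
                             interval: float = 0.5,
                             timeout: float = 300.0) -> Dict[str, Any]:
        """Poll a task until it reaches a terminal state (illustrative only).

        `poll` returns a dict such as {"state": "running", "progress": 40},
        {"state": "success"}, or {"state": "error", "message": "..."}, loosely
        mirroring the queued/running/success/error states a vCenter task exposes.
        """
        deadline = time.monotonic() + timeout
        while True:
            info = poll()
            state = info.get("state")
            if state == "success":
                return info
            if state == "error":
                raise TaskFailed(info.get("message", "task failed"))
            if time.monotonic() > deadline:
                raise TimeoutError("task did not complete in time")
            # Each poll cycle reports current progress before sleeping,
            # which is what produces the "progress is N%" lines above.
            print(f"progress is {info.get('progress', 0)}%")
            time.sleep(interval)

    # Usage example with a fake task that finishes after three polls:
    _calls = {"n": 0}
    def _fake_poll() -> Dict[str, Any]:
        _calls["n"] += 1
        if _calls["n"] < 3:
            return {"state": "running", "progress": _calls["n"] * 33}
        return {"state": "success", "progress": 100}

    print(wait_for_task_sketch(_fake_poll, interval=0.01))

The real driver code does the same thing through a periodic looping call and raises a task-specific exception on error; the sketch keeps only the control flow that explains the progress lines seen here.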
[ 707.165476] env[63028]: DEBUG oslo_concurrency.lockutils [None req-be8438c3-6eec-42fd-9020-8898f3ea9278 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] Acquiring lock "7e914e49-0d70-4024-940b-ad2a15e9dff7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 707.165714] env[63028]: DEBUG oslo_concurrency.lockutils [None req-be8438c3-6eec-42fd-9020-8898f3ea9278 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] Lock "7e914e49-0d70-4024-940b-ad2a15e9dff7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 707.229268] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance 22713da1-ae53-4bbe-ae55-2490440cbd87 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 707.231294] env[63028]: DEBUG oslo_concurrency.lockutils [None req-eabfacb7-034a-4283-9259-08f5af8f4706 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Acquiring lock "refresh_cache-1af19279-e75b-4ec5-91f1-a0a101b229b2" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 707.232160] env[63028]: DEBUG oslo_concurrency.lockutils [None req-eabfacb7-034a-4283-9259-08f5af8f4706 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Acquired lock "refresh_cache-1af19279-e75b-4ec5-91f1-a0a101b229b2" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 707.232160] env[63028]: DEBUG nova.network.neutron [None req-eabfacb7-034a-4283-9259-08f5af8f4706 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 1af19279-e75b-4ec5-91f1-a0a101b229b2] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 707.232699] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735253, 'name': CreateVM_Task, 'duration_secs': 0.639998} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 707.233377] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6e0959ac-8fca-47eb-b501-b50a3e9f025a] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 707.234020] env[63028]: DEBUG oslo_concurrency.lockutils [None req-404d9a50-0510-4bb1-afa5-ba09e7496ecd tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 707.234191] env[63028]: DEBUG oslo_concurrency.lockutils [None req-404d9a50-0510-4bb1-afa5-ba09e7496ecd tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 707.234507] env[63028]: DEBUG oslo_concurrency.lockutils [None req-404d9a50-0510-4bb1-afa5-ba09e7496ecd tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 707.234753] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6418e7e1-2344-4ae0-8e1d-cf4c9d4b56dc {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.242622] env[63028]: DEBUG oslo_vmware.api [None req-404d9a50-0510-4bb1-afa5-ba09e7496ecd tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Waiting for the task: (returnval){ [ 707.242622] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52fc95b5-13e7-127a-0d1b-057c0031c119" [ 707.242622] env[63028]: _type = "Task" [ 707.242622] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 707.251760] env[63028]: DEBUG oslo_vmware.api [None req-404d9a50-0510-4bb1-afa5-ba09e7496ecd tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52fc95b5-13e7-127a-0d1b-057c0031c119, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.318443] env[63028]: DEBUG nova.network.neutron [req-0586cd73-2b2e-4869-b2e8-46decabca87b req-1439faf1-a4e9-4f9a-a392-6c9f2e04445d service nova] [instance: 6e0959ac-8fca-47eb-b501-b50a3e9f025a] Updated VIF entry in instance network info cache for port 8f389aaf-a460-47ed-862a-e45d83b3d9e3. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 707.318874] env[63028]: DEBUG nova.network.neutron [req-0586cd73-2b2e-4869-b2e8-46decabca87b req-1439faf1-a4e9-4f9a-a392-6c9f2e04445d service nova] [instance: 6e0959ac-8fca-47eb-b501-b50a3e9f025a] Updating instance_info_cache with network_info: [{"id": "8f389aaf-a460-47ed-862a-e45d83b3d9e3", "address": "fa:16:3e:fe:2b:36", "network": {"id": "f8ae7dcc-bddb-41e4-b0eb-ae1fe84858f2", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1381703882-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a1a2ecd6338148e6a90a71bf1fc5c778", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "00a15667-7ca5-4dc9-be92-164750d87988", "external-id": "nsx-vlan-transportzone-933", "segmentation_id": 933, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8f389aaf-a4", "ovs_interfaceid": "8f389aaf-a460-47ed-862a-e45d83b3d9e3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 707.350826] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c3a09395-4017-42a7-a909-5c9706ea7012 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 707.597883] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8a306740-83e5-417a-bf6b-e5a0c8b16cc8 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 707.734761] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance 0d96ba8e-b46b-48ae-957c-cdc49762c395 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 707.756350] env[63028]: DEBUG oslo_vmware.api [None req-404d9a50-0510-4bb1-afa5-ba09e7496ecd tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52fc95b5-13e7-127a-0d1b-057c0031c119, 'name': SearchDatastore_Task, 'duration_secs': 0.011064} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 707.756669] env[63028]: DEBUG oslo_concurrency.lockutils [None req-404d9a50-0510-4bb1-afa5-ba09e7496ecd tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 707.758806] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-404d9a50-0510-4bb1-afa5-ba09e7496ecd tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 6e0959ac-8fca-47eb-b501-b50a3e9f025a] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 707.758806] env[63028]: DEBUG oslo_concurrency.lockutils [None req-404d9a50-0510-4bb1-afa5-ba09e7496ecd tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 707.758806] env[63028]: DEBUG oslo_concurrency.lockutils [None req-404d9a50-0510-4bb1-afa5-ba09e7496ecd tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 707.758806] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-404d9a50-0510-4bb1-afa5-ba09e7496ecd tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 707.758806] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8f250b87-01dc-46df-b5bf-75ec2a2275ca {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.767882] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-404d9a50-0510-4bb1-afa5-ba09e7496ecd tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 707.768064] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-404d9a50-0510-4bb1-afa5-ba09e7496ecd tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 707.768806] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-44e7fb94-9743-4059-8d64-41662a4e54b8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.777070] env[63028]: DEBUG oslo_vmware.api [None req-404d9a50-0510-4bb1-afa5-ba09e7496ecd tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Waiting for the task: (returnval){ [ 707.777070] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52ec925a-f387-62e5-06ce-387ee204294c" [ 707.777070] env[63028]: _type = "Task" [ 707.777070] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 707.777796] env[63028]: DEBUG nova.network.neutron [None req-eabfacb7-034a-4283-9259-08f5af8f4706 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 1af19279-e75b-4ec5-91f1-a0a101b229b2] Instance cache missing network info. {{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 707.788340] env[63028]: DEBUG oslo_vmware.api [None req-404d9a50-0510-4bb1-afa5-ba09e7496ecd tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52ec925a-f387-62e5-06ce-387ee204294c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.821464] env[63028]: DEBUG oslo_concurrency.lockutils [req-0586cd73-2b2e-4869-b2e8-46decabca87b req-1439faf1-a4e9-4f9a-a392-6c9f2e04445d service nova] Releasing lock "refresh_cache-6e0959ac-8fca-47eb-b501-b50a3e9f025a" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 707.821932] env[63028]: DEBUG nova.compute.manager [req-0586cd73-2b2e-4869-b2e8-46decabca87b req-1439faf1-a4e9-4f9a-a392-6c9f2e04445d service nova] [instance: d6137c80-0c09-4655-b264-472753b4fa9c] Received event network-changed-7d007428-6d28-49a8-aa26-6b6ec99613c2 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 707.821932] env[63028]: DEBUG nova.compute.manager [req-0586cd73-2b2e-4869-b2e8-46decabca87b req-1439faf1-a4e9-4f9a-a392-6c9f2e04445d service nova] [instance: d6137c80-0c09-4655-b264-472753b4fa9c] Refreshing instance network info cache due to event network-changed-7d007428-6d28-49a8-aa26-6b6ec99613c2. 
{{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 707.822099] env[63028]: DEBUG oslo_concurrency.lockutils [req-0586cd73-2b2e-4869-b2e8-46decabca87b req-1439faf1-a4e9-4f9a-a392-6c9f2e04445d service nova] Acquiring lock "refresh_cache-d6137c80-0c09-4655-b264-472753b4fa9c" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 707.822223] env[63028]: DEBUG oslo_concurrency.lockutils [req-0586cd73-2b2e-4869-b2e8-46decabca87b req-1439faf1-a4e9-4f9a-a392-6c9f2e04445d service nova] Acquired lock "refresh_cache-d6137c80-0c09-4655-b264-472753b4fa9c" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 707.822387] env[63028]: DEBUG nova.network.neutron [req-0586cd73-2b2e-4869-b2e8-46decabca87b req-1439faf1-a4e9-4f9a-a392-6c9f2e04445d service nova] [instance: d6137c80-0c09-4655-b264-472753b4fa9c] Refreshing network info cache for port 7d007428-6d28-49a8-aa26-6b6ec99613c2 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 707.828832] env[63028]: DEBUG nova.network.neutron [None req-c61784cf-a7ca-46a7-bbac-a6be4b6e7b12 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: c3014718-1064-4ab9-9600-86490489ee4b] Updating instance_info_cache with network_info: [{"id": "66d0224f-a46e-422c-8658-e35f39636307", "address": "fa:16:3e:1d:a5:fe", "network": {"id": "350f4b14-d211-48c8-b1dd-06a0dd5805d1", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-987689362-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "b3d1798e23e64325a3b6f699cd27d98f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "60567ee6-01d0-4b16-9c7a-4a896827d6eb", "external-id": "nsx-vlan-transportzone-28", "segmentation_id": 28, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap66d0224f-a4", "ovs_interfaceid": "66d0224f-a46e-422c-8658-e35f39636307", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 707.979259] env[63028]: DEBUG nova.network.neutron [None req-eabfacb7-034a-4283-9259-08f5af8f4706 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 1af19279-e75b-4ec5-91f1-a0a101b229b2] Updating instance_info_cache with network_info: [{"id": "1dbc74bf-5582-4e9b-a07e-8ba016f027e5", "address": "fa:16:3e:2e:4c:85", "network": {"id": "f8ae7dcc-bddb-41e4-b0eb-ae1fe84858f2", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1381703882-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": 
"a1a2ecd6338148e6a90a71bf1fc5c778", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "00a15667-7ca5-4dc9-be92-164750d87988", "external-id": "nsx-vlan-transportzone-933", "segmentation_id": 933, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1dbc74bf-55", "ovs_interfaceid": "1dbc74bf-5582-4e9b-a07e-8ba016f027e5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 708.241132] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance e2d39c43-6666-4fda-b8e2-485399c59e46 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 708.294328] env[63028]: DEBUG oslo_vmware.api [None req-404d9a50-0510-4bb1-afa5-ba09e7496ecd tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52ec925a-f387-62e5-06ce-387ee204294c, 'name': SearchDatastore_Task, 'duration_secs': 0.01365} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 708.295118] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f5702b0a-26e5-4524-954a-9b7e672737b4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.303852] env[63028]: DEBUG oslo_vmware.api [None req-404d9a50-0510-4bb1-afa5-ba09e7496ecd tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Waiting for the task: (returnval){ [ 708.303852] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]527bd964-dd06-c022-4e32-e714d19b9f4f" [ 708.303852] env[63028]: _type = "Task" [ 708.303852] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 708.311576] env[63028]: DEBUG oslo_vmware.api [None req-404d9a50-0510-4bb1-afa5-ba09e7496ecd tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]527bd964-dd06-c022-4e32-e714d19b9f4f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.332340] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c61784cf-a7ca-46a7-bbac-a6be4b6e7b12 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Releasing lock "refresh_cache-c3014718-1064-4ab9-9600-86490489ee4b" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 708.333010] env[63028]: DEBUG nova.objects.instance [None req-c61784cf-a7ca-46a7-bbac-a6be4b6e7b12 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Lazy-loading 'flavor' on Instance uuid c3014718-1064-4ab9-9600-86490489ee4b {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 708.482589] env[63028]: DEBUG oslo_concurrency.lockutils [None req-eabfacb7-034a-4283-9259-08f5af8f4706 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Releasing lock "refresh_cache-1af19279-e75b-4ec5-91f1-a0a101b229b2" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 708.482589] env[63028]: DEBUG nova.compute.manager [None req-eabfacb7-034a-4283-9259-08f5af8f4706 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 1af19279-e75b-4ec5-91f1-a0a101b229b2] Instance network_info: |[{"id": "1dbc74bf-5582-4e9b-a07e-8ba016f027e5", "address": "fa:16:3e:2e:4c:85", "network": {"id": "f8ae7dcc-bddb-41e4-b0eb-ae1fe84858f2", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1381703882-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a1a2ecd6338148e6a90a71bf1fc5c778", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "00a15667-7ca5-4dc9-be92-164750d87988", "external-id": "nsx-vlan-transportzone-933", "segmentation_id": 933, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1dbc74bf-55", "ovs_interfaceid": "1dbc74bf-5582-4e9b-a07e-8ba016f027e5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 708.482909] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-eabfacb7-034a-4283-9259-08f5af8f4706 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 1af19279-e75b-4ec5-91f1-a0a101b229b2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2e:4c:85', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '00a15667-7ca5-4dc9-be92-164750d87988', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1dbc74bf-5582-4e9b-a07e-8ba016f027e5', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 708.490062] env[63028]: DEBUG oslo.service.loopingcall [None req-eabfacb7-034a-4283-9259-08f5af8f4706 
tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 708.490283] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1af19279-e75b-4ec5-91f1-a0a101b229b2] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 708.490504] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6572cf95-755d-438e-ada9-073aff4c01d0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.513691] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 708.513691] env[63028]: value = "task-2735254" [ 708.513691] env[63028]: _type = "Task" [ 708.513691] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 708.521704] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735254, 'name': CreateVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.541568] env[63028]: DEBUG nova.network.neutron [req-0586cd73-2b2e-4869-b2e8-46decabca87b req-1439faf1-a4e9-4f9a-a392-6c9f2e04445d service nova] [instance: d6137c80-0c09-4655-b264-472753b4fa9c] Updated VIF entry in instance network info cache for port 7d007428-6d28-49a8-aa26-6b6ec99613c2. {{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 708.541952] env[63028]: DEBUG nova.network.neutron [req-0586cd73-2b2e-4869-b2e8-46decabca87b req-1439faf1-a4e9-4f9a-a392-6c9f2e04445d service nova] [instance: d6137c80-0c09-4655-b264-472753b4fa9c] Updating instance_info_cache with network_info: [{"id": "7d007428-6d28-49a8-aa26-6b6ec99613c2", "address": "fa:16:3e:11:ca:39", "network": {"id": "a3d9dede-4b44-4172-9541-24cc42ad633f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-595575190-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.168", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "847e89af959a4266ab55c1d2106ba8fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8459aaf-d6a8-46fb-ad14-464ac3104695", "external-id": "nsx-vlan-transportzone-46", "segmentation_id": 46, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7d007428-6d", "ovs_interfaceid": "7d007428-6d28-49a8-aa26-6b6ec99613c2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 708.598528] env[63028]: DEBUG nova.compute.manager [req-277b8eac-6fa9-49b7-8b90-34f2ffac9435 req-98b02ae8-113d-4c2f-8ecd-dcb2ad52a4ee service nova] [instance: 1af19279-e75b-4ec5-91f1-a0a101b229b2] Received event network-vif-plugged-1dbc74bf-5582-4e9b-a07e-8ba016f027e5 {{(pid=63028) 
external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 708.598886] env[63028]: DEBUG oslo_concurrency.lockutils [req-277b8eac-6fa9-49b7-8b90-34f2ffac9435 req-98b02ae8-113d-4c2f-8ecd-dcb2ad52a4ee service nova] Acquiring lock "1af19279-e75b-4ec5-91f1-a0a101b229b2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 708.599233] env[63028]: DEBUG oslo_concurrency.lockutils [req-277b8eac-6fa9-49b7-8b90-34f2ffac9435 req-98b02ae8-113d-4c2f-8ecd-dcb2ad52a4ee service nova] Lock "1af19279-e75b-4ec5-91f1-a0a101b229b2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 708.599351] env[63028]: DEBUG oslo_concurrency.lockutils [req-277b8eac-6fa9-49b7-8b90-34f2ffac9435 req-98b02ae8-113d-4c2f-8ecd-dcb2ad52a4ee service nova] Lock "1af19279-e75b-4ec5-91f1-a0a101b229b2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 708.599549] env[63028]: DEBUG nova.compute.manager [req-277b8eac-6fa9-49b7-8b90-34f2ffac9435 req-98b02ae8-113d-4c2f-8ecd-dcb2ad52a4ee service nova] [instance: 1af19279-e75b-4ec5-91f1-a0a101b229b2] No waiting events found dispatching network-vif-plugged-1dbc74bf-5582-4e9b-a07e-8ba016f027e5 {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 708.599815] env[63028]: WARNING nova.compute.manager [req-277b8eac-6fa9-49b7-8b90-34f2ffac9435 req-98b02ae8-113d-4c2f-8ecd-dcb2ad52a4ee service nova] [instance: 1af19279-e75b-4ec5-91f1-a0a101b229b2] Received unexpected event network-vif-plugged-1dbc74bf-5582-4e9b-a07e-8ba016f027e5 for instance with vm_state building and task_state spawning. [ 708.600042] env[63028]: DEBUG nova.compute.manager [req-277b8eac-6fa9-49b7-8b90-34f2ffac9435 req-98b02ae8-113d-4c2f-8ecd-dcb2ad52a4ee service nova] [instance: 1af19279-e75b-4ec5-91f1-a0a101b229b2] Received event network-changed-1dbc74bf-5582-4e9b-a07e-8ba016f027e5 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 708.600348] env[63028]: DEBUG nova.compute.manager [req-277b8eac-6fa9-49b7-8b90-34f2ffac9435 req-98b02ae8-113d-4c2f-8ecd-dcb2ad52a4ee service nova] [instance: 1af19279-e75b-4ec5-91f1-a0a101b229b2] Refreshing instance network info cache due to event network-changed-1dbc74bf-5582-4e9b-a07e-8ba016f027e5. 
{{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 708.600607] env[63028]: DEBUG oslo_concurrency.lockutils [req-277b8eac-6fa9-49b7-8b90-34f2ffac9435 req-98b02ae8-113d-4c2f-8ecd-dcb2ad52a4ee service nova] Acquiring lock "refresh_cache-1af19279-e75b-4ec5-91f1-a0a101b229b2" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 708.600786] env[63028]: DEBUG oslo_concurrency.lockutils [req-277b8eac-6fa9-49b7-8b90-34f2ffac9435 req-98b02ae8-113d-4c2f-8ecd-dcb2ad52a4ee service nova] Acquired lock "refresh_cache-1af19279-e75b-4ec5-91f1-a0a101b229b2" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 708.600950] env[63028]: DEBUG nova.network.neutron [req-277b8eac-6fa9-49b7-8b90-34f2ffac9435 req-98b02ae8-113d-4c2f-8ecd-dcb2ad52a4ee service nova] [instance: 1af19279-e75b-4ec5-91f1-a0a101b229b2] Refreshing network info cache for port 1dbc74bf-5582-4e9b-a07e-8ba016f027e5 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 708.744677] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance c06813c4-472d-4bf9-84ec-0d01306bcd48 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 708.814386] env[63028]: DEBUG oslo_vmware.api [None req-404d9a50-0510-4bb1-afa5-ba09e7496ecd tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]527bd964-dd06-c022-4e32-e714d19b9f4f, 'name': SearchDatastore_Task, 'duration_secs': 0.043722} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 708.814699] env[63028]: DEBUG oslo_concurrency.lockutils [None req-404d9a50-0510-4bb1-afa5-ba09e7496ecd tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 708.814937] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-404d9a50-0510-4bb1-afa5-ba09e7496ecd tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] 6e0959ac-8fca-47eb-b501-b50a3e9f025a/6e0959ac-8fca-47eb-b501-b50a3e9f025a.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 708.815696] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-87439a66-6a52-4580-989a-8deab518298b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.824344] env[63028]: DEBUG oslo_vmware.api [None req-404d9a50-0510-4bb1-afa5-ba09e7496ecd tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Waiting for the task: (returnval){ [ 708.824344] env[63028]: value = "task-2735255" [ 708.824344] env[63028]: _type = "Task" [ 708.824344] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 708.832214] env[63028]: DEBUG oslo_vmware.api [None req-404d9a50-0510-4bb1-afa5-ba09e7496ecd tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Task: {'id': task-2735255, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.840016] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d3a51f0-2fb3-42c8-b608-ec45e116bf15 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.860799] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-c61784cf-a7ca-46a7-bbac-a6be4b6e7b12 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: c3014718-1064-4ab9-9600-86490489ee4b] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 708.861203] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a8461dbc-5884-4f9d-b869-99a0dfd03b11 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.867720] env[63028]: DEBUG oslo_vmware.api [None req-c61784cf-a7ca-46a7-bbac-a6be4b6e7b12 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Waiting for the task: (returnval){ [ 708.867720] env[63028]: value = "task-2735256" [ 708.867720] env[63028]: _type = "Task" [ 708.867720] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 708.876295] env[63028]: DEBUG oslo_vmware.api [None req-c61784cf-a7ca-46a7-bbac-a6be4b6e7b12 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Task: {'id': task-2735256, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.025940] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735254, 'name': CreateVM_Task, 'duration_secs': 0.509845} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 709.026286] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1af19279-e75b-4ec5-91f1-a0a101b229b2] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 709.027438] env[63028]: DEBUG oslo_concurrency.lockutils [None req-eabfacb7-034a-4283-9259-08f5af8f4706 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 709.027714] env[63028]: DEBUG oslo_concurrency.lockutils [None req-eabfacb7-034a-4283-9259-08f5af8f4706 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 709.028228] env[63028]: DEBUG oslo_concurrency.lockutils [None req-eabfacb7-034a-4283-9259-08f5af8f4706 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 709.028592] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0810c0ab-6132-4441-a7eb-c2a997678b4b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.034266] env[63028]: DEBUG oslo_vmware.api [None req-eabfacb7-034a-4283-9259-08f5af8f4706 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Waiting for the task: (returnval){ [ 709.034266] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]526e2736-5f18-59d2-6c5a-becf4c8924ce" [ 709.034266] env[63028]: _type = "Task" [ 709.034266] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 709.044812] env[63028]: DEBUG oslo_concurrency.lockutils [req-0586cd73-2b2e-4869-b2e8-46decabca87b req-1439faf1-a4e9-4f9a-a392-6c9f2e04445d service nova] Releasing lock "refresh_cache-d6137c80-0c09-4655-b264-472753b4fa9c" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 709.045376] env[63028]: DEBUG oslo_vmware.api [None req-eabfacb7-034a-4283-9259-08f5af8f4706 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]526e2736-5f18-59d2-6c5a-becf4c8924ce, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.247785] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance 352ac7c3-17a8-4d7e-a66f-47ea7614892c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 709.337022] env[63028]: DEBUG oslo_vmware.api [None req-404d9a50-0510-4bb1-afa5-ba09e7496ecd tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Task: {'id': task-2735255, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.359646] env[63028]: DEBUG nova.network.neutron [req-277b8eac-6fa9-49b7-8b90-34f2ffac9435 req-98b02ae8-113d-4c2f-8ecd-dcb2ad52a4ee service nova] [instance: 1af19279-e75b-4ec5-91f1-a0a101b229b2] Updated VIF entry in instance network info cache for port 1dbc74bf-5582-4e9b-a07e-8ba016f027e5. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 709.360096] env[63028]: DEBUG nova.network.neutron [req-277b8eac-6fa9-49b7-8b90-34f2ffac9435 req-98b02ae8-113d-4c2f-8ecd-dcb2ad52a4ee service nova] [instance: 1af19279-e75b-4ec5-91f1-a0a101b229b2] Updating instance_info_cache with network_info: [{"id": "1dbc74bf-5582-4e9b-a07e-8ba016f027e5", "address": "fa:16:3e:2e:4c:85", "network": {"id": "f8ae7dcc-bddb-41e4-b0eb-ae1fe84858f2", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1381703882-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a1a2ecd6338148e6a90a71bf1fc5c778", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "00a15667-7ca5-4dc9-be92-164750d87988", "external-id": "nsx-vlan-transportzone-933", "segmentation_id": 933, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1dbc74bf-55", "ovs_interfaceid": "1dbc74bf-5582-4e9b-a07e-8ba016f027e5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 709.380074] env[63028]: DEBUG oslo_vmware.api [None req-c61784cf-a7ca-46a7-bbac-a6be4b6e7b12 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Task: {'id': task-2735256, 'name': PowerOffVM_Task, 'duration_secs': 0.210372} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 709.380074] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-c61784cf-a7ca-46a7-bbac-a6be4b6e7b12 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: c3014718-1064-4ab9-9600-86490489ee4b] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 709.384937] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-c61784cf-a7ca-46a7-bbac-a6be4b6e7b12 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: c3014718-1064-4ab9-9600-86490489ee4b] Reconfiguring VM instance instance-00000021 to detach disk 2001 {{(pid=63028) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 709.386585] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dbcf68dc-fb3e-45ba-8039-4e3e5ca0478b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.404020] env[63028]: DEBUG oslo_vmware.api [None req-c61784cf-a7ca-46a7-bbac-a6be4b6e7b12 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Waiting for the task: (returnval){ [ 709.404020] env[63028]: value = "task-2735257" [ 709.404020] env[63028]: _type = "Task" [ 709.404020] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 709.412130] env[63028]: DEBUG oslo_vmware.api [None req-c61784cf-a7ca-46a7-bbac-a6be4b6e7b12 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Task: {'id': task-2735257, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.545777] env[63028]: DEBUG oslo_vmware.api [None req-eabfacb7-034a-4283-9259-08f5af8f4706 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]526e2736-5f18-59d2-6c5a-becf4c8924ce, 'name': SearchDatastore_Task, 'duration_secs': 0.012959} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 709.546138] env[63028]: DEBUG oslo_concurrency.lockutils [None req-eabfacb7-034a-4283-9259-08f5af8f4706 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 709.546395] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-eabfacb7-034a-4283-9259-08f5af8f4706 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 1af19279-e75b-4ec5-91f1-a0a101b229b2] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 709.546628] env[63028]: DEBUG oslo_concurrency.lockutils [None req-eabfacb7-034a-4283-9259-08f5af8f4706 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 709.546774] env[63028]: DEBUG oslo_concurrency.lockutils [None req-eabfacb7-034a-4283-9259-08f5af8f4706 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 709.546948] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-eabfacb7-034a-4283-9259-08f5af8f4706 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 709.547217] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bfff63ca-f194-412a-ab69-b70aaf4c93ac {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.556385] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-eabfacb7-034a-4283-9259-08f5af8f4706 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Created directory with path [datastore1] devstack-image-cache_base 
{{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 709.556671] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-eabfacb7-034a-4283-9259-08f5af8f4706 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 709.557333] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a47320df-3d08-453b-972c-0092de98e36b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.562366] env[63028]: DEBUG oslo_vmware.api [None req-eabfacb7-034a-4283-9259-08f5af8f4706 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Waiting for the task: (returnval){ [ 709.562366] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52a30583-2412-beb9-14a4-8314198a9e15" [ 709.562366] env[63028]: _type = "Task" [ 709.562366] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 709.570021] env[63028]: DEBUG oslo_vmware.api [None req-eabfacb7-034a-4283-9259-08f5af8f4706 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52a30583-2412-beb9-14a4-8314198a9e15, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.751367] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance 50e4934b-b9b1-4887-b5d1-95a37fbf4c41 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 709.836453] env[63028]: DEBUG oslo_vmware.api [None req-404d9a50-0510-4bb1-afa5-ba09e7496ecd tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Task: {'id': task-2735255, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.612545} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 709.836716] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-404d9a50-0510-4bb1-afa5-ba09e7496ecd tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] 6e0959ac-8fca-47eb-b501-b50a3e9f025a/6e0959ac-8fca-47eb-b501-b50a3e9f025a.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 709.836966] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-404d9a50-0510-4bb1-afa5-ba09e7496ecd tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 6e0959ac-8fca-47eb-b501-b50a3e9f025a] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 709.837181] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-416b2baf-4360-4ec8-9082-dea3f76796f3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.843380] env[63028]: DEBUG oslo_vmware.api [None req-404d9a50-0510-4bb1-afa5-ba09e7496ecd tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Waiting for the task: (returnval){ [ 709.843380] env[63028]: value = "task-2735258" [ 709.843380] env[63028]: _type = "Task" [ 709.843380] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 709.851028] env[63028]: DEBUG oslo_vmware.api [None req-404d9a50-0510-4bb1-afa5-ba09e7496ecd tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Task: {'id': task-2735258, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.862546] env[63028]: DEBUG oslo_concurrency.lockutils [req-277b8eac-6fa9-49b7-8b90-34f2ffac9435 req-98b02ae8-113d-4c2f-8ecd-dcb2ad52a4ee service nova] Releasing lock "refresh_cache-1af19279-e75b-4ec5-91f1-a0a101b229b2" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 709.914047] env[63028]: DEBUG oslo_vmware.api [None req-c61784cf-a7ca-46a7-bbac-a6be4b6e7b12 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Task: {'id': task-2735257, 'name': ReconfigVM_Task, 'duration_secs': 0.20887} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 709.914399] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-c61784cf-a7ca-46a7-bbac-a6be4b6e7b12 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: c3014718-1064-4ab9-9600-86490489ee4b] Reconfigured VM instance instance-00000021 to detach disk 2001 {{(pid=63028) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 709.914599] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-c61784cf-a7ca-46a7-bbac-a6be4b6e7b12 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: c3014718-1064-4ab9-9600-86490489ee4b] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 709.914849] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0cdfbc7a-6a7c-4100-8714-68b6294619bb {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.920496] env[63028]: DEBUG oslo_vmware.api [None req-c61784cf-a7ca-46a7-bbac-a6be4b6e7b12 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Waiting for the task: (returnval){ [ 709.920496] env[63028]: value = "task-2735259" [ 709.920496] env[63028]: _type = "Task" [ 709.920496] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 709.928993] env[63028]: DEBUG oslo_vmware.api [None req-c61784cf-a7ca-46a7-bbac-a6be4b6e7b12 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Task: {'id': task-2735259, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.073923] env[63028]: DEBUG oslo_vmware.api [None req-eabfacb7-034a-4283-9259-08f5af8f4706 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52a30583-2412-beb9-14a4-8314198a9e15, 'name': SearchDatastore_Task, 'duration_secs': 0.036118} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 710.074736] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7042031b-9427-4db0-a3d6-419d23f4fc49 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.079997] env[63028]: DEBUG oslo_vmware.api [None req-eabfacb7-034a-4283-9259-08f5af8f4706 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Waiting for the task: (returnval){ [ 710.079997] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5271cc3d-63df-be17-33c0-4ceb46794d6d" [ 710.079997] env[63028]: _type = "Task" [ 710.079997] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.088771] env[63028]: DEBUG oslo_vmware.api [None req-eabfacb7-034a-4283-9259-08f5af8f4706 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5271cc3d-63df-be17-33c0-4ceb46794d6d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.255261] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance 600195de-ceb4-41a6-9ade-dda8b898e4db has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 710.353282] env[63028]: DEBUG oslo_vmware.api [None req-404d9a50-0510-4bb1-afa5-ba09e7496ecd tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Task: {'id': task-2735258, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074119} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 710.353558] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-404d9a50-0510-4bb1-afa5-ba09e7496ecd tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 6e0959ac-8fca-47eb-b501-b50a3e9f025a] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 710.354368] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67a562da-4eaf-4960-a791-e63577a22627 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.376562] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-404d9a50-0510-4bb1-afa5-ba09e7496ecd tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 6e0959ac-8fca-47eb-b501-b50a3e9f025a] Reconfiguring VM instance instance-00000024 to attach disk [datastore1] 6e0959ac-8fca-47eb-b501-b50a3e9f025a/6e0959ac-8fca-47eb-b501-b50a3e9f025a.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 710.376862] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a3c853d5-d157-4c03-8eb9-b44fddb912a4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.396942] env[63028]: DEBUG oslo_vmware.api [None req-404d9a50-0510-4bb1-afa5-ba09e7496ecd tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Waiting for the task: (returnval){ [ 710.396942] env[63028]: value = "task-2735260" [ 710.396942] env[63028]: _type = "Task" [ 710.396942] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.405711] env[63028]: DEBUG oslo_vmware.api [None req-404d9a50-0510-4bb1-afa5-ba09e7496ecd tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Task: {'id': task-2735260, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.429562] env[63028]: DEBUG oslo_vmware.api [None req-c61784cf-a7ca-46a7-bbac-a6be4b6e7b12 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Task: {'id': task-2735259, 'name': PowerOnVM_Task, 'duration_secs': 0.378199} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 710.429908] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-c61784cf-a7ca-46a7-bbac-a6be4b6e7b12 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: c3014718-1064-4ab9-9600-86490489ee4b] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 710.431042] env[63028]: DEBUG nova.compute.manager [None req-c61784cf-a7ca-46a7-bbac-a6be4b6e7b12 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: c3014718-1064-4ab9-9600-86490489ee4b] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 710.431042] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c2ff3a9-ab3b-433d-9716-d4f28b685463 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.590492] env[63028]: DEBUG oslo_vmware.api [None req-eabfacb7-034a-4283-9259-08f5af8f4706 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5271cc3d-63df-be17-33c0-4ceb46794d6d, 'name': SearchDatastore_Task, 'duration_secs': 0.056501} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 710.590780] env[63028]: DEBUG oslo_concurrency.lockutils [None req-eabfacb7-034a-4283-9259-08f5af8f4706 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 710.591041] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-eabfacb7-034a-4283-9259-08f5af8f4706 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] 1af19279-e75b-4ec5-91f1-a0a101b229b2/1af19279-e75b-4ec5-91f1-a0a101b229b2.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 710.591300] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8fbe9ff0-fd9a-4869-855e-05aa44585cbb {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.598750] env[63028]: DEBUG oslo_vmware.api [None req-eabfacb7-034a-4283-9259-08f5af8f4706 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Waiting for the task: (returnval){ [ 710.598750] env[63028]: value = "task-2735261" [ 710.598750] env[63028]: _type = "Task" [ 710.598750] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.606412] env[63028]: DEBUG oslo_vmware.api [None req-eabfacb7-034a-4283-9259-08f5af8f4706 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Task: {'id': task-2735261, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.759412] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance 1316318e-8dcf-4ac2-b40a-6a3ab6964997 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 710.912055] env[63028]: DEBUG oslo_vmware.api [None req-404d9a50-0510-4bb1-afa5-ba09e7496ecd tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Task: {'id': task-2735260, 'name': ReconfigVM_Task, 'duration_secs': 0.385292} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 710.912055] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-404d9a50-0510-4bb1-afa5-ba09e7496ecd tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 6e0959ac-8fca-47eb-b501-b50a3e9f025a] Reconfigured VM instance instance-00000024 to attach disk [datastore1] 6e0959ac-8fca-47eb-b501-b50a3e9f025a/6e0959ac-8fca-47eb-b501-b50a3e9f025a.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 710.912326] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a7737cb7-fcc8-489f-b86d-3f780229fd38 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.921998] env[63028]: DEBUG oslo_vmware.api [None req-404d9a50-0510-4bb1-afa5-ba09e7496ecd tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Waiting for the task: (returnval){ [ 710.921998] env[63028]: value = "task-2735262" [ 710.921998] env[63028]: _type = "Task" [ 710.921998] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.931153] env[63028]: DEBUG oslo_vmware.api [None req-404d9a50-0510-4bb1-afa5-ba09e7496ecd tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Task: {'id': task-2735262, 'name': Rename_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.028366] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d9ad4531-4478-4b68-8e6a-ce52da2b3c63 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Acquiring lock "15326f55-2db8-47c3-b1fd-ce8ba1174c79" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 711.028723] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d9ad4531-4478-4b68-8e6a-ce52da2b3c63 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Lock "15326f55-2db8-47c3-b1fd-ce8ba1174c79" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 711.109124] env[63028]: DEBUG oslo_vmware.api [None req-eabfacb7-034a-4283-9259-08f5af8f4706 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Task: {'id': task-2735261, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.509319} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 711.109429] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-eabfacb7-034a-4283-9259-08f5af8f4706 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] 1af19279-e75b-4ec5-91f1-a0a101b229b2/1af19279-e75b-4ec5-91f1-a0a101b229b2.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 711.109645] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-eabfacb7-034a-4283-9259-08f5af8f4706 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 1af19279-e75b-4ec5-91f1-a0a101b229b2] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 711.109928] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7a8cba1e-8930-4d22-bbfb-236d63d67d40 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.115883] env[63028]: DEBUG oslo_vmware.api [None req-eabfacb7-034a-4283-9259-08f5af8f4706 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Waiting for the task: (returnval){ [ 711.115883] env[63028]: value = "task-2735263" [ 711.115883] env[63028]: _type = "Task" [ 711.115883] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 711.123940] env[63028]: DEBUG oslo_vmware.api [None req-eabfacb7-034a-4283-9259-08f5af8f4706 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Task: {'id': task-2735263, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.263886] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance 5982cd5d-abf1-42d4-bb44-8d79de599f11 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 711.432953] env[63028]: DEBUG oslo_vmware.api [None req-404d9a50-0510-4bb1-afa5-ba09e7496ecd tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Task: {'id': task-2735262, 'name': Rename_Task, 'duration_secs': 0.298015} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 711.433559] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-404d9a50-0510-4bb1-afa5-ba09e7496ecd tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 6e0959ac-8fca-47eb-b501-b50a3e9f025a] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 711.434247] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-351871c8-0e2b-45a3-8904-6ed37c74c583 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.440473] env[63028]: DEBUG oslo_vmware.api [None req-404d9a50-0510-4bb1-afa5-ba09e7496ecd tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Waiting for the task: (returnval){ [ 711.440473] env[63028]: value = "task-2735264" [ 711.440473] env[63028]: _type = "Task" [ 711.440473] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 711.448196] env[63028]: DEBUG oslo_vmware.api [None req-404d9a50-0510-4bb1-afa5-ba09e7496ecd tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Task: {'id': task-2735264, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.627286] env[63028]: DEBUG oslo_vmware.api [None req-eabfacb7-034a-4283-9259-08f5af8f4706 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Task: {'id': task-2735263, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.182894} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 711.627748] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-eabfacb7-034a-4283-9259-08f5af8f4706 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 1af19279-e75b-4ec5-91f1-a0a101b229b2] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 711.628958] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c542618-e999-4e27-91c0-cbf0c6bd2681 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.650791] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-eabfacb7-034a-4283-9259-08f5af8f4706 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 1af19279-e75b-4ec5-91f1-a0a101b229b2] Reconfiguring VM instance instance-00000025 to attach disk [datastore1] 1af19279-e75b-4ec5-91f1-a0a101b229b2/1af19279-e75b-4ec5-91f1-a0a101b229b2.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 711.651035] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-40b02f09-147b-4c3a-a6e4-7eac8a63dc20 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.669911] env[63028]: DEBUG oslo_vmware.api [None req-eabfacb7-034a-4283-9259-08f5af8f4706 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Waiting for the task: (returnval){ [ 711.669911] env[63028]: value = "task-2735265" [ 711.669911] env[63028]: _type = "Task" [ 711.669911] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 711.678455] env[63028]: DEBUG oslo_vmware.api [None req-eabfacb7-034a-4283-9259-08f5af8f4706 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Task: {'id': task-2735265, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.767523] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance 3e45e7f3-a34f-4eab-9fff-1c874c832e2a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 711.861203] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c5c5f271-ef55-4824-84ba-509298f25f57 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Acquiring lock "c3014718-1064-4ab9-9600-86490489ee4b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 711.861203] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c5c5f271-ef55-4824-84ba-509298f25f57 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Lock "c3014718-1064-4ab9-9600-86490489ee4b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 711.861428] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c5c5f271-ef55-4824-84ba-509298f25f57 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Acquiring lock "c3014718-1064-4ab9-9600-86490489ee4b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 711.861479] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c5c5f271-ef55-4824-84ba-509298f25f57 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Lock "c3014718-1064-4ab9-9600-86490489ee4b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 711.861645] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c5c5f271-ef55-4824-84ba-509298f25f57 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Lock "c3014718-1064-4ab9-9600-86490489ee4b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 711.863864] env[63028]: INFO nova.compute.manager [None req-c5c5f271-ef55-4824-84ba-509298f25f57 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: c3014718-1064-4ab9-9600-86490489ee4b] Terminating instance [ 711.950086] env[63028]: DEBUG oslo_vmware.api [None req-404d9a50-0510-4bb1-afa5-ba09e7496ecd tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Task: {'id': task-2735264, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.183096] env[63028]: DEBUG oslo_vmware.api [None req-eabfacb7-034a-4283-9259-08f5af8f4706 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Task: {'id': task-2735265, 'name': ReconfigVM_Task, 'duration_secs': 0.327767} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 712.183479] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-eabfacb7-034a-4283-9259-08f5af8f4706 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 1af19279-e75b-4ec5-91f1-a0a101b229b2] Reconfigured VM instance instance-00000025 to attach disk [datastore1] 1af19279-e75b-4ec5-91f1-a0a101b229b2/1af19279-e75b-4ec5-91f1-a0a101b229b2.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 712.184304] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f408686b-ee5b-42fc-a052-6977b4018b35 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.191354] env[63028]: DEBUG oslo_vmware.api [None req-eabfacb7-034a-4283-9259-08f5af8f4706 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Waiting for the task: (returnval){ [ 712.191354] env[63028]: value = "task-2735266" [ 712.191354] env[63028]: _type = "Task" [ 712.191354] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 712.201516] env[63028]: DEBUG oslo_vmware.api [None req-eabfacb7-034a-4283-9259-08f5af8f4706 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Task: {'id': task-2735266, 'name': Rename_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.270686] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance a4b0d948-d950-414a-b23f-faefa5ab038c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 712.295128] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d1d707cf-9dbd-4413-86de-e670b7c7af54 tempest-VolumesAssistedSnapshotsTest-1060928770 tempest-VolumesAssistedSnapshotsTest-1060928770-project-admin] Acquiring lock "4a782483-c24e-44db-b697-856c69cc4a13" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 712.295409] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d1d707cf-9dbd-4413-86de-e670b7c7af54 tempest-VolumesAssistedSnapshotsTest-1060928770 tempest-VolumesAssistedSnapshotsTest-1060928770-project-admin] Lock "4a782483-c24e-44db-b697-856c69cc4a13" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 712.370967] env[63028]: DEBUG nova.compute.manager [None req-c5c5f271-ef55-4824-84ba-509298f25f57 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: c3014718-1064-4ab9-9600-86490489ee4b] Start destroying the instance on the hypervisor. 
{{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 712.371108] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c5c5f271-ef55-4824-84ba-509298f25f57 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: c3014718-1064-4ab9-9600-86490489ee4b] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 712.372317] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a84abd4-73f0-41f4-9006-a66882f84d7c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.380161] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5c5f271-ef55-4824-84ba-509298f25f57 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: c3014718-1064-4ab9-9600-86490489ee4b] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 712.380455] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-31385a03-c0dc-4166-a350-ba62d6a2d346 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.386624] env[63028]: DEBUG oslo_vmware.api [None req-c5c5f271-ef55-4824-84ba-509298f25f57 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Waiting for the task: (returnval){ [ 712.386624] env[63028]: value = "task-2735267" [ 712.386624] env[63028]: _type = "Task" [ 712.386624] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 712.394704] env[63028]: DEBUG oslo_vmware.api [None req-c5c5f271-ef55-4824-84ba-509298f25f57 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Task: {'id': task-2735267, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.449926] env[63028]: DEBUG oslo_vmware.api [None req-404d9a50-0510-4bb1-afa5-ba09e7496ecd tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Task: {'id': task-2735264, 'name': PowerOnVM_Task, 'duration_secs': 0.934148} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 712.450276] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-404d9a50-0510-4bb1-afa5-ba09e7496ecd tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 6e0959ac-8fca-47eb-b501-b50a3e9f025a] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 712.450531] env[63028]: INFO nova.compute.manager [None req-404d9a50-0510-4bb1-afa5-ba09e7496ecd tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 6e0959ac-8fca-47eb-b501-b50a3e9f025a] Took 9.35 seconds to spawn the instance on the hypervisor. 
[ 712.450730] env[63028]: DEBUG nova.compute.manager [None req-404d9a50-0510-4bb1-afa5-ba09e7496ecd tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 6e0959ac-8fca-47eb-b501-b50a3e9f025a] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 712.451513] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0866d30-b14a-4a55-aef4-098c1088eeef {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.700725] env[63028]: DEBUG oslo_vmware.api [None req-eabfacb7-034a-4283-9259-08f5af8f4706 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Task: {'id': task-2735266, 'name': Rename_Task, 'duration_secs': 0.146205} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 712.700994] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-eabfacb7-034a-4283-9259-08f5af8f4706 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 1af19279-e75b-4ec5-91f1-a0a101b229b2] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 712.701255] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0a7d957b-4b1d-4c4e-80d5-be7b0b76d850 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.707250] env[63028]: DEBUG oslo_vmware.api [None req-eabfacb7-034a-4283-9259-08f5af8f4706 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Waiting for the task: (returnval){ [ 712.707250] env[63028]: value = "task-2735268" [ 712.707250] env[63028]: _type = "Task" [ 712.707250] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 712.714311] env[63028]: DEBUG oslo_vmware.api [None req-eabfacb7-034a-4283-9259-08f5af8f4706 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Task: {'id': task-2735268, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.773820] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance af87f1a5-b413-4b26-be91-474ad1f73df8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 712.798311] env[63028]: DEBUG nova.compute.utils [None req-d1d707cf-9dbd-4413-86de-e670b7c7af54 tempest-VolumesAssistedSnapshotsTest-1060928770 tempest-VolumesAssistedSnapshotsTest-1060928770-project-admin] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 712.898103] env[63028]: DEBUG oslo_vmware.api [None req-c5c5f271-ef55-4824-84ba-509298f25f57 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Task: {'id': task-2735267, 'name': PowerOffVM_Task, 'duration_secs': 0.284554} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 712.898353] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5c5f271-ef55-4824-84ba-509298f25f57 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: c3014718-1064-4ab9-9600-86490489ee4b] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 712.898498] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c5c5f271-ef55-4824-84ba-509298f25f57 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: c3014718-1064-4ab9-9600-86490489ee4b] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 712.898752] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bce634ec-b446-4ddc-8974-94e200445359 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.967779] env[63028]: INFO nova.compute.manager [None req-404d9a50-0510-4bb1-afa5-ba09e7496ecd tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 6e0959ac-8fca-47eb-b501-b50a3e9f025a] Took 54.61 seconds to build instance. 
[ 713.208506] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c5c5f271-ef55-4824-84ba-509298f25f57 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: c3014718-1064-4ab9-9600-86490489ee4b] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 713.208724] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c5c5f271-ef55-4824-84ba-509298f25f57 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: c3014718-1064-4ab9-9600-86490489ee4b] Deleting contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 713.208972] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-c5c5f271-ef55-4824-84ba-509298f25f57 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Deleting the datastore file [datastore2] c3014718-1064-4ab9-9600-86490489ee4b {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 713.212464] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d3c7564a-ec39-4dd8-ac52-503388d02cb5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.219998] env[63028]: DEBUG oslo_vmware.api [None req-eabfacb7-034a-4283-9259-08f5af8f4706 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Task: {'id': task-2735268, 'name': PowerOnVM_Task, 'duration_secs': 0.499128} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 713.221286] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-eabfacb7-034a-4283-9259-08f5af8f4706 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 1af19279-e75b-4ec5-91f1-a0a101b229b2] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 713.221484] env[63028]: INFO nova.compute.manager [None req-eabfacb7-034a-4283-9259-08f5af8f4706 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 1af19279-e75b-4ec5-91f1-a0a101b229b2] Took 7.29 seconds to spawn the instance on the hypervisor. [ 713.221663] env[63028]: DEBUG nova.compute.manager [None req-eabfacb7-034a-4283-9259-08f5af8f4706 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 1af19279-e75b-4ec5-91f1-a0a101b229b2] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 713.222026] env[63028]: DEBUG oslo_vmware.api [None req-c5c5f271-ef55-4824-84ba-509298f25f57 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Waiting for the task: (returnval){ [ 713.222026] env[63028]: value = "task-2735270" [ 713.222026] env[63028]: _type = "Task" [ 713.222026] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 713.222675] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0430f8af-ba67-4ddb-b14c-fa780590a124 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.236345] env[63028]: DEBUG oslo_vmware.api [None req-c5c5f271-ef55-4824-84ba-509298f25f57 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Task: {'id': task-2735270, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.277361] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance da88308f-ce62-40af-adae-e38aa506bdd9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 713.301591] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d1d707cf-9dbd-4413-86de-e670b7c7af54 tempest-VolumesAssistedSnapshotsTest-1060928770 tempest-VolumesAssistedSnapshotsTest-1060928770-project-admin] Lock "4a782483-c24e-44db-b697-856c69cc4a13" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 713.470514] env[63028]: DEBUG oslo_concurrency.lockutils [None req-404d9a50-0510-4bb1-afa5-ba09e7496ecd tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Lock "6e0959ac-8fca-47eb-b501-b50a3e9f025a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 76.613s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 713.734549] env[63028]: DEBUG oslo_vmware.api [None req-c5c5f271-ef55-4824-84ba-509298f25f57 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Task: {'id': task-2735270, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.407371} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 713.734819] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-c5c5f271-ef55-4824-84ba-509298f25f57 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 713.735015] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c5c5f271-ef55-4824-84ba-509298f25f57 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: c3014718-1064-4ab9-9600-86490489ee4b] Deleted contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 713.735206] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c5c5f271-ef55-4824-84ba-509298f25f57 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: c3014718-1064-4ab9-9600-86490489ee4b] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 713.735385] env[63028]: INFO nova.compute.manager [None req-c5c5f271-ef55-4824-84ba-509298f25f57 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: c3014718-1064-4ab9-9600-86490489ee4b] Took 1.36 seconds to destroy the instance on the hypervisor. [ 713.735625] env[63028]: DEBUG oslo.service.loopingcall [None req-c5c5f271-ef55-4824-84ba-509298f25f57 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 713.735835] env[63028]: DEBUG nova.compute.manager [-] [instance: c3014718-1064-4ab9-9600-86490489ee4b] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 713.735899] env[63028]: DEBUG nova.network.neutron [-] [instance: c3014718-1064-4ab9-9600-86490489ee4b] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 713.746223] env[63028]: INFO nova.compute.manager [None req-eabfacb7-034a-4283-9259-08f5af8f4706 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 1af19279-e75b-4ec5-91f1-a0a101b229b2] Took 49.95 seconds to build instance. [ 713.780448] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance cd11b318-9158-4f1d-8aa8-1c9d565bb5d5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 713.976551] env[63028]: DEBUG nova.compute.manager [None req-0845dbb5-9e8a-45d4-8122-206916da704e tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 5982cd5d-abf1-42d4-bb44-8d79de599f11] Starting instance... 
{{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 714.243686] env[63028]: DEBUG nova.compute.manager [req-36fbaf9b-335b-481c-aa09-0f00886c669a req-f48e505d-8ef8-4bcf-9445-cf1deffb6c0b service nova] [instance: c3014718-1064-4ab9-9600-86490489ee4b] Received event network-vif-deleted-66d0224f-a46e-422c-8658-e35f39636307 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 714.243892] env[63028]: INFO nova.compute.manager [req-36fbaf9b-335b-481c-aa09-0f00886c669a req-f48e505d-8ef8-4bcf-9445-cf1deffb6c0b service nova] [instance: c3014718-1064-4ab9-9600-86490489ee4b] Neutron deleted interface 66d0224f-a46e-422c-8658-e35f39636307; detaching it from the instance and deleting it from the info cache [ 714.244078] env[63028]: DEBUG nova.network.neutron [req-36fbaf9b-335b-481c-aa09-0f00886c669a req-f48e505d-8ef8-4bcf-9445-cf1deffb6c0b service nova] [instance: c3014718-1064-4ab9-9600-86490489ee4b] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 714.248162] env[63028]: DEBUG oslo_concurrency.lockutils [None req-eabfacb7-034a-4283-9259-08f5af8f4706 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Lock "1af19279-e75b-4ec5-91f1-a0a101b229b2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 75.325s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 714.282759] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance 7e914e49-0d70-4024-940b-ad2a15e9dff7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 714.283105] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Total usable vcpus: 48, total allocated vcpus: 18 {{(pid=63028) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 714.283304] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=4032MB phys_disk=200GB used_disk=17GB total_vcpus=48 used_vcpus=18 pci_stats=[] {{(pid=63028) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 714.376295] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d1d707cf-9dbd-4413-86de-e670b7c7af54 tempest-VolumesAssistedSnapshotsTest-1060928770 tempest-VolumesAssistedSnapshotsTest-1060928770-project-admin] Acquiring lock "4a782483-c24e-44db-b697-856c69cc4a13" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 714.376690] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d1d707cf-9dbd-4413-86de-e670b7c7af54 tempest-VolumesAssistedSnapshotsTest-1060928770 tempest-VolumesAssistedSnapshotsTest-1060928770-project-admin] Lock "4a782483-c24e-44db-b697-856c69cc4a13" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 714.376995] env[63028]: INFO nova.compute.manager [None req-d1d707cf-9dbd-4413-86de-e670b7c7af54 tempest-VolumesAssistedSnapshotsTest-1060928770 tempest-VolumesAssistedSnapshotsTest-1060928770-project-admin] [instance: 4a782483-c24e-44db-b697-856c69cc4a13] Attaching volume c445674d-5769-47ff-8906-73aeab7bd17c to /dev/sdb [ 714.408626] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6c5cfbb-a88c-4489-b4c3-60c90c196106 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.417916] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a8c21b7-3890-44c9-bcdb-c9339b804a98 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.431365] env[63028]: DEBUG nova.virt.block_device [None req-d1d707cf-9dbd-4413-86de-e670b7c7af54 tempest-VolumesAssistedSnapshotsTest-1060928770 tempest-VolumesAssistedSnapshotsTest-1060928770-project-admin] [instance: 4a782483-c24e-44db-b697-856c69cc4a13] Updating existing volume attachment record: 2ef85e89-b686-4e78-b556-bcb03f73dd63 {{(pid=63028) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 714.493246] env[63028]: DEBUG oslo_concurrency.lockutils [None req-0845dbb5-9e8a-45d4-8122-206916da704e tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 714.722078] env[63028]: DEBUG nova.network.neutron [-] [instance: c3014718-1064-4ab9-9600-86490489ee4b] Updating instance_info_cache with network_info: [] 
{{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 714.748858] env[63028]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7da58a1d-82c3-4a5a-bb7f-fcccffb32db5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.750887] env[63028]: DEBUG nova.compute.manager [None req-c2083659-4645-4440-9683-4af8a1564415 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: 3e45e7f3-a34f-4eab-9fff-1c874c832e2a] Starting instance... {{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 714.761397] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3c814d8-f380-43f6-8c03-f0ed7d321059 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.772162] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-922417fc-ca1c-42b3-a136-fc29e446f084 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.779790] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ef7bc48-9e9f-48ab-8f20-92791d9cc8ea {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.822591] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b87fdeea-a087-4072-a5f0-585fed05f11c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.825468] env[63028]: DEBUG nova.compute.manager [req-36fbaf9b-335b-481c-aa09-0f00886c669a req-f48e505d-8ef8-4bcf-9445-cf1deffb6c0b service nova] [instance: c3014718-1064-4ab9-9600-86490489ee4b] Detach interface failed, port_id=66d0224f-a46e-422c-8658-e35f39636307, reason: Instance c3014718-1064-4ab9-9600-86490489ee4b could not be found. {{(pid=63028) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 714.830769] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c8a802d-4a82-422f-8d3f-10cb93a88054 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.844652] env[63028]: DEBUG nova.compute.provider_tree [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 715.224606] env[63028]: INFO nova.compute.manager [-] [instance: c3014718-1064-4ab9-9600-86490489ee4b] Took 1.49 seconds to deallocate network for instance. 
[ 715.269742] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c2083659-4645-4440-9683-4af8a1564415 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 715.349045] env[63028]: DEBUG nova.scheduler.client.report [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 715.732631] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c5c5f271-ef55-4824-84ba-509298f25f57 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 715.853828] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63028) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 715.854350] env[63028]: DEBUG oslo_concurrency.lockutils [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 9.713s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 715.854609] env[63028]: DEBUG oslo_concurrency.lockutils [None req-81aea495-dd34-4a7a-b7ed-552eea3a13d1 tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 45.668s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 715.856251] env[63028]: INFO nova.compute.claims [None req-81aea495-dd34-4a7a-b7ed-552eea3a13d1 tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] [instance: 22713da1-ae53-4bbe-ae55-2490440cbd87] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 717.271426] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31771a8a-75bd-4525-8640-bd61aeab0d2f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.279236] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03ef6113-281f-465f-90f8-bbbb693234c7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.311049] env[63028]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91deff9b-f909-4259-ae86-40d05fbc9775 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.318225] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dbba8be-1dd5-4ace-8fc4-c9dafab152b7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.331391] env[63028]: DEBUG nova.compute.provider_tree [None req-81aea495-dd34-4a7a-b7ed-552eea3a13d1 tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 717.835252] env[63028]: DEBUG nova.scheduler.client.report [None req-81aea495-dd34-4a7a-b7ed-552eea3a13d1 tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 718.340277] env[63028]: DEBUG oslo_concurrency.lockutils [None req-81aea495-dd34-4a7a-b7ed-552eea3a13d1 tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.485s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 718.340649] env[63028]: DEBUG nova.compute.manager [None req-81aea495-dd34-4a7a-b7ed-552eea3a13d1 tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] [instance: 22713da1-ae53-4bbe-ae55-2490440cbd87] Start building networks asynchronously for instance. 
{{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 718.343224] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a7566947-5ead-4518-803d-4b9abe5bf090 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 47.411s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 718.343365] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a7566947-5ead-4518-803d-4b9abe5bf090 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 718.345364] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ac78c9bd-9fb2-4d81-94df-f75521abb985 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 45.131s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 718.347120] env[63028]: INFO nova.compute.claims [None req-ac78c9bd-9fb2-4d81-94df-f75521abb985 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 0d96ba8e-b46b-48ae-957c-cdc49762c395] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 718.367761] env[63028]: INFO nova.scheduler.client.report [None req-a7566947-5ead-4518-803d-4b9abe5bf090 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Deleted allocations for instance 413f7fea-452b-463f-b396-cdd29e8ffa91 [ 718.851928] env[63028]: DEBUG nova.compute.utils [None req-81aea495-dd34-4a7a-b7ed-552eea3a13d1 tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 718.855137] env[63028]: DEBUG nova.compute.manager [None req-81aea495-dd34-4a7a-b7ed-552eea3a13d1 tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] [instance: 22713da1-ae53-4bbe-ae55-2490440cbd87] Allocating IP information in the background. 
{{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 718.855137] env[63028]: DEBUG nova.network.neutron [None req-81aea495-dd34-4a7a-b7ed-552eea3a13d1 tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] [instance: 22713da1-ae53-4bbe-ae55-2490440cbd87] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 718.876135] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a7566947-5ead-4518-803d-4b9abe5bf090 tempest-FloatingIPsAssociationTestJSON-1306702303 tempest-FloatingIPsAssociationTestJSON-1306702303-project-member] Lock "413f7fea-452b-463f-b396-cdd29e8ffa91" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 51.973s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 718.923464] env[63028]: DEBUG nova.policy [None req-81aea495-dd34-4a7a-b7ed-552eea3a13d1 tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '745b9f85a6054b3d9e58afe09f2fa1c5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '33476042b8604f34b66edbef2aa2d189', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 718.982063] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-d1d707cf-9dbd-4413-86de-e670b7c7af54 tempest-VolumesAssistedSnapshotsTest-1060928770 tempest-VolumesAssistedSnapshotsTest-1060928770-project-admin] [instance: 4a782483-c24e-44db-b697-856c69cc4a13] Volume attach. 
Driver type: vmdk {{(pid=63028) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 718.982331] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-d1d707cf-9dbd-4413-86de-e670b7c7af54 tempest-VolumesAssistedSnapshotsTest-1060928770 tempest-VolumesAssistedSnapshotsTest-1060928770-project-admin] [instance: 4a782483-c24e-44db-b697-856c69cc4a13] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-550692', 'volume_id': 'c445674d-5769-47ff-8906-73aeab7bd17c', 'name': 'volume-c445674d-5769-47ff-8906-73aeab7bd17c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '4a782483-c24e-44db-b697-856c69cc4a13', 'attached_at': '', 'detached_at': '', 'volume_id': 'c445674d-5769-47ff-8906-73aeab7bd17c', 'serial': 'c445674d-5769-47ff-8906-73aeab7bd17c'} {{(pid=63028) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 718.983274] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daf69211-8481-4b10-ab32-00d0a675f8b0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.000440] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04de11f5-4772-40b5-ba15-50821f330e6d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.028489] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-d1d707cf-9dbd-4413-86de-e670b7c7af54 tempest-VolumesAssistedSnapshotsTest-1060928770 tempest-VolumesAssistedSnapshotsTest-1060928770-project-admin] [instance: 4a782483-c24e-44db-b697-856c69cc4a13] Reconfiguring VM instance instance-00000011 to attach disk [datastore1] volume-c445674d-5769-47ff-8906-73aeab7bd17c/volume-c445674d-5769-47ff-8906-73aeab7bd17c.vmdk or device None with type thin {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 719.029266] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-622f107b-4371-4f3e-a285-48eb262c3faa {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.049705] env[63028]: DEBUG oslo_vmware.api [None req-d1d707cf-9dbd-4413-86de-e670b7c7af54 tempest-VolumesAssistedSnapshotsTest-1060928770 tempest-VolumesAssistedSnapshotsTest-1060928770-project-admin] Waiting for the task: (returnval){ [ 719.049705] env[63028]: value = "task-2735275" [ 719.049705] env[63028]: _type = "Task" [ 719.049705] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 719.066115] env[63028]: DEBUG oslo_vmware.api [None req-d1d707cf-9dbd-4413-86de-e670b7c7af54 tempest-VolumesAssistedSnapshotsTest-1060928770 tempest-VolumesAssistedSnapshotsTest-1060928770-project-admin] Task: {'id': task-2735275, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.362052] env[63028]: DEBUG nova.compute.manager [None req-81aea495-dd34-4a7a-b7ed-552eea3a13d1 tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] [instance: 22713da1-ae53-4bbe-ae55-2490440cbd87] Start building block device mappings for instance. 
{{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 719.390607] env[63028]: DEBUG nova.network.neutron [None req-81aea495-dd34-4a7a-b7ed-552eea3a13d1 tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] [instance: 22713da1-ae53-4bbe-ae55-2490440cbd87] Successfully created port: 13472b65-beb6-4fe6-82c2-f1a6516c48a3 {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 719.562130] env[63028]: DEBUG oslo_vmware.api [None req-d1d707cf-9dbd-4413-86de-e670b7c7af54 tempest-VolumesAssistedSnapshotsTest-1060928770 tempest-VolumesAssistedSnapshotsTest-1060928770-project-admin] Task: {'id': task-2735275, 'name': ReconfigVM_Task, 'duration_secs': 0.371674} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 719.562130] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-d1d707cf-9dbd-4413-86de-e670b7c7af54 tempest-VolumesAssistedSnapshotsTest-1060928770 tempest-VolumesAssistedSnapshotsTest-1060928770-project-admin] [instance: 4a782483-c24e-44db-b697-856c69cc4a13] Reconfigured VM instance instance-00000011 to attach disk [datastore1] volume-c445674d-5769-47ff-8906-73aeab7bd17c/volume-c445674d-5769-47ff-8906-73aeab7bd17c.vmdk or device None with type thin {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 719.566617] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7b295b38-99ea-4bbc-b766-fb4b5e1820b2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.583408] env[63028]: DEBUG oslo_vmware.api [None req-d1d707cf-9dbd-4413-86de-e670b7c7af54 tempest-VolumesAssistedSnapshotsTest-1060928770 tempest-VolumesAssistedSnapshotsTest-1060928770-project-admin] Waiting for the task: (returnval){ [ 719.583408] env[63028]: value = "task-2735276" [ 719.583408] env[63028]: _type = "Task" [ 719.583408] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 719.591425] env[63028]: DEBUG oslo_vmware.api [None req-d1d707cf-9dbd-4413-86de-e670b7c7af54 tempest-VolumesAssistedSnapshotsTest-1060928770 tempest-VolumesAssistedSnapshotsTest-1060928770-project-admin] Task: {'id': task-2735276, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.926479] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f5603de-b078-43dc-8a8c-f57f144912f7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.933585] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fd1fa74-68bc-46b9-a9f2-dc82657a93bf {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.964282] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4a2d3f7-4159-4a94-a05c-2f5a2d3bfefc {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.971847] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33b33e14-48fe-45eb-8ff9-8af07bdc63cc {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.984711] env[63028]: DEBUG nova.compute.provider_tree [None req-ac78c9bd-9fb2-4d81-94df-f75521abb985 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 720.095970] env[63028]: DEBUG oslo_vmware.api [None req-d1d707cf-9dbd-4413-86de-e670b7c7af54 tempest-VolumesAssistedSnapshotsTest-1060928770 tempest-VolumesAssistedSnapshotsTest-1060928770-project-admin] Task: {'id': task-2735276, 'name': ReconfigVM_Task, 'duration_secs': 0.139778} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 720.097653] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-d1d707cf-9dbd-4413-86de-e670b7c7af54 tempest-VolumesAssistedSnapshotsTest-1060928770 tempest-VolumesAssistedSnapshotsTest-1060928770-project-admin] [instance: 4a782483-c24e-44db-b697-856c69cc4a13] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-550692', 'volume_id': 'c445674d-5769-47ff-8906-73aeab7bd17c', 'name': 'volume-c445674d-5769-47ff-8906-73aeab7bd17c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '4a782483-c24e-44db-b697-856c69cc4a13', 'attached_at': '', 'detached_at': '', 'volume_id': 'c445674d-5769-47ff-8906-73aeab7bd17c', 'serial': 'c445674d-5769-47ff-8906-73aeab7bd17c'} {{(pid=63028) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 720.373484] env[63028]: DEBUG nova.compute.manager [None req-81aea495-dd34-4a7a-b7ed-552eea3a13d1 tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] [instance: 22713da1-ae53-4bbe-ae55-2490440cbd87] Start spawning the instance on the hypervisor. 
{{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 720.403638] env[63028]: DEBUG nova.virt.hardware [None req-81aea495-dd34-4a7a-b7ed-552eea3a13d1 tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 720.403937] env[63028]: DEBUG nova.virt.hardware [None req-81aea495-dd34-4a7a-b7ed-552eea3a13d1 tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 720.404477] env[63028]: DEBUG nova.virt.hardware [None req-81aea495-dd34-4a7a-b7ed-552eea3a13d1 tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 720.404477] env[63028]: DEBUG nova.virt.hardware [None req-81aea495-dd34-4a7a-b7ed-552eea3a13d1 tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 720.404477] env[63028]: DEBUG nova.virt.hardware [None req-81aea495-dd34-4a7a-b7ed-552eea3a13d1 tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 720.404640] env[63028]: DEBUG nova.virt.hardware [None req-81aea495-dd34-4a7a-b7ed-552eea3a13d1 tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 720.404869] env[63028]: DEBUG nova.virt.hardware [None req-81aea495-dd34-4a7a-b7ed-552eea3a13d1 tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 720.405042] env[63028]: DEBUG nova.virt.hardware [None req-81aea495-dd34-4a7a-b7ed-552eea3a13d1 tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 720.405283] env[63028]: DEBUG nova.virt.hardware [None req-81aea495-dd34-4a7a-b7ed-552eea3a13d1 tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 720.405464] env[63028]: DEBUG nova.virt.hardware [None req-81aea495-dd34-4a7a-b7ed-552eea3a13d1 tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 720.405645] env[63028]: DEBUG nova.virt.hardware [None req-81aea495-dd34-4a7a-b7ed-552eea3a13d1 tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 720.407015] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e68d234a-22e4-40ee-a563-5234c9b6acc4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.416538] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b43263e4-bcc2-42cc-9833-213530ad8cea {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.488340] env[63028]: DEBUG nova.scheduler.client.report [None req-ac78c9bd-9fb2-4d81-94df-f75521abb985 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 720.994490] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ac78c9bd-9fb2-4d81-94df-f75521abb985 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.649s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 720.995023] env[63028]: DEBUG nova.compute.manager [None req-ac78c9bd-9fb2-4d81-94df-f75521abb985 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 0d96ba8e-b46b-48ae-957c-cdc49762c395] Start building networks asynchronously for instance. 
{{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 720.998068] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4bb642d9-e1c2-4c22-8c2f-a6e62dba7907 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 47.168s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 720.998319] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4bb642d9-e1c2-4c22-8c2f-a6e62dba7907 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 721.001643] env[63028]: DEBUG oslo_concurrency.lockutils [None req-71315007-5b90-4f7a-bfbb-cb9db9d67454 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 38.940s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 721.022924] env[63028]: DEBUG nova.compute.manager [req-103fd9b0-c6e9-4a2b-8a0b-292710eaed1f req-ce4cf0d9-f85e-45f5-8c77-7d9060c267df service nova] [instance: 22713da1-ae53-4bbe-ae55-2490440cbd87] Received event network-vif-plugged-13472b65-beb6-4fe6-82c2-f1a6516c48a3 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 721.023186] env[63028]: DEBUG oslo_concurrency.lockutils [req-103fd9b0-c6e9-4a2b-8a0b-292710eaed1f req-ce4cf0d9-f85e-45f5-8c77-7d9060c267df service nova] Acquiring lock "22713da1-ae53-4bbe-ae55-2490440cbd87-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 721.023186] env[63028]: DEBUG oslo_concurrency.lockutils [req-103fd9b0-c6e9-4a2b-8a0b-292710eaed1f req-ce4cf0d9-f85e-45f5-8c77-7d9060c267df service nova] Lock "22713da1-ae53-4bbe-ae55-2490440cbd87-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 721.023526] env[63028]: DEBUG oslo_concurrency.lockutils [req-103fd9b0-c6e9-4a2b-8a0b-292710eaed1f req-ce4cf0d9-f85e-45f5-8c77-7d9060c267df service nova] Lock "22713da1-ae53-4bbe-ae55-2490440cbd87-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 721.023526] env[63028]: DEBUG nova.compute.manager [req-103fd9b0-c6e9-4a2b-8a0b-292710eaed1f req-ce4cf0d9-f85e-45f5-8c77-7d9060c267df service nova] [instance: 22713da1-ae53-4bbe-ae55-2490440cbd87] No waiting events found dispatching network-vif-plugged-13472b65-beb6-4fe6-82c2-f1a6516c48a3 {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 721.024468] env[63028]: WARNING nova.compute.manager [req-103fd9b0-c6e9-4a2b-8a0b-292710eaed1f req-ce4cf0d9-f85e-45f5-8c77-7d9060c267df service nova] [instance: 22713da1-ae53-4bbe-ae55-2490440cbd87] Received unexpected event 
network-vif-plugged-13472b65-beb6-4fe6-82c2-f1a6516c48a3 for instance with vm_state building and task_state spawning. [ 721.040807] env[63028]: INFO nova.scheduler.client.report [None req-4bb642d9-e1c2-4c22-8c2f-a6e62dba7907 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Deleted allocations for instance 2dcdb36f-6c2d-4f70-8aed-27f6511ef3e8 [ 721.129846] env[63028]: DEBUG nova.network.neutron [None req-81aea495-dd34-4a7a-b7ed-552eea3a13d1 tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] [instance: 22713da1-ae53-4bbe-ae55-2490440cbd87] Successfully updated port: 13472b65-beb6-4fe6-82c2-f1a6516c48a3 {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 721.141431] env[63028]: DEBUG nova.objects.instance [None req-d1d707cf-9dbd-4413-86de-e670b7c7af54 tempest-VolumesAssistedSnapshotsTest-1060928770 tempest-VolumesAssistedSnapshotsTest-1060928770-project-admin] Lazy-loading 'flavor' on Instance uuid 4a782483-c24e-44db-b697-856c69cc4a13 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 721.505607] env[63028]: DEBUG nova.compute.utils [None req-ac78c9bd-9fb2-4d81-94df-f75521abb985 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 721.511162] env[63028]: DEBUG nova.compute.manager [None req-ac78c9bd-9fb2-4d81-94df-f75521abb985 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 0d96ba8e-b46b-48ae-957c-cdc49762c395] Allocating IP information in the background. {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 721.511319] env[63028]: DEBUG nova.network.neutron [None req-ac78c9bd-9fb2-4d81-94df-f75521abb985 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 0d96ba8e-b46b-48ae-957c-cdc49762c395] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 721.553049] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4bb642d9-e1c2-4c22-8c2f-a6e62dba7907 tempest-ServerShowV247Test-962587091 tempest-ServerShowV247Test-962587091-project-member] Lock "2dcdb36f-6c2d-4f70-8aed-27f6511ef3e8" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 52.230s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 721.566088] env[63028]: DEBUG nova.policy [None req-ac78c9bd-9fb2-4d81-94df-f75521abb985 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6a91edc39f874b408ba4ab84ea388846', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a1a2ecd6338148e6a90a71bf1fc5c778', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 721.633511] env[63028]: DEBUG oslo_concurrency.lockutils [None req-81aea495-dd34-4a7a-b7ed-552eea3a13d1 
tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] Acquiring lock "refresh_cache-22713da1-ae53-4bbe-ae55-2490440cbd87" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 721.633511] env[63028]: DEBUG oslo_concurrency.lockutils [None req-81aea495-dd34-4a7a-b7ed-552eea3a13d1 tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] Acquired lock "refresh_cache-22713da1-ae53-4bbe-ae55-2490440cbd87" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 721.633511] env[63028]: DEBUG nova.network.neutron [None req-81aea495-dd34-4a7a-b7ed-552eea3a13d1 tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] [instance: 22713da1-ae53-4bbe-ae55-2490440cbd87] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 721.647409] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d1d707cf-9dbd-4413-86de-e670b7c7af54 tempest-VolumesAssistedSnapshotsTest-1060928770 tempest-VolumesAssistedSnapshotsTest-1060928770-project-admin] Lock "4a782483-c24e-44db-b697-856c69cc4a13" "released" by "nova.compute.manager.ComputeManager.attach_volume.<locals>.do_attach_volume" :: held 7.269s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 722.001177] env[63028]: DEBUG nova.network.neutron [None req-ac78c9bd-9fb2-4d81-94df-f75521abb985 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 0d96ba8e-b46b-48ae-957c-cdc49762c395] Successfully created port: 54686a8a-3d85-4fee-89ee-c097c3ec620f {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 722.013239] env[63028]: DEBUG nova.compute.manager [None req-ac78c9bd-9fb2-4d81-94df-f75521abb985 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 0d96ba8e-b46b-48ae-957c-cdc49762c395] Start building block device mappings for instance. 
{{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 722.061070] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b44b9539-a193-414d-b5c0-20fa6e77df30 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.069115] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6035dc95-c95f-49f8-9437-bb78c9f93ef1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.101413] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0741e556-ce41-4b37-9751-bb2800bc22eb {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.109158] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88b94b1f-3bd2-4780-a161-4d4a4ebcae3d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.122635] env[63028]: DEBUG nova.compute.provider_tree [None req-71315007-5b90-4f7a-bfbb-cb9db9d67454 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 722.166012] env[63028]: DEBUG nova.network.neutron [None req-81aea495-dd34-4a7a-b7ed-552eea3a13d1 tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] [instance: 22713da1-ae53-4bbe-ae55-2490440cbd87] Instance cache missing network info. 
{{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 722.173906] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8cfa6dd5-8817-469e-bc3f-cc350e4000dd tempest-VolumesAssistedSnapshotsTest-1060928770 tempest-VolumesAssistedSnapshotsTest-1060928770-project-admin] Acquiring lock "4a782483-c24e-44db-b697-856c69cc4a13" by "nova.compute.manager.ComputeManager.detach_volume.<locals>.do_detach_volume" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 722.174143] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8cfa6dd5-8817-469e-bc3f-cc350e4000dd tempest-VolumesAssistedSnapshotsTest-1060928770 tempest-VolumesAssistedSnapshotsTest-1060928770-project-admin] Lock "4a782483-c24e-44db-b697-856c69cc4a13" acquired by "nova.compute.manager.ComputeManager.detach_volume.<locals>.do_detach_volume" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 722.296769] env[63028]: DEBUG nova.network.neutron [None req-81aea495-dd34-4a7a-b7ed-552eea3a13d1 tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] [instance: 22713da1-ae53-4bbe-ae55-2490440cbd87] Updating instance_info_cache with network_info: [{"id": "13472b65-beb6-4fe6-82c2-f1a6516c48a3", "address": "fa:16:3e:46:10:0d", "network": {"id": "eb5233d7-9c93-4197-b8b4-a78b13075c93", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-1627501809-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "33476042b8604f34b66edbef2aa2d189", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9aa05ef8-c7bb-4af5-983f-bfa0f3f88223", "external-id": "nsx-vlan-transportzone-135", "segmentation_id": 135, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap13472b65-be", "ovs_interfaceid": "13472b65-beb6-4fe6-82c2-f1a6516c48a3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 722.625888] env[63028]: DEBUG nova.scheduler.client.report [None req-71315007-5b90-4f7a-bfbb-cb9db9d67454 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 722.678165] env[63028]: INFO nova.compute.manager [None req-8cfa6dd5-8817-469e-bc3f-cc350e4000dd tempest-VolumesAssistedSnapshotsTest-1060928770 
tempest-VolumesAssistedSnapshotsTest-1060928770-project-admin] [instance: 4a782483-c24e-44db-b697-856c69cc4a13] Detaching volume c445674d-5769-47ff-8906-73aeab7bd17c [ 722.724909] env[63028]: INFO nova.virt.block_device [None req-8cfa6dd5-8817-469e-bc3f-cc350e4000dd tempest-VolumesAssistedSnapshotsTest-1060928770 tempest-VolumesAssistedSnapshotsTest-1060928770-project-admin] [instance: 4a782483-c24e-44db-b697-856c69cc4a13] Attempting to driver detach volume c445674d-5769-47ff-8906-73aeab7bd17c from mountpoint /dev/sdb [ 722.725212] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-8cfa6dd5-8817-469e-bc3f-cc350e4000dd tempest-VolumesAssistedSnapshotsTest-1060928770 tempest-VolumesAssistedSnapshotsTest-1060928770-project-admin] [instance: 4a782483-c24e-44db-b697-856c69cc4a13] Volume detach. Driver type: vmdk {{(pid=63028) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 722.725451] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-8cfa6dd5-8817-469e-bc3f-cc350e4000dd tempest-VolumesAssistedSnapshotsTest-1060928770 tempest-VolumesAssistedSnapshotsTest-1060928770-project-admin] [instance: 4a782483-c24e-44db-b697-856c69cc4a13] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-550692', 'volume_id': 'c445674d-5769-47ff-8906-73aeab7bd17c', 'name': 'volume-c445674d-5769-47ff-8906-73aeab7bd17c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '4a782483-c24e-44db-b697-856c69cc4a13', 'attached_at': '', 'detached_at': '', 'volume_id': 'c445674d-5769-47ff-8906-73aeab7bd17c', 'serial': 'c445674d-5769-47ff-8906-73aeab7bd17c'} {{(pid=63028) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 722.726718] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ade998e-6dbe-4964-a74d-98c8c39b875a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.751106] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f2e5862-7125-49b3-9424-7dcc39110a4c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.758590] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de422c98-f2a3-4c6e-8db0-2096f9290ac9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.780242] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ed7a48e-a10d-4b0e-a141-397ac6efe991 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.798252] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-8cfa6dd5-8817-469e-bc3f-cc350e4000dd tempest-VolumesAssistedSnapshotsTest-1060928770 tempest-VolumesAssistedSnapshotsTest-1060928770-project-admin] The volume has not been displaced from its original location: [datastore1] volume-c445674d-5769-47ff-8906-73aeab7bd17c/volume-c445674d-5769-47ff-8906-73aeab7bd17c.vmdk. No consolidation needed. 
{{(pid=63028) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 722.804292] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-8cfa6dd5-8817-469e-bc3f-cc350e4000dd tempest-VolumesAssistedSnapshotsTest-1060928770 tempest-VolumesAssistedSnapshotsTest-1060928770-project-admin] [instance: 4a782483-c24e-44db-b697-856c69cc4a13] Reconfiguring VM instance instance-00000011 to detach disk 2001 {{(pid=63028) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 722.804823] env[63028]: DEBUG oslo_concurrency.lockutils [None req-81aea495-dd34-4a7a-b7ed-552eea3a13d1 tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] Releasing lock "refresh_cache-22713da1-ae53-4bbe-ae55-2490440cbd87" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 722.805265] env[63028]: DEBUG nova.compute.manager [None req-81aea495-dd34-4a7a-b7ed-552eea3a13d1 tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] [instance: 22713da1-ae53-4bbe-ae55-2490440cbd87] Instance network_info: |[{"id": "13472b65-beb6-4fe6-82c2-f1a6516c48a3", "address": "fa:16:3e:46:10:0d", "network": {"id": "eb5233d7-9c93-4197-b8b4-a78b13075c93", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-1627501809-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "33476042b8604f34b66edbef2aa2d189", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9aa05ef8-c7bb-4af5-983f-bfa0f3f88223", "external-id": "nsx-vlan-transportzone-135", "segmentation_id": 135, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap13472b65-be", "ovs_interfaceid": "13472b65-beb6-4fe6-82c2-f1a6516c48a3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 722.805513] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d8292022-bff9-47db-a42a-ebde8e9a431e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.819251] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-81aea495-dd34-4a7a-b7ed-552eea3a13d1 tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] [instance: 22713da1-ae53-4bbe-ae55-2490440cbd87] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:46:10:0d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9aa05ef8-c7bb-4af5-983f-bfa0f3f88223', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '13472b65-beb6-4fe6-82c2-f1a6516c48a3', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 722.826968] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-81aea495-dd34-4a7a-b7ed-552eea3a13d1 
tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] Creating folder: Project (33476042b8604f34b66edbef2aa2d189). Parent ref: group-v550570. {{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 722.827331] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4ad20ebb-e31a-4414-9205-10a05b811014 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.835224] env[63028]: DEBUG oslo_vmware.api [None req-8cfa6dd5-8817-469e-bc3f-cc350e4000dd tempest-VolumesAssistedSnapshotsTest-1060928770 tempest-VolumesAssistedSnapshotsTest-1060928770-project-admin] Waiting for the task: (returnval){ [ 722.835224] env[63028]: value = "task-2735278" [ 722.835224] env[63028]: _type = "Task" [ 722.835224] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 722.839530] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-81aea495-dd34-4a7a-b7ed-552eea3a13d1 tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] Created folder: Project (33476042b8604f34b66edbef2aa2d189) in parent group-v550570. [ 722.839720] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-81aea495-dd34-4a7a-b7ed-552eea3a13d1 tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] Creating folder: Instances. Parent ref: group-v550693. {{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 722.840316] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bf35c390-0e16-4855-a964-f07dbf29c787 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.845431] env[63028]: DEBUG oslo_vmware.api [None req-8cfa6dd5-8817-469e-bc3f-cc350e4000dd tempest-VolumesAssistedSnapshotsTest-1060928770 tempest-VolumesAssistedSnapshotsTest-1060928770-project-admin] Task: {'id': task-2735278, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 722.855251] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-81aea495-dd34-4a7a-b7ed-552eea3a13d1 tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] Created folder: Instances in parent group-v550693. [ 722.855251] env[63028]: DEBUG oslo.service.loopingcall [None req-81aea495-dd34-4a7a-b7ed-552eea3a13d1 tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 722.855251] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 22713da1-ae53-4bbe-ae55-2490440cbd87] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 722.855583] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9122d53f-9da4-4c88-bec6-51e1d454abc9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.874426] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 722.874426] env[63028]: value = "task-2735280" [ 722.874426] env[63028]: _type = "Task" [ 722.874426] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 722.881843] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735280, 'name': CreateVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.021790] env[63028]: DEBUG nova.compute.manager [None req-ac78c9bd-9fb2-4d81-94df-f75521abb985 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 0d96ba8e-b46b-48ae-957c-cdc49762c395] Start spawning the instance on the hypervisor. {{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 723.047337] env[63028]: DEBUG nova.virt.hardware [None req-ac78c9bd-9fb2-4d81-94df-f75521abb985 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 723.047623] env[63028]: DEBUG nova.virt.hardware [None req-ac78c9bd-9fb2-4d81-94df-f75521abb985 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 723.047722] env[63028]: DEBUG nova.virt.hardware [None req-ac78c9bd-9fb2-4d81-94df-f75521abb985 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 723.047958] env[63028]: DEBUG nova.virt.hardware [None req-ac78c9bd-9fb2-4d81-94df-f75521abb985 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 723.048114] env[63028]: 
DEBUG nova.virt.hardware [None req-ac78c9bd-9fb2-4d81-94df-f75521abb985 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 723.048221] env[63028]: DEBUG nova.virt.hardware [None req-ac78c9bd-9fb2-4d81-94df-f75521abb985 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 723.048453] env[63028]: DEBUG nova.virt.hardware [None req-ac78c9bd-9fb2-4d81-94df-f75521abb985 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 723.048617] env[63028]: DEBUG nova.virt.hardware [None req-ac78c9bd-9fb2-4d81-94df-f75521abb985 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 723.048956] env[63028]: DEBUG nova.virt.hardware [None req-ac78c9bd-9fb2-4d81-94df-f75521abb985 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 723.049097] env[63028]: DEBUG nova.virt.hardware [None req-ac78c9bd-9fb2-4d81-94df-f75521abb985 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 723.049285] env[63028]: DEBUG nova.virt.hardware [None req-ac78c9bd-9fb2-4d81-94df-f75521abb985 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 723.050166] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b907eef-9d62-4d59-8043-c5e98106d35b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.060974] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61b1d439-9813-4430-84aa-92aae3d6b984 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.065660] env[63028]: DEBUG nova.compute.manager [req-d491a5b2-7f9a-48dd-a8f3-ba51519c562d req-a1e51f63-0b1e-4fe3-a941-42be3044a39b service nova] [instance: 22713da1-ae53-4bbe-ae55-2490440cbd87] Received event network-changed-13472b65-beb6-4fe6-82c2-f1a6516c48a3 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 723.065930] env[63028]: DEBUG nova.compute.manager [req-d491a5b2-7f9a-48dd-a8f3-ba51519c562d req-a1e51f63-0b1e-4fe3-a941-42be3044a39b service nova] [instance: 22713da1-ae53-4bbe-ae55-2490440cbd87] 
Refreshing instance network info cache due to event network-changed-13472b65-beb6-4fe6-82c2-f1a6516c48a3. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 723.066214] env[63028]: DEBUG oslo_concurrency.lockutils [req-d491a5b2-7f9a-48dd-a8f3-ba51519c562d req-a1e51f63-0b1e-4fe3-a941-42be3044a39b service nova] Acquiring lock "refresh_cache-22713da1-ae53-4bbe-ae55-2490440cbd87" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 723.066214] env[63028]: DEBUG oslo_concurrency.lockutils [req-d491a5b2-7f9a-48dd-a8f3-ba51519c562d req-a1e51f63-0b1e-4fe3-a941-42be3044a39b service nova] Acquired lock "refresh_cache-22713da1-ae53-4bbe-ae55-2490440cbd87" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 723.066371] env[63028]: DEBUG nova.network.neutron [req-d491a5b2-7f9a-48dd-a8f3-ba51519c562d req-a1e51f63-0b1e-4fe3-a941-42be3044a39b service nova] [instance: 22713da1-ae53-4bbe-ae55-2490440cbd87] Refreshing network info cache for port 13472b65-beb6-4fe6-82c2-f1a6516c48a3 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 723.345152] env[63028]: DEBUG oslo_vmware.api [None req-8cfa6dd5-8817-469e-bc3f-cc350e4000dd tempest-VolumesAssistedSnapshotsTest-1060928770 tempest-VolumesAssistedSnapshotsTest-1060928770-project-admin] Task: {'id': task-2735278, 'name': ReconfigVM_Task, 'duration_secs': 0.242678} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 723.345573] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-8cfa6dd5-8817-469e-bc3f-cc350e4000dd tempest-VolumesAssistedSnapshotsTest-1060928770 tempest-VolumesAssistedSnapshotsTest-1060928770-project-admin] [instance: 4a782483-c24e-44db-b697-856c69cc4a13] Reconfigured VM instance instance-00000011 to detach disk 2001 {{(pid=63028) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 723.350755] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a9166333-ebdc-4e46-b499-35d5a3c6a874 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.365401] env[63028]: DEBUG oslo_vmware.api [None req-8cfa6dd5-8817-469e-bc3f-cc350e4000dd tempest-VolumesAssistedSnapshotsTest-1060928770 tempest-VolumesAssistedSnapshotsTest-1060928770-project-admin] Waiting for the task: (returnval){ [ 723.365401] env[63028]: value = "task-2735281" [ 723.365401] env[63028]: _type = "Task" [ 723.365401] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 723.374198] env[63028]: DEBUG oslo_vmware.api [None req-8cfa6dd5-8817-469e-bc3f-cc350e4000dd tempest-VolumesAssistedSnapshotsTest-1060928770 tempest-VolumesAssistedSnapshotsTest-1060928770-project-admin] Task: {'id': task-2735281, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.389185] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735280, 'name': CreateVM_Task, 'duration_secs': 0.416515} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 723.389366] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 22713da1-ae53-4bbe-ae55-2490440cbd87] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 723.390234] env[63028]: DEBUG oslo_concurrency.lockutils [None req-81aea495-dd34-4a7a-b7ed-552eea3a13d1 tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 723.390335] env[63028]: DEBUG oslo_concurrency.lockutils [None req-81aea495-dd34-4a7a-b7ed-552eea3a13d1 tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 723.390646] env[63028]: DEBUG oslo_concurrency.lockutils [None req-81aea495-dd34-4a7a-b7ed-552eea3a13d1 tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 723.390897] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-93dbc3ac-badb-428f-be8d-af73a2f7647e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.395230] env[63028]: DEBUG oslo_vmware.api [None req-81aea495-dd34-4a7a-b7ed-552eea3a13d1 tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] Waiting for the task: (returnval){ [ 723.395230] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52b0485c-9a42-6a1e-f582-c4760490bbac" [ 723.395230] env[63028]: _type = "Task" [ 723.395230] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 723.405022] env[63028]: DEBUG oslo_vmware.api [None req-81aea495-dd34-4a7a-b7ed-552eea3a13d1 tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52b0485c-9a42-6a1e-f582-c4760490bbac, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.641309] env[63028]: DEBUG oslo_concurrency.lockutils [None req-71315007-5b90-4f7a-bfbb-cb9db9d67454 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.639s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 723.644523] env[63028]: DEBUG oslo_concurrency.lockutils [None req-3ea37b67-73e0-4219-b828-a126fbc237e2 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 35.331s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 723.646506] env[63028]: INFO nova.compute.claims [None req-3ea37b67-73e0-4219-b828-a126fbc237e2 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] [instance: e2d39c43-6666-4fda-b8e2-485399c59e46] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 723.834576] env[63028]: DEBUG nova.network.neutron [req-d491a5b2-7f9a-48dd-a8f3-ba51519c562d req-a1e51f63-0b1e-4fe3-a941-42be3044a39b service nova] [instance: 22713da1-ae53-4bbe-ae55-2490440cbd87] Updated VIF entry in instance network info cache for port 13472b65-beb6-4fe6-82c2-f1a6516c48a3. {{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 723.834948] env[63028]: DEBUG nova.network.neutron [req-d491a5b2-7f9a-48dd-a8f3-ba51519c562d req-a1e51f63-0b1e-4fe3-a941-42be3044a39b service nova] [instance: 22713da1-ae53-4bbe-ae55-2490440cbd87] Updating instance_info_cache with network_info: [{"id": "13472b65-beb6-4fe6-82c2-f1a6516c48a3", "address": "fa:16:3e:46:10:0d", "network": {"id": "eb5233d7-9c93-4197-b8b4-a78b13075c93", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-1627501809-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "33476042b8604f34b66edbef2aa2d189", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9aa05ef8-c7bb-4af5-983f-bfa0f3f88223", "external-id": "nsx-vlan-transportzone-135", "segmentation_id": 135, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap13472b65-be", "ovs_interfaceid": "13472b65-beb6-4fe6-82c2-f1a6516c48a3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 723.879636] env[63028]: DEBUG oslo_vmware.api [None req-8cfa6dd5-8817-469e-bc3f-cc350e4000dd tempest-VolumesAssistedSnapshotsTest-1060928770 tempest-VolumesAssistedSnapshotsTest-1060928770-project-admin] Task: {'id': task-2735281, 'name': ReconfigVM_Task, 'duration_secs': 0.169323} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 723.879636] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-8cfa6dd5-8817-469e-bc3f-cc350e4000dd tempest-VolumesAssistedSnapshotsTest-1060928770 tempest-VolumesAssistedSnapshotsTest-1060928770-project-admin] [instance: 4a782483-c24e-44db-b697-856c69cc4a13] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-550692', 'volume_id': 'c445674d-5769-47ff-8906-73aeab7bd17c', 'name': 'volume-c445674d-5769-47ff-8906-73aeab7bd17c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '4a782483-c24e-44db-b697-856c69cc4a13', 'attached_at': '', 'detached_at': '', 'volume_id': 'c445674d-5769-47ff-8906-73aeab7bd17c', 'serial': 'c445674d-5769-47ff-8906-73aeab7bd17c'} {{(pid=63028) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 723.906837] env[63028]: DEBUG oslo_vmware.api [None req-81aea495-dd34-4a7a-b7ed-552eea3a13d1 tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52b0485c-9a42-6a1e-f582-c4760490bbac, 'name': SearchDatastore_Task, 'duration_secs': 0.012049} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 723.907282] env[63028]: DEBUG oslo_concurrency.lockutils [None req-81aea495-dd34-4a7a-b7ed-552eea3a13d1 tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 723.907609] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-81aea495-dd34-4a7a-b7ed-552eea3a13d1 tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] [instance: 22713da1-ae53-4bbe-ae55-2490440cbd87] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 723.907903] env[63028]: DEBUG oslo_concurrency.lockutils [None req-81aea495-dd34-4a7a-b7ed-552eea3a13d1 tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 723.908146] env[63028]: DEBUG oslo_concurrency.lockutils [None req-81aea495-dd34-4a7a-b7ed-552eea3a13d1 tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 723.908378] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-81aea495-dd34-4a7a-b7ed-552eea3a13d1 tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 723.908893] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c4850d2d-5c10-473c-8efd-e48517db50f0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.919962] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-81aea495-dd34-4a7a-b7ed-552eea3a13d1 tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 723.920174] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-81aea495-dd34-4a7a-b7ed-552eea3a13d1 tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 723.920912] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-67936288-e82a-408e-b1ec-e10e0b38d555 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.926973] env[63028]: DEBUG oslo_vmware.api [None req-81aea495-dd34-4a7a-b7ed-552eea3a13d1 tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] Waiting for the task: (returnval){ [ 723.926973] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52b261d2-c53c-34a1-c169-729873250d72" [ 723.926973] env[63028]: _type = "Task" [ 723.926973] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 723.938078] env[63028]: DEBUG oslo_vmware.api [None req-81aea495-dd34-4a7a-b7ed-552eea3a13d1 tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52b261d2-c53c-34a1-c169-729873250d72, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 724.223421] env[63028]: INFO nova.scheduler.client.report [None req-71315007-5b90-4f7a-bfbb-cb9db9d67454 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Deleted allocation for migration 423e4fa8-4060-4a21-aa81-5c4eef279e90 [ 724.322795] env[63028]: DEBUG nova.network.neutron [None req-ac78c9bd-9fb2-4d81-94df-f75521abb985 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 0d96ba8e-b46b-48ae-957c-cdc49762c395] Successfully updated port: 54686a8a-3d85-4fee-89ee-c097c3ec620f {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 724.340983] env[63028]: DEBUG oslo_concurrency.lockutils [req-d491a5b2-7f9a-48dd-a8f3-ba51519c562d req-a1e51f63-0b1e-4fe3-a941-42be3044a39b service nova] Releasing lock "refresh_cache-22713da1-ae53-4bbe-ae55-2490440cbd87" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 724.443121] env[63028]: DEBUG oslo_vmware.api [None req-81aea495-dd34-4a7a-b7ed-552eea3a13d1 tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52b261d2-c53c-34a1-c169-729873250d72, 'name': SearchDatastore_Task, 'duration_secs': 0.01042} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 724.443121] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4a14db7d-196d-4cdd-849e-a4249057a2b9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.449856] env[63028]: DEBUG oslo_vmware.api [None req-81aea495-dd34-4a7a-b7ed-552eea3a13d1 tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] Waiting for the task: (returnval){ [ 724.449856] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52439f08-4975-75cc-c4fe-7f8563d13cdb" [ 724.449856] env[63028]: _type = "Task" [ 724.449856] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 724.457170] env[63028]: DEBUG nova.objects.instance [None req-8cfa6dd5-8817-469e-bc3f-cc350e4000dd tempest-VolumesAssistedSnapshotsTest-1060928770 tempest-VolumesAssistedSnapshotsTest-1060928770-project-admin] Lazy-loading 'flavor' on Instance uuid 4a782483-c24e-44db-b697-856c69cc4a13 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 724.465059] env[63028]: DEBUG oslo_vmware.api [None req-81aea495-dd34-4a7a-b7ed-552eea3a13d1 tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52439f08-4975-75cc-c4fe-7f8563d13cdb, 'name': SearchDatastore_Task, 'duration_secs': 0.009054} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 724.466584] env[63028]: DEBUG oslo_concurrency.lockutils [None req-81aea495-dd34-4a7a-b7ed-552eea3a13d1 tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 724.466881] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-81aea495-dd34-4a7a-b7ed-552eea3a13d1 tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] 22713da1-ae53-4bbe-ae55-2490440cbd87/22713da1-ae53-4bbe-ae55-2490440cbd87.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 724.467209] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-76d475a4-4854-4ffa-914a-f443991254fa {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.475076] env[63028]: DEBUG oslo_vmware.api [None req-81aea495-dd34-4a7a-b7ed-552eea3a13d1 tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] Waiting for the task: (returnval){ [ 724.475076] env[63028]: value = "task-2735282" [ 724.475076] env[63028]: _type = "Task" [ 724.475076] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 724.483847] env[63028]: DEBUG oslo_vmware.api [None req-81aea495-dd34-4a7a-b7ed-552eea3a13d1 tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] Task: {'id': task-2735282, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 724.730207] env[63028]: DEBUG oslo_concurrency.lockutils [None req-71315007-5b90-4f7a-bfbb-cb9db9d67454 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Lock "f3277886-4498-45c6-be68-e71d8293dc00" "released" by "nova.compute.manager.ComputeManager.confirm_resize.<locals>.do_confirm_resize" :: held 45.783s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 724.824575] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ac78c9bd-9fb2-4d81-94df-f75521abb985 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Acquiring lock "refresh_cache-0d96ba8e-b46b-48ae-957c-cdc49762c395" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 724.824733] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ac78c9bd-9fb2-4d81-94df-f75521abb985 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Acquired lock "refresh_cache-0d96ba8e-b46b-48ae-957c-cdc49762c395" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 724.824908] env[63028]: DEBUG nova.network.neutron [None req-ac78c9bd-9fb2-4d81-94df-f75521abb985 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 0d96ba8e-b46b-48ae-957c-cdc49762c395] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 724.986065] env[63028]: DEBUG oslo_vmware.api [None req-81aea495-dd34-4a7a-b7ed-552eea3a13d1 tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] Task: {'id': task-2735282, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.495376} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 724.986888] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-81aea495-dd34-4a7a-b7ed-552eea3a13d1 tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] 22713da1-ae53-4bbe-ae55-2490440cbd87/22713da1-ae53-4bbe-ae55-2490440cbd87.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 724.986888] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-81aea495-dd34-4a7a-b7ed-552eea3a13d1 tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] [instance: 22713da1-ae53-4bbe-ae55-2490440cbd87] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 724.987137] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-94fc2284-e4bd-4e24-8e68-2359383dace6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.995629] env[63028]: DEBUG oslo_vmware.api [None req-81aea495-dd34-4a7a-b7ed-552eea3a13d1 tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] Waiting for the task: (returnval){ [ 724.995629] env[63028]: value = "task-2735283" [ 724.995629] env[63028]: _type = "Task" [ 724.995629] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 725.010132] env[63028]: DEBUG oslo_vmware.api [None req-81aea495-dd34-4a7a-b7ed-552eea3a13d1 tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] Task: {'id': task-2735283, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.214281] env[63028]: DEBUG nova.compute.manager [req-b8bad4f8-05ab-41a2-8d52-c11309bd807a req-114375e1-1ff0-44e6-bd3a-e060cd4cb884 service nova] [instance: 0d96ba8e-b46b-48ae-957c-cdc49762c395] Received event network-vif-plugged-54686a8a-3d85-4fee-89ee-c097c3ec620f {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 725.214490] env[63028]: DEBUG oslo_concurrency.lockutils [req-b8bad4f8-05ab-41a2-8d52-c11309bd807a req-114375e1-1ff0-44e6-bd3a-e060cd4cb884 service nova] Acquiring lock "0d96ba8e-b46b-48ae-957c-cdc49762c395-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 725.214703] env[63028]: DEBUG oslo_concurrency.lockutils [req-b8bad4f8-05ab-41a2-8d52-c11309bd807a req-114375e1-1ff0-44e6-bd3a-e060cd4cb884 service nova] Lock "0d96ba8e-b46b-48ae-957c-cdc49762c395-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 725.215164] env[63028]: DEBUG oslo_concurrency.lockutils [req-b8bad4f8-05ab-41a2-8d52-c11309bd807a req-114375e1-1ff0-44e6-bd3a-e060cd4cb884 service nova] Lock "0d96ba8e-b46b-48ae-957c-cdc49762c395-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 725.215164] env[63028]: DEBUG nova.compute.manager [req-b8bad4f8-05ab-41a2-8d52-c11309bd807a req-114375e1-1ff0-44e6-bd3a-e060cd4cb884 service nova] [instance: 0d96ba8e-b46b-48ae-957c-cdc49762c395] No waiting events found dispatching network-vif-plugged-54686a8a-3d85-4fee-89ee-c097c3ec620f {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 725.215358] env[63028]: WARNING nova.compute.manager [req-b8bad4f8-05ab-41a2-8d52-c11309bd807a req-114375e1-1ff0-44e6-bd3a-e060cd4cb884 service nova] [instance: 0d96ba8e-b46b-48ae-957c-cdc49762c395] Received unexpected event network-vif-plugged-54686a8a-3d85-4fee-89ee-c097c3ec620f for instance with vm_state building and task_state spawning. [ 725.215358] env[63028]: DEBUG nova.compute.manager [req-b8bad4f8-05ab-41a2-8d52-c11309bd807a req-114375e1-1ff0-44e6-bd3a-e060cd4cb884 service nova] [instance: 0d96ba8e-b46b-48ae-957c-cdc49762c395] Received event network-changed-54686a8a-3d85-4fee-89ee-c097c3ec620f {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 725.215486] env[63028]: DEBUG nova.compute.manager [req-b8bad4f8-05ab-41a2-8d52-c11309bd807a req-114375e1-1ff0-44e6-bd3a-e060cd4cb884 service nova] [instance: 0d96ba8e-b46b-48ae-957c-cdc49762c395] Refreshing instance network info cache due to event network-changed-54686a8a-3d85-4fee-89ee-c097c3ec620f. 
{{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 725.215647] env[63028]: DEBUG oslo_concurrency.lockutils [req-b8bad4f8-05ab-41a2-8d52-c11309bd807a req-114375e1-1ff0-44e6-bd3a-e060cd4cb884 service nova] Acquiring lock "refresh_cache-0d96ba8e-b46b-48ae-957c-cdc49762c395" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 725.260364] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fbb7a29-b345-4f30-9024-9121f3fa8896 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.268489] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e09931b1-752a-4e7f-ace4-9bea4a4850c5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.301438] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-345a90ea-ffca-4c09-9d81-09c952264980 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.309903] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85a533f0-fb48-4f66-8247-0bca32dcf162 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.323334] env[63028]: DEBUG nova.compute.provider_tree [None req-3ea37b67-73e0-4219-b828-a126fbc237e2 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 725.368740] env[63028]: DEBUG nova.network.neutron [None req-ac78c9bd-9fb2-4d81-94df-f75521abb985 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 0d96ba8e-b46b-48ae-957c-cdc49762c395] Instance cache missing network info. {{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 725.465931] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8cfa6dd5-8817-469e-bc3f-cc350e4000dd tempest-VolumesAssistedSnapshotsTest-1060928770 tempest-VolumesAssistedSnapshotsTest-1060928770-project-admin] Lock "4a782483-c24e-44db-b697-856c69cc4a13" "released" by "nova.compute.manager.ComputeManager.detach_volume.<locals>.do_detach_volume" :: held 3.292s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 725.508771] env[63028]: DEBUG oslo_vmware.api [None req-81aea495-dd34-4a7a-b7ed-552eea3a13d1 tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] Task: {'id': task-2735283, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072759} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 725.509704] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-81aea495-dd34-4a7a-b7ed-552eea3a13d1 tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] [instance: 22713da1-ae53-4bbe-ae55-2490440cbd87] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 725.510891] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca42eeba-49d5-464a-9b26-ab05b15b1bfd {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.544122] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-81aea495-dd34-4a7a-b7ed-552eea3a13d1 tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] [instance: 22713da1-ae53-4bbe-ae55-2490440cbd87] Reconfiguring VM instance instance-00000026 to attach disk [datastore1] 22713da1-ae53-4bbe-ae55-2490440cbd87/22713da1-ae53-4bbe-ae55-2490440cbd87.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 725.546484] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-82763c1e-8708-4a55-9da5-da09aabe47c8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.568677] env[63028]: DEBUG oslo_vmware.api [None req-81aea495-dd34-4a7a-b7ed-552eea3a13d1 tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] Waiting for the task: (returnval){ [ 725.568677] env[63028]: value = "task-2735285" [ 725.568677] env[63028]: _type = "Task" [ 725.568677] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 725.578518] env[63028]: DEBUG oslo_vmware.api [None req-81aea495-dd34-4a7a-b7ed-552eea3a13d1 tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] Task: {'id': task-2735285, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.602643] env[63028]: DEBUG nova.network.neutron [None req-ac78c9bd-9fb2-4d81-94df-f75521abb985 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 0d96ba8e-b46b-48ae-957c-cdc49762c395] Updating instance_info_cache with network_info: [{"id": "54686a8a-3d85-4fee-89ee-c097c3ec620f", "address": "fa:16:3e:f8:fb:72", "network": {"id": "f8ae7dcc-bddb-41e4-b0eb-ae1fe84858f2", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1381703882-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a1a2ecd6338148e6a90a71bf1fc5c778", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "00a15667-7ca5-4dc9-be92-164750d87988", "external-id": "nsx-vlan-transportzone-933", "segmentation_id": 933, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap54686a8a-3d", "ovs_interfaceid": "54686a8a-3d85-4fee-89ee-c097c3ec620f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 725.827518] env[63028]: DEBUG nova.scheduler.client.report [None req-3ea37b67-73e0-4219-b828-a126fbc237e2 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 726.080523] env[63028]: DEBUG oslo_vmware.api [None req-81aea495-dd34-4a7a-b7ed-552eea3a13d1 tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] Task: {'id': task-2735285, 'name': ReconfigVM_Task, 'duration_secs': 0.308051} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 726.080866] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-81aea495-dd34-4a7a-b7ed-552eea3a13d1 tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] [instance: 22713da1-ae53-4bbe-ae55-2490440cbd87] Reconfigured VM instance instance-00000026 to attach disk [datastore1] 22713da1-ae53-4bbe-ae55-2490440cbd87/22713da1-ae53-4bbe-ae55-2490440cbd87.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 726.081573] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6e9cac6d-4758-4267-aaed-b1ee0b31493c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.087943] env[63028]: DEBUG oslo_vmware.api [None req-81aea495-dd34-4a7a-b7ed-552eea3a13d1 tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] Waiting for the task: (returnval){ [ 726.087943] env[63028]: value = "task-2735286" [ 726.087943] env[63028]: _type = "Task" [ 726.087943] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 726.098188] env[63028]: DEBUG oslo_vmware.api [None req-81aea495-dd34-4a7a-b7ed-552eea3a13d1 tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] Task: {'id': task-2735286, 'name': Rename_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.105056] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ac78c9bd-9fb2-4d81-94df-f75521abb985 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Releasing lock "refresh_cache-0d96ba8e-b46b-48ae-957c-cdc49762c395" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 726.105449] env[63028]: DEBUG nova.compute.manager [None req-ac78c9bd-9fb2-4d81-94df-f75521abb985 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 0d96ba8e-b46b-48ae-957c-cdc49762c395] Instance network_info: |[{"id": "54686a8a-3d85-4fee-89ee-c097c3ec620f", "address": "fa:16:3e:f8:fb:72", "network": {"id": "f8ae7dcc-bddb-41e4-b0eb-ae1fe84858f2", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1381703882-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a1a2ecd6338148e6a90a71bf1fc5c778", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "00a15667-7ca5-4dc9-be92-164750d87988", "external-id": "nsx-vlan-transportzone-933", "segmentation_id": 933, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap54686a8a-3d", "ovs_interfaceid": "54686a8a-3d85-4fee-89ee-c097c3ec620f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, 
"preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 726.105749] env[63028]: DEBUG oslo_concurrency.lockutils [req-b8bad4f8-05ab-41a2-8d52-c11309bd807a req-114375e1-1ff0-44e6-bd3a-e060cd4cb884 service nova] Acquired lock "refresh_cache-0d96ba8e-b46b-48ae-957c-cdc49762c395" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 726.105923] env[63028]: DEBUG nova.network.neutron [req-b8bad4f8-05ab-41a2-8d52-c11309bd807a req-114375e1-1ff0-44e6-bd3a-e060cd4cb884 service nova] [instance: 0d96ba8e-b46b-48ae-957c-cdc49762c395] Refreshing network info cache for port 54686a8a-3d85-4fee-89ee-c097c3ec620f {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 726.107166] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-ac78c9bd-9fb2-4d81-94df-f75521abb985 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 0d96ba8e-b46b-48ae-957c-cdc49762c395] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f8:fb:72', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '00a15667-7ca5-4dc9-be92-164750d87988', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '54686a8a-3d85-4fee-89ee-c097c3ec620f', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 726.115737] env[63028]: DEBUG oslo.service.loopingcall [None req-ac78c9bd-9fb2-4d81-94df-f75521abb985 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 726.116704] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0d96ba8e-b46b-48ae-957c-cdc49762c395] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 726.117982] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9e090b79-f330-4b81-82a0-fdbf791e6e61 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.138427] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 726.138427] env[63028]: value = "task-2735287" [ 726.138427] env[63028]: _type = "Task" [ 726.138427] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 726.147334] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735287, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.333706] env[63028]: DEBUG oslo_concurrency.lockutils [None req-3ea37b67-73e0-4219-b828-a126fbc237e2 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.689s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 726.334564] env[63028]: DEBUG nova.compute.manager [None req-3ea37b67-73e0-4219-b828-a126fbc237e2 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] [instance: e2d39c43-6666-4fda-b8e2-485399c59e46] Start building networks asynchronously for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 726.339239] env[63028]: DEBUG oslo_concurrency.lockutils [None req-6e3a9737-fb71-42ad-adca-40f4f56c8c52 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 34.662s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 726.341397] env[63028]: INFO nova.compute.claims [None req-6e3a9737-fb71-42ad-adca-40f4f56c8c52 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 726.603016] env[63028]: DEBUG oslo_vmware.api [None req-81aea495-dd34-4a7a-b7ed-552eea3a13d1 tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] Task: {'id': task-2735286, 'name': Rename_Task, 'duration_secs': 0.150143} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 726.603016] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-81aea495-dd34-4a7a-b7ed-552eea3a13d1 tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] [instance: 22713da1-ae53-4bbe-ae55-2490440cbd87] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 726.603016] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1a192360-3951-4dd9-8ed3-52d349c858e9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.608440] env[63028]: DEBUG oslo_vmware.api [None req-81aea495-dd34-4a7a-b7ed-552eea3a13d1 tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] Waiting for the task: (returnval){ [ 726.608440] env[63028]: value = "task-2735288" [ 726.608440] env[63028]: _type = "Task" [ 726.608440] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 726.626573] env[63028]: DEBUG oslo_vmware.api [None req-81aea495-dd34-4a7a-b7ed-552eea3a13d1 tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] Task: {'id': task-2735288, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.651230] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735287, 'name': CreateVM_Task} progress is 99%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.849053] env[63028]: DEBUG nova.compute.utils [None req-3ea37b67-73e0-4219-b828-a126fbc237e2 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 726.850262] env[63028]: DEBUG nova.compute.manager [None req-3ea37b67-73e0-4219-b828-a126fbc237e2 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] [instance: e2d39c43-6666-4fda-b8e2-485399c59e46] Allocating IP information in the background. {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 726.850578] env[63028]: DEBUG nova.network.neutron [None req-3ea37b67-73e0-4219-b828-a126fbc237e2 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] [instance: e2d39c43-6666-4fda-b8e2-485399c59e46] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 726.903349] env[63028]: DEBUG nova.policy [None req-3ea37b67-73e0-4219-b828-a126fbc237e2 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0a5a0263e379494aa906d35f2fe1202f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '97060d5fb7e8454eadaf5dc9b426a248', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 727.124074] env[63028]: DEBUG oslo_vmware.api [None req-81aea495-dd34-4a7a-b7ed-552eea3a13d1 tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] Task: {'id': task-2735288, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.152910] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735287, 'name': CreateVM_Task, 'duration_secs': 0.561414} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 727.152910] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0d96ba8e-b46b-48ae-957c-cdc49762c395] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 727.152910] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ac78c9bd-9fb2-4d81-94df-f75521abb985 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 727.153060] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ac78c9bd-9fb2-4d81-94df-f75521abb985 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 727.153451] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ac78c9bd-9fb2-4d81-94df-f75521abb985 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 727.153720] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e477d9bf-fdea-47b1-a4d4-4f46a51a0b48 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.159353] env[63028]: DEBUG oslo_vmware.api [None req-ac78c9bd-9fb2-4d81-94df-f75521abb985 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Waiting for the task: (returnval){ [ 727.159353] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52a08d2e-77a1-fdfc-132a-2970884dfb59" [ 727.159353] env[63028]: _type = "Task" [ 727.159353] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.169213] env[63028]: DEBUG oslo_vmware.api [None req-ac78c9bd-9fb2-4d81-94df-f75521abb985 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52a08d2e-77a1-fdfc-132a-2970884dfb59, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.285400] env[63028]: DEBUG nova.network.neutron [None req-3ea37b67-73e0-4219-b828-a126fbc237e2 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] [instance: e2d39c43-6666-4fda-b8e2-485399c59e46] Successfully created port: fbe61f8e-ac8c-487d-95a7-fa4740f61aa0 {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 727.293061] env[63028]: DEBUG nova.network.neutron [req-b8bad4f8-05ab-41a2-8d52-c11309bd807a req-114375e1-1ff0-44e6-bd3a-e060cd4cb884 service nova] [instance: 0d96ba8e-b46b-48ae-957c-cdc49762c395] Updated VIF entry in instance network info cache for port 54686a8a-3d85-4fee-89ee-c097c3ec620f. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 727.293061] env[63028]: DEBUG nova.network.neutron [req-b8bad4f8-05ab-41a2-8d52-c11309bd807a req-114375e1-1ff0-44e6-bd3a-e060cd4cb884 service nova] [instance: 0d96ba8e-b46b-48ae-957c-cdc49762c395] Updating instance_info_cache with network_info: [{"id": "54686a8a-3d85-4fee-89ee-c097c3ec620f", "address": "fa:16:3e:f8:fb:72", "network": {"id": "f8ae7dcc-bddb-41e4-b0eb-ae1fe84858f2", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1381703882-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a1a2ecd6338148e6a90a71bf1fc5c778", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "00a15667-7ca5-4dc9-be92-164750d87988", "external-id": "nsx-vlan-transportzone-933", "segmentation_id": 933, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap54686a8a-3d", "ovs_interfaceid": "54686a8a-3d85-4fee-89ee-c097c3ec620f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 727.351215] env[63028]: DEBUG nova.compute.manager [None req-3ea37b67-73e0-4219-b828-a126fbc237e2 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] [instance: e2d39c43-6666-4fda-b8e2-485399c59e46] Start building block device mappings for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 727.621891] env[63028]: DEBUG oslo_vmware.api [None req-81aea495-dd34-4a7a-b7ed-552eea3a13d1 tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] Task: {'id': task-2735288, 'name': PowerOnVM_Task, 'duration_secs': 0.545079} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 727.621891] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-81aea495-dd34-4a7a-b7ed-552eea3a13d1 tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] [instance: 22713da1-ae53-4bbe-ae55-2490440cbd87] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 727.621891] env[63028]: INFO nova.compute.manager [None req-81aea495-dd34-4a7a-b7ed-552eea3a13d1 tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] [instance: 22713da1-ae53-4bbe-ae55-2490440cbd87] Took 7.25 seconds to spawn the instance on the hypervisor. 
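The recurring "Waiting for the task ... / Task ... progress is N% / completed successfully" entries above all come from oslo.vmware's task-polling loop: the driver issues an asynchronous vSphere call (PowerOnVM_Task, ReconfigVM_Task, CopyVirtualDisk_Task, SearchDatastore_Task, ...), gets back a task reference, and blocks in wait_for_task() while _poll_task logs progress until the task succeeds or errors out. A minimal sketch of that pattern follows; the vCenter host, credentials, poll interval and vm_ref below are hypothetical placeholders, not values from this deployment (the real session is built from the nova.conf [vmware] options when VMwareVCDriver loads).

from oslo_vmware import api

# Placeholder endpoint and credentials for illustration only.
session = api.VMwareAPISession('vc.example.test', 'user', 'secret',
                               api_retry_count=10, task_poll_interval=0.5)

def power_on(session, vm_ref):
    # invoke_api() issues the SOAP request and returns a Task managed-object
    # reference immediately; the operation runs asynchronously in vCenter.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    # wait_for_task() polls the task state/progress, which is what produces
    # the "progress is 0% ... 100% ... completed successfully" log entries,
    # and raises if the task finishes in an error state.
    return session.wait_for_task(task)

The surrounding Acquiring/Acquired/Releasing lock triplets (for example around the devstack-image-cache_base vmdk) are the matching oslo_concurrency.lockutils pattern, where lockutils.lock(name) is used as a context manager so the lock is always released even if the guarded vSphere call fails.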
[ 727.621891] env[63028]: DEBUG nova.compute.manager [None req-81aea495-dd34-4a7a-b7ed-552eea3a13d1 tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] [instance: 22713da1-ae53-4bbe-ae55-2490440cbd87] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 727.621891] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a890a28-de76-44fe-9a6c-905e966c5426 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.676028] env[63028]: DEBUG oslo_vmware.api [None req-ac78c9bd-9fb2-4d81-94df-f75521abb985 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52a08d2e-77a1-fdfc-132a-2970884dfb59, 'name': SearchDatastore_Task, 'duration_secs': 0.013377} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 727.676028] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ac78c9bd-9fb2-4d81-94df-f75521abb985 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 727.676028] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-ac78c9bd-9fb2-4d81-94df-f75521abb985 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 0d96ba8e-b46b-48ae-957c-cdc49762c395] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 727.676028] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ac78c9bd-9fb2-4d81-94df-f75521abb985 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 727.676440] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ac78c9bd-9fb2-4d81-94df-f75521abb985 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 727.676440] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-ac78c9bd-9fb2-4d81-94df-f75521abb985 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 727.676440] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-477028af-4d18-4fb3-94f9-40dd887415a0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.685442] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-ac78c9bd-9fb2-4d81-94df-f75521abb985 
tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 727.686089] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-ac78c9bd-9fb2-4d81-94df-f75521abb985 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 727.686692] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0f18d075-3b28-45cd-a956-391748019cdd {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.692924] env[63028]: DEBUG oslo_vmware.api [None req-ac78c9bd-9fb2-4d81-94df-f75521abb985 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Waiting for the task: (returnval){ [ 727.692924] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]525b27a1-72c3-f1d7-6a3a-a371d79f458f" [ 727.692924] env[63028]: _type = "Task" [ 727.692924] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.702796] env[63028]: DEBUG oslo_vmware.api [None req-ac78c9bd-9fb2-4d81-94df-f75521abb985 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]525b27a1-72c3-f1d7-6a3a-a371d79f458f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.794865] env[63028]: DEBUG oslo_concurrency.lockutils [req-b8bad4f8-05ab-41a2-8d52-c11309bd807a req-114375e1-1ff0-44e6-bd3a-e060cd4cb884 service nova] Releasing lock "refresh_cache-0d96ba8e-b46b-48ae-957c-cdc49762c395" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 727.965287] env[63028]: DEBUG nova.network.neutron [None req-3ea37b67-73e0-4219-b828-a126fbc237e2 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] [instance: e2d39c43-6666-4fda-b8e2-485399c59e46] Successfully created port: e0e5a360-86c4-40e6-9e4a-06cd5da44d96 {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 727.975714] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d24eb04-cfaf-42d5-8c5a-da0e0d25d70a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.984620] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44cd5404-ff3f-41a1-904d-931b7cf927c2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.019341] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10fab4a4-94da-4fca-ba9b-092c5c4fe087 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.026908] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3514aa36-eb76-4a1c-b78b-912d73cb5c30 
{{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.041303] env[63028]: DEBUG nova.compute.provider_tree [None req-6e3a9737-fb71-42ad-adca-40f4f56c8c52 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 728.142173] env[63028]: INFO nova.compute.manager [None req-81aea495-dd34-4a7a-b7ed-552eea3a13d1 tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] [instance: 22713da1-ae53-4bbe-ae55-2490440cbd87] Took 57.97 seconds to build instance. [ 728.204889] env[63028]: DEBUG oslo_vmware.api [None req-ac78c9bd-9fb2-4d81-94df-f75521abb985 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]525b27a1-72c3-f1d7-6a3a-a371d79f458f, 'name': SearchDatastore_Task, 'duration_secs': 0.0097} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 728.205904] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-be9f476c-ccf8-42ad-acb1-7167e9b60af6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.212564] env[63028]: DEBUG oslo_vmware.api [None req-ac78c9bd-9fb2-4d81-94df-f75521abb985 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Waiting for the task: (returnval){ [ 728.212564] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52a99891-29bf-4511-3bf0-b5850eecd7e3" [ 728.212564] env[63028]: _type = "Task" [ 728.212564] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 728.221081] env[63028]: DEBUG oslo_vmware.api [None req-ac78c9bd-9fb2-4d81-94df-f75521abb985 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52a99891-29bf-4511-3bf0-b5850eecd7e3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.368128] env[63028]: DEBUG nova.network.neutron [None req-3ea37b67-73e0-4219-b828-a126fbc237e2 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] [instance: e2d39c43-6666-4fda-b8e2-485399c59e46] Successfully created port: d4e7f627-9b53-4218-b7e5-a5c06ee4d53e {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 728.369155] env[63028]: DEBUG nova.compute.manager [None req-3ea37b67-73e0-4219-b828-a126fbc237e2 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] [instance: e2d39c43-6666-4fda-b8e2-485399c59e46] Start spawning the instance on the hypervisor. 
{{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 728.396336] env[63028]: DEBUG nova.virt.hardware [None req-3ea37b67-73e0-4219-b828-a126fbc237e2 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 728.396336] env[63028]: DEBUG nova.virt.hardware [None req-3ea37b67-73e0-4219-b828-a126fbc237e2 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 728.396336] env[63028]: DEBUG nova.virt.hardware [None req-3ea37b67-73e0-4219-b828-a126fbc237e2 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 728.396336] env[63028]: DEBUG nova.virt.hardware [None req-3ea37b67-73e0-4219-b828-a126fbc237e2 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 728.396533] env[63028]: DEBUG nova.virt.hardware [None req-3ea37b67-73e0-4219-b828-a126fbc237e2 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 728.396533] env[63028]: DEBUG nova.virt.hardware [None req-3ea37b67-73e0-4219-b828-a126fbc237e2 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 728.396533] env[63028]: DEBUG nova.virt.hardware [None req-3ea37b67-73e0-4219-b828-a126fbc237e2 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 728.396533] env[63028]: DEBUG nova.virt.hardware [None req-3ea37b67-73e0-4219-b828-a126fbc237e2 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 728.396533] env[63028]: DEBUG nova.virt.hardware [None req-3ea37b67-73e0-4219-b828-a126fbc237e2 
tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 728.396916] env[63028]: DEBUG nova.virt.hardware [None req-3ea37b67-73e0-4219-b828-a126fbc237e2 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 728.397279] env[63028]: DEBUG nova.virt.hardware [None req-3ea37b67-73e0-4219-b828-a126fbc237e2 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 728.398333] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3742493b-b643-48a4-b603-349ebccae10a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.408869] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c87ea4da-8fbf-4827-8cbc-c60c8de74b87 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.544391] env[63028]: DEBUG nova.scheduler.client.report [None req-6e3a9737-fb71-42ad-adca-40f4f56c8c52 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 728.643015] env[63028]: DEBUG oslo_concurrency.lockutils [None req-81aea495-dd34-4a7a-b7ed-552eea3a13d1 tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] Lock "22713da1-ae53-4bbe-ae55-2490440cbd87" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 88.720s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 728.728880] env[63028]: DEBUG oslo_vmware.api [None req-ac78c9bd-9fb2-4d81-94df-f75521abb985 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52a99891-29bf-4511-3bf0-b5850eecd7e3, 'name': SearchDatastore_Task, 'duration_secs': 0.023914} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 728.728880] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ac78c9bd-9fb2-4d81-94df-f75521abb985 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 728.728880] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac78c9bd-9fb2-4d81-94df-f75521abb985 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] 0d96ba8e-b46b-48ae-957c-cdc49762c395/0d96ba8e-b46b-48ae-957c-cdc49762c395.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 728.729215] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-75d4bbcd-9ff3-4de8-93f6-dc10cad860f8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.736691] env[63028]: DEBUG oslo_vmware.api [None req-ac78c9bd-9fb2-4d81-94df-f75521abb985 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Waiting for the task: (returnval){ [ 728.736691] env[63028]: value = "task-2735289" [ 728.736691] env[63028]: _type = "Task" [ 728.736691] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 728.746978] env[63028]: DEBUG oslo_vmware.api [None req-ac78c9bd-9fb2-4d81-94df-f75521abb985 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Task: {'id': task-2735289, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.881659] env[63028]: DEBUG oslo_concurrency.lockutils [None req-3a4e4332-06ce-40fd-b127-c2bc84c3f268 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Acquiring lock "85aafadb-81d6-4687-aed1-fbe829e5f95f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 728.881904] env[63028]: DEBUG oslo_concurrency.lockutils [None req-3a4e4332-06ce-40fd-b127-c2bc84c3f268 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Lock "85aafadb-81d6-4687-aed1-fbe829e5f95f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 729.050976] env[63028]: DEBUG oslo_concurrency.lockutils [None req-6e3a9737-fb71-42ad-adca-40f4f56c8c52 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.712s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 729.051491] env[63028]: DEBUG nova.compute.manager [None req-6e3a9737-fb71-42ad-adca-40f4f56c8c52 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Start building networks asynchronously for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 729.054385] env[63028]: DEBUG oslo_concurrency.lockutils [None req-767adebe-0612-4cb5-9387-d45867077656 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 37.067s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 729.057812] env[63028]: INFO nova.compute.claims [None req-767adebe-0612-4cb5-9387-d45867077656 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] [instance: 352ac7c3-17a8-4d7e-a66f-47ea7614892c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 729.145611] env[63028]: DEBUG nova.compute.manager [None req-4f6edbf0-2dbd-479d-b1c2-51e25413dafe tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: a4b0d948-d950-414a-b23f-faefa5ab038c] Starting instance... {{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 729.247939] env[63028]: DEBUG oslo_vmware.api [None req-ac78c9bd-9fb2-4d81-94df-f75521abb985 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Task: {'id': task-2735289, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.416034] env[63028]: DEBUG oslo_concurrency.lockutils [None req-70b97bb8-6f63-4727-9ea4-16bca122cb7f tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] Acquiring lock "4a782483-c24e-44db-b697-856c69cc4a13" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 729.416239] env[63028]: DEBUG oslo_concurrency.lockutils [None req-70b97bb8-6f63-4727-9ea4-16bca122cb7f tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] Lock "4a782483-c24e-44db-b697-856c69cc4a13" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 729.416588] env[63028]: DEBUG oslo_concurrency.lockutils [None req-70b97bb8-6f63-4727-9ea4-16bca122cb7f tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] Acquiring lock "4a782483-c24e-44db-b697-856c69cc4a13-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 729.416711] env[63028]: DEBUG oslo_concurrency.lockutils [None req-70b97bb8-6f63-4727-9ea4-16bca122cb7f tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] Lock "4a782483-c24e-44db-b697-856c69cc4a13-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 729.416921] env[63028]: DEBUG oslo_concurrency.lockutils [None req-70b97bb8-6f63-4727-9ea4-16bca122cb7f tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] Lock "4a782483-c24e-44db-b697-856c69cc4a13-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 729.419304] env[63028]: INFO nova.compute.manager [None req-70b97bb8-6f63-4727-9ea4-16bca122cb7f tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] [instance: 4a782483-c24e-44db-b697-856c69cc4a13] Terminating instance [ 729.562481] env[63028]: DEBUG nova.compute.utils [None req-6e3a9737-fb71-42ad-adca-40f4f56c8c52 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 729.566866] env[63028]: DEBUG nova.compute.manager [None req-6e3a9737-fb71-42ad-adca-40f4f56c8c52 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Allocating IP information in the background. 
{{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 729.567064] env[63028]: DEBUG nova.network.neutron [None req-6e3a9737-fb71-42ad-adca-40f4f56c8c52 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 729.634408] env[63028]: DEBUG nova.policy [None req-6e3a9737-fb71-42ad-adca-40f4f56c8c52 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8b3c3b29bb4d4c23a09527bcda019773', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b4dcaef840f940bda057d0371cdc5adb', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 729.669451] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4f6edbf0-2dbd-479d-b1c2-51e25413dafe tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 729.751536] env[63028]: DEBUG oslo_vmware.api [None req-ac78c9bd-9fb2-4d81-94df-f75521abb985 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Task: {'id': task-2735289, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.517158} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 729.751536] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac78c9bd-9fb2-4d81-94df-f75521abb985 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] 0d96ba8e-b46b-48ae-957c-cdc49762c395/0d96ba8e-b46b-48ae-957c-cdc49762c395.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 729.751536] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-ac78c9bd-9fb2-4d81-94df-f75521abb985 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 0d96ba8e-b46b-48ae-957c-cdc49762c395] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 729.751536] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-778fdb0d-5e6a-418a-8619-27e6e85ff90d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.762167] env[63028]: DEBUG oslo_vmware.api [None req-ac78c9bd-9fb2-4d81-94df-f75521abb985 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Waiting for the task: (returnval){ [ 729.762167] env[63028]: value = "task-2735290" [ 729.762167] env[63028]: _type = "Task" [ 729.762167] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 729.772253] env[63028]: DEBUG oslo_vmware.api [None req-ac78c9bd-9fb2-4d81-94df-f75521abb985 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Task: {'id': task-2735290, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.926155] env[63028]: DEBUG nova.compute.manager [None req-70b97bb8-6f63-4727-9ea4-16bca122cb7f tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] [instance: 4a782483-c24e-44db-b697-856c69cc4a13] Start destroying the instance on the hypervisor. 
{{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 729.926155] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-70b97bb8-6f63-4727-9ea4-16bca122cb7f tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] [instance: 4a782483-c24e-44db-b697-856c69cc4a13] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 729.926860] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6d8e27a-64be-4fec-b4fb-1b40d42a9d7e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.935602] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-70b97bb8-6f63-4727-9ea4-16bca122cb7f tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] [instance: 4a782483-c24e-44db-b697-856c69cc4a13] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 729.935895] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2ec497ce-9c16-42ee-8a06-a01d77ca298e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.942847] env[63028]: DEBUG oslo_vmware.api [None req-70b97bb8-6f63-4727-9ea4-16bca122cb7f tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] Waiting for the task: (returnval){ [ 729.942847] env[63028]: value = "task-2735291" [ 729.942847] env[63028]: _type = "Task" [ 729.942847] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 729.953198] env[63028]: DEBUG oslo_vmware.api [None req-70b97bb8-6f63-4727-9ea4-16bca122cb7f tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] Task: {'id': task-2735291, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.071125] env[63028]: DEBUG nova.compute.manager [None req-6e3a9737-fb71-42ad-adca-40f4f56c8c52 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Start building block device mappings for instance. 
{{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 730.136720] env[63028]: DEBUG nova.network.neutron [None req-6e3a9737-fb71-42ad-adca-40f4f56c8c52 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Successfully created port: e9be02f8-7ea6-45eb-a1cb-65fb95285caf {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 730.285189] env[63028]: DEBUG nova.network.neutron [None req-3ea37b67-73e0-4219-b828-a126fbc237e2 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] [instance: e2d39c43-6666-4fda-b8e2-485399c59e46] Successfully updated port: fbe61f8e-ac8c-487d-95a7-fa4740f61aa0 {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 730.295320] env[63028]: DEBUG oslo_vmware.api [None req-ac78c9bd-9fb2-4d81-94df-f75521abb985 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Task: {'id': task-2735290, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.146287} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 730.296128] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-ac78c9bd-9fb2-4d81-94df-f75521abb985 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 0d96ba8e-b46b-48ae-957c-cdc49762c395] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 730.302388] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ccbc9ca-e334-4eeb-abb4-b5f01d3e0231 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.349267] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-ac78c9bd-9fb2-4d81-94df-f75521abb985 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 0d96ba8e-b46b-48ae-957c-cdc49762c395] Reconfiguring VM instance instance-00000027 to attach disk [datastore1] 0d96ba8e-b46b-48ae-957c-cdc49762c395/0d96ba8e-b46b-48ae-957c-cdc49762c395.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 730.354247] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c70bd160-da2c-47b7-a7f3-cf9031725ba7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.387561] env[63028]: DEBUG oslo_vmware.api [None req-ac78c9bd-9fb2-4d81-94df-f75521abb985 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Waiting for the task: (returnval){ [ 730.387561] env[63028]: value = "task-2735292" [ 730.387561] env[63028]: _type = "Task" [ 730.387561] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 730.402680] env[63028]: DEBUG oslo_concurrency.lockutils [None req-efa009e1-31b4-4810-870c-cc093d5cbf71 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Acquiring lock "52b19182-a7e2-4461-b4eb-e6cd8a30024e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 730.402939] env[63028]: DEBUG oslo_concurrency.lockutils [None req-efa009e1-31b4-4810-870c-cc093d5cbf71 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Lock "52b19182-a7e2-4461-b4eb-e6cd8a30024e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 730.403172] env[63028]: DEBUG oslo_vmware.api [None req-ac78c9bd-9fb2-4d81-94df-f75521abb985 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Task: {'id': task-2735292, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.449588] env[63028]: DEBUG nova.compute.manager [req-d64ca1eb-5a5b-4509-ada4-482927e5c3ec req-bf6e9726-f761-45c9-9f93-aba0ce7ef1c7 service nova] [instance: e2d39c43-6666-4fda-b8e2-485399c59e46] Received event network-vif-plugged-fbe61f8e-ac8c-487d-95a7-fa4740f61aa0 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 730.449764] env[63028]: DEBUG oslo_concurrency.lockutils [req-d64ca1eb-5a5b-4509-ada4-482927e5c3ec req-bf6e9726-f761-45c9-9f93-aba0ce7ef1c7 service nova] Acquiring lock "e2d39c43-6666-4fda-b8e2-485399c59e46-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 730.450271] env[63028]: DEBUG oslo_concurrency.lockutils [req-d64ca1eb-5a5b-4509-ada4-482927e5c3ec req-bf6e9726-f761-45c9-9f93-aba0ce7ef1c7 service nova] Lock "e2d39c43-6666-4fda-b8e2-485399c59e46-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 730.450462] env[63028]: DEBUG oslo_concurrency.lockutils [req-d64ca1eb-5a5b-4509-ada4-482927e5c3ec req-bf6e9726-f761-45c9-9f93-aba0ce7ef1c7 service nova] Lock "e2d39c43-6666-4fda-b8e2-485399c59e46-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 730.450797] env[63028]: DEBUG nova.compute.manager [req-d64ca1eb-5a5b-4509-ada4-482927e5c3ec req-bf6e9726-f761-45c9-9f93-aba0ce7ef1c7 service nova] [instance: e2d39c43-6666-4fda-b8e2-485399c59e46] No waiting events found dispatching network-vif-plugged-fbe61f8e-ac8c-487d-95a7-fa4740f61aa0 {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 730.450797] env[63028]: WARNING nova.compute.manager [req-d64ca1eb-5a5b-4509-ada4-482927e5c3ec req-bf6e9726-f761-45c9-9f93-aba0ce7ef1c7 service nova] [instance: e2d39c43-6666-4fda-b8e2-485399c59e46] 
Received unexpected event network-vif-plugged-fbe61f8e-ac8c-487d-95a7-fa4740f61aa0 for instance with vm_state building and task_state spawning. [ 730.459232] env[63028]: DEBUG oslo_vmware.api [None req-70b97bb8-6f63-4727-9ea4-16bca122cb7f tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] Task: {'id': task-2735291, 'name': PowerOffVM_Task, 'duration_secs': 0.327987} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 730.459232] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-70b97bb8-6f63-4727-9ea4-16bca122cb7f tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] [instance: 4a782483-c24e-44db-b697-856c69cc4a13] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 730.459232] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-70b97bb8-6f63-4727-9ea4-16bca122cb7f tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] [instance: 4a782483-c24e-44db-b697-856c69cc4a13] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 730.459232] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cfd84fac-910a-48d1-896d-cc63d7c83d39 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.539346] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-70b97bb8-6f63-4727-9ea4-16bca122cb7f tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] [instance: 4a782483-c24e-44db-b697-856c69cc4a13] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 730.540101] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-70b97bb8-6f63-4727-9ea4-16bca122cb7f tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] [instance: 4a782483-c24e-44db-b697-856c69cc4a13] Deleting contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 730.540592] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-70b97bb8-6f63-4727-9ea4-16bca122cb7f tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] Deleting the datastore file [datastore2] 4a782483-c24e-44db-b697-856c69cc4a13 {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 730.544701] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3c07c1e1-9d41-4929-9255-1aa9ed50c3e9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.554514] env[63028]: DEBUG oslo_vmware.api [None req-70b97bb8-6f63-4727-9ea4-16bca122cb7f tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] Waiting for the task: (returnval){ [ 730.554514] env[63028]: value = "task-2735294" [ 730.554514] env[63028]: _type = "Task" [ 730.554514] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 730.566127] env[63028]: DEBUG oslo_vmware.api [None req-70b97bb8-6f63-4727-9ea4-16bca122cb7f tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] Task: {'id': task-2735294, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.713504] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ed7e176c-ecb7-4708-9379-1888a2940075 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Acquiring lock "9b1cd3c1-5e9a-43b6-9efb-1baf879ae0c0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 730.713766] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ed7e176c-ecb7-4708-9379-1888a2940075 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Lock "9b1cd3c1-5e9a-43b6-9efb-1baf879ae0c0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 730.713976] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ed7e176c-ecb7-4708-9379-1888a2940075 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Acquiring lock "9b1cd3c1-5e9a-43b6-9efb-1baf879ae0c0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 730.714227] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ed7e176c-ecb7-4708-9379-1888a2940075 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Lock "9b1cd3c1-5e9a-43b6-9efb-1baf879ae0c0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 730.714491] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ed7e176c-ecb7-4708-9379-1888a2940075 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Lock "9b1cd3c1-5e9a-43b6-9efb-1baf879ae0c0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 730.716895] env[63028]: INFO nova.compute.manager [None req-ed7e176c-ecb7-4708-9379-1888a2940075 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] [instance: 9b1cd3c1-5e9a-43b6-9efb-1baf879ae0c0] Terminating instance [ 730.762301] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df862f08-7d91-4406-84c0-c2575add3b17 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.771673] env[63028]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdeb9c9f-f590-414c-ab04-6edfe5e3dbf8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.807150] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3aa13ee-f021-486b-9061-be5ea3fb3f42 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.815875] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c2f9052-35f2-4966-940b-70e85f65502e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.834186] env[63028]: DEBUG nova.compute.provider_tree [None req-767adebe-0612-4cb5-9387-d45867077656 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 730.901347] env[63028]: DEBUG oslo_vmware.api [None req-ac78c9bd-9fb2-4d81-94df-f75521abb985 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Task: {'id': task-2735292, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.066942] env[63028]: DEBUG oslo_vmware.api [None req-70b97bb8-6f63-4727-9ea4-16bca122cb7f tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] Task: {'id': task-2735294, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.322445} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.068421] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-70b97bb8-6f63-4727-9ea4-16bca122cb7f tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 731.068618] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-70b97bb8-6f63-4727-9ea4-16bca122cb7f tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] [instance: 4a782483-c24e-44db-b697-856c69cc4a13] Deleted contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 731.068795] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-70b97bb8-6f63-4727-9ea4-16bca122cb7f tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] [instance: 4a782483-c24e-44db-b697-856c69cc4a13] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 731.068973] env[63028]: INFO nova.compute.manager [None req-70b97bb8-6f63-4727-9ea4-16bca122cb7f tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] [instance: 4a782483-c24e-44db-b697-856c69cc4a13] Took 1.14 seconds to destroy the instance on the hypervisor. 
[ 731.069256] env[63028]: DEBUG oslo.service.loopingcall [None req-70b97bb8-6f63-4727-9ea4-16bca122cb7f tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 731.069491] env[63028]: DEBUG nova.compute.manager [-] [instance: 4a782483-c24e-44db-b697-856c69cc4a13] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 731.069589] env[63028]: DEBUG nova.network.neutron [-] [instance: 4a782483-c24e-44db-b697-856c69cc4a13] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 731.082133] env[63028]: DEBUG nova.compute.manager [None req-6e3a9737-fb71-42ad-adca-40f4f56c8c52 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Start spawning the instance on the hypervisor. {{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 731.107600] env[63028]: DEBUG nova.virt.hardware [None req-6e3a9737-fb71-42ad-adca-40f4f56c8c52 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 731.107848] env[63028]: DEBUG nova.virt.hardware [None req-6e3a9737-fb71-42ad-adca-40f4f56c8c52 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 731.108013] env[63028]: DEBUG nova.virt.hardware [None req-6e3a9737-fb71-42ad-adca-40f4f56c8c52 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 731.108247] env[63028]: DEBUG nova.virt.hardware [None req-6e3a9737-fb71-42ad-adca-40f4f56c8c52 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 731.108446] env[63028]: DEBUG nova.virt.hardware [None req-6e3a9737-fb71-42ad-adca-40f4f56c8c52 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:396}} [ 731.108601] env[63028]: DEBUG nova.virt.hardware [None req-6e3a9737-fb71-42ad-adca-40f4f56c8c52 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 731.108813] env[63028]: DEBUG nova.virt.hardware [None req-6e3a9737-fb71-42ad-adca-40f4f56c8c52 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 731.108971] env[63028]: DEBUG nova.virt.hardware [None req-6e3a9737-fb71-42ad-adca-40f4f56c8c52 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 731.110522] env[63028]: DEBUG nova.virt.hardware [None req-6e3a9737-fb71-42ad-adca-40f4f56c8c52 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 731.110729] env[63028]: DEBUG nova.virt.hardware [None req-6e3a9737-fb71-42ad-adca-40f4f56c8c52 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 731.110917] env[63028]: DEBUG nova.virt.hardware [None req-6e3a9737-fb71-42ad-adca-40f4f56c8c52 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 731.111796] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81177b16-37de-492c-9da5-af63ad593efd {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.120766] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e0a70a0-7409-4e5f-9423-df4fd5fdc8c1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.223256] env[63028]: DEBUG nova.compute.manager [None req-ed7e176c-ecb7-4708-9379-1888a2940075 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] [instance: 9b1cd3c1-5e9a-43b6-9efb-1baf879ae0c0] Start destroying the instance on the hypervisor. 
{{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 731.223503] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-ed7e176c-ecb7-4708-9379-1888a2940075 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] [instance: 9b1cd3c1-5e9a-43b6-9efb-1baf879ae0c0] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 731.224424] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9daa297-abee-4732-931a-f98b58cc0ef9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.232193] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed7e176c-ecb7-4708-9379-1888a2940075 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] [instance: 9b1cd3c1-5e9a-43b6-9efb-1baf879ae0c0] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 731.232453] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7451eb7c-949a-4f30-98fb-52ab74039e1f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.239045] env[63028]: DEBUG oslo_vmware.api [None req-ed7e176c-ecb7-4708-9379-1888a2940075 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Waiting for the task: (returnval){ [ 731.239045] env[63028]: value = "task-2735295" [ 731.239045] env[63028]: _type = "Task" [ 731.239045] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.252891] env[63028]: DEBUG oslo_vmware.api [None req-ed7e176c-ecb7-4708-9379-1888a2940075 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Task: {'id': task-2735295, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.340021] env[63028]: DEBUG nova.scheduler.client.report [None req-767adebe-0612-4cb5-9387-d45867077656 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 731.398775] env[63028]: DEBUG oslo_vmware.api [None req-ac78c9bd-9fb2-4d81-94df-f75521abb985 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Task: {'id': task-2735292, 'name': ReconfigVM_Task, 'duration_secs': 0.838812} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.399255] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-ac78c9bd-9fb2-4d81-94df-f75521abb985 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 0d96ba8e-b46b-48ae-957c-cdc49762c395] Reconfigured VM instance instance-00000027 to attach disk [datastore1] 0d96ba8e-b46b-48ae-957c-cdc49762c395/0d96ba8e-b46b-48ae-957c-cdc49762c395.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 731.400557] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-455a3803-4ba5-43e0-9d11-bdac33635621 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.410288] env[63028]: DEBUG oslo_vmware.api [None req-ac78c9bd-9fb2-4d81-94df-f75521abb985 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Waiting for the task: (returnval){ [ 731.410288] env[63028]: value = "task-2735296" [ 731.410288] env[63028]: _type = "Task" [ 731.410288] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.416823] env[63028]: DEBUG oslo_vmware.api [None req-ac78c9bd-9fb2-4d81-94df-f75521abb985 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Task: {'id': task-2735296, 'name': Rename_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.753995] env[63028]: DEBUG oslo_vmware.api [None req-ed7e176c-ecb7-4708-9379-1888a2940075 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Task: {'id': task-2735295, 'name': PowerOffVM_Task, 'duration_secs': 0.425851} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.753995] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed7e176c-ecb7-4708-9379-1888a2940075 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] [instance: 9b1cd3c1-5e9a-43b6-9efb-1baf879ae0c0] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 731.753995] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-ed7e176c-ecb7-4708-9379-1888a2940075 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] [instance: 9b1cd3c1-5e9a-43b6-9efb-1baf879ae0c0] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 731.753995] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8810ad0a-21ef-417d-b4eb-aa67976e74e9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.815921] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-ed7e176c-ecb7-4708-9379-1888a2940075 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] [instance: 9b1cd3c1-5e9a-43b6-9efb-1baf879ae0c0] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 731.816218] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-ed7e176c-ecb7-4708-9379-1888a2940075 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] [instance: 9b1cd3c1-5e9a-43b6-9efb-1baf879ae0c0] Deleting contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 731.816401] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-ed7e176c-ecb7-4708-9379-1888a2940075 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Deleting the datastore file [datastore1] 9b1cd3c1-5e9a-43b6-9efb-1baf879ae0c0 {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 731.816667] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-03dee19c-932a-4a5b-a546-bb65cdf32d75 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.824259] env[63028]: DEBUG oslo_vmware.api [None req-ed7e176c-ecb7-4708-9379-1888a2940075 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Waiting for the task: (returnval){ [ 731.824259] env[63028]: value = "task-2735298" [ 731.824259] env[63028]: _type = "Task" [ 731.824259] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.834242] env[63028]: DEBUG oslo_vmware.api [None req-ed7e176c-ecb7-4708-9379-1888a2940075 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Task: {'id': task-2735298, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.850470] env[63028]: DEBUG oslo_concurrency.lockutils [None req-767adebe-0612-4cb5-9387-d45867077656 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.793s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 731.850470] env[63028]: DEBUG nova.compute.manager [None req-767adebe-0612-4cb5-9387-d45867077656 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] [instance: 352ac7c3-17a8-4d7e-a66f-47ea7614892c] Start building networks asynchronously for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 731.851253] env[63028]: DEBUG oslo_concurrency.lockutils [None req-0f3678b7-8afb-4ff2-9687-0585f2f9d289 tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 36.474s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 731.852889] env[63028]: DEBUG oslo_concurrency.lockutils [None req-0f3678b7-8afb-4ff2-9687-0585f2f9d289 tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 731.853543] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a6e1cdda-f79c-4f40-b760-2ed696cbf7f7 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.104s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 731.855023] env[63028]: INFO nova.compute.claims [None req-a6e1cdda-f79c-4f40-b760-2ed696cbf7f7 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 50e4934b-b9b1-4887-b5d1-95a37fbf4c41] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 731.861833] env[63028]: DEBUG nova.network.neutron [None req-6e3a9737-fb71-42ad-adca-40f4f56c8c52 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Successfully updated port: e9be02f8-7ea6-45eb-a1cb-65fb95285caf {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 731.893442] env[63028]: INFO nova.scheduler.client.report [None req-0f3678b7-8afb-4ff2-9687-0585f2f9d289 tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] Deleted allocations for instance 8a09beac-4b54-4fbb-9bac-3dcfe4c21fb3 [ 731.919545] env[63028]: DEBUG oslo_vmware.api [None req-ac78c9bd-9fb2-4d81-94df-f75521abb985 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Task: {'id': task-2735296, 'name': Rename_Task, 'duration_secs': 0.294691} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.920422] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac78c9bd-9fb2-4d81-94df-f75521abb985 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 0d96ba8e-b46b-48ae-957c-cdc49762c395] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 731.920422] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-80542dc1-d920-426d-98d6-89073e34ff51 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.929522] env[63028]: DEBUG oslo_vmware.api [None req-ac78c9bd-9fb2-4d81-94df-f75521abb985 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Waiting for the task: (returnval){ [ 731.929522] env[63028]: value = "task-2735299" [ 731.929522] env[63028]: _type = "Task" [ 731.929522] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.937683] env[63028]: DEBUG oslo_vmware.api [None req-ac78c9bd-9fb2-4d81-94df-f75521abb985 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Task: {'id': task-2735299, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.964351] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a647d5f8-656b-4f8b-a625-07443c814584 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Acquiring lock "a1d00736-1a8d-46e0-9358-46e848b94797" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 731.964612] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a647d5f8-656b-4f8b-a625-07443c814584 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Lock "a1d00736-1a8d-46e0-9358-46e848b94797" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 732.007506] env[63028]: DEBUG nova.compute.manager [req-7cb82bb6-010a-4b3c-8d7b-fad5be7e312e req-bb234d09-8b97-4eea-b54d-01fd355f081e service nova] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Received event network-vif-plugged-e9be02f8-7ea6-45eb-a1cb-65fb95285caf {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 732.008106] env[63028]: DEBUG oslo_concurrency.lockutils [req-7cb82bb6-010a-4b3c-8d7b-fad5be7e312e req-bb234d09-8b97-4eea-b54d-01fd355f081e service nova] Acquiring lock "c06813c4-472d-4bf9-84ec-0d01306bcd48-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 732.008508] env[63028]: DEBUG oslo_concurrency.lockutils [req-7cb82bb6-010a-4b3c-8d7b-fad5be7e312e req-bb234d09-8b97-4eea-b54d-01fd355f081e service nova] Lock "c06813c4-472d-4bf9-84ec-0d01306bcd48-events" acquired by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 732.008508] env[63028]: DEBUG oslo_concurrency.lockutils [req-7cb82bb6-010a-4b3c-8d7b-fad5be7e312e req-bb234d09-8b97-4eea-b54d-01fd355f081e service nova] Lock "c06813c4-472d-4bf9-84ec-0d01306bcd48-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 732.008736] env[63028]: DEBUG nova.compute.manager [req-7cb82bb6-010a-4b3c-8d7b-fad5be7e312e req-bb234d09-8b97-4eea-b54d-01fd355f081e service nova] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] No waiting events found dispatching network-vif-plugged-e9be02f8-7ea6-45eb-a1cb-65fb95285caf {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 732.008821] env[63028]: WARNING nova.compute.manager [req-7cb82bb6-010a-4b3c-8d7b-fad5be7e312e req-bb234d09-8b97-4eea-b54d-01fd355f081e service nova] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Received unexpected event network-vif-plugged-e9be02f8-7ea6-45eb-a1cb-65fb95285caf for instance with vm_state building and task_state spawning. [ 732.337952] env[63028]: DEBUG oslo_vmware.api [None req-ed7e176c-ecb7-4708-9379-1888a2940075 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Task: {'id': task-2735298, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.279941} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 732.338374] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-ed7e176c-ecb7-4708-9379-1888a2940075 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 732.338695] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-ed7e176c-ecb7-4708-9379-1888a2940075 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] [instance: 9b1cd3c1-5e9a-43b6-9efb-1baf879ae0c0] Deleted contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 732.338948] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-ed7e176c-ecb7-4708-9379-1888a2940075 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] [instance: 9b1cd3c1-5e9a-43b6-9efb-1baf879ae0c0] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 732.339259] env[63028]: INFO nova.compute.manager [None req-ed7e176c-ecb7-4708-9379-1888a2940075 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] [instance: 9b1cd3c1-5e9a-43b6-9efb-1baf879ae0c0] Took 1.12 seconds to destroy the instance on the hypervisor. [ 732.339796] env[63028]: DEBUG oslo.service.loopingcall [None req-ed7e176c-ecb7-4708-9379-1888a2940075 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 732.340136] env[63028]: DEBUG nova.compute.manager [-] [instance: 9b1cd3c1-5e9a-43b6-9efb-1baf879ae0c0] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 732.340301] env[63028]: DEBUG nova.network.neutron [-] [instance: 9b1cd3c1-5e9a-43b6-9efb-1baf879ae0c0] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 732.362341] env[63028]: DEBUG nova.compute.utils [None req-767adebe-0612-4cb5-9387-d45867077656 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 732.366020] env[63028]: DEBUG nova.compute.manager [None req-767adebe-0612-4cb5-9387-d45867077656 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] [instance: 352ac7c3-17a8-4d7e-a66f-47ea7614892c] Allocating IP information in the background. {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 732.366020] env[63028]: DEBUG nova.network.neutron [None req-767adebe-0612-4cb5-9387-d45867077656 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] [instance: 352ac7c3-17a8-4d7e-a66f-47ea7614892c] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 732.366769] env[63028]: DEBUG oslo_concurrency.lockutils [None req-6e3a9737-fb71-42ad-adca-40f4f56c8c52 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Acquiring lock "refresh_cache-c06813c4-472d-4bf9-84ec-0d01306bcd48" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 732.366887] env[63028]: DEBUG oslo_concurrency.lockutils [None req-6e3a9737-fb71-42ad-adca-40f4f56c8c52 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Acquired lock "refresh_cache-c06813c4-472d-4bf9-84ec-0d01306bcd48" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 732.367033] env[63028]: DEBUG nova.network.neutron [None req-6e3a9737-fb71-42ad-adca-40f4f56c8c52 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 732.381846] env[63028]: DEBUG nova.network.neutron [-] [instance: 4a782483-c24e-44db-b697-856c69cc4a13] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 732.401375] env[63028]: DEBUG oslo_concurrency.lockutils [None req-0f3678b7-8afb-4ff2-9687-0585f2f9d289 tempest-TenantUsagesTestJSON-301234787 tempest-TenantUsagesTestJSON-301234787-project-member] Lock "8a09beac-4b54-4fbb-9bac-3dcfe4c21fb3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 40.487s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 732.406540] env[63028]: DEBUG nova.policy [None 
req-767adebe-0612-4cb5-9387-d45867077656 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cf5d21cc3d4a4663aa121a72f6c71d62', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0e59b9c07aa74a1dad6c966255904cff', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 732.442402] env[63028]: DEBUG oslo_vmware.api [None req-ac78c9bd-9fb2-4d81-94df-f75521abb985 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Task: {'id': task-2735299, 'name': PowerOnVM_Task, 'duration_secs': 0.461348} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 732.443081] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac78c9bd-9fb2-4d81-94df-f75521abb985 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 0d96ba8e-b46b-48ae-957c-cdc49762c395] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 732.443305] env[63028]: INFO nova.compute.manager [None req-ac78c9bd-9fb2-4d81-94df-f75521abb985 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 0d96ba8e-b46b-48ae-957c-cdc49762c395] Took 9.42 seconds to spawn the instance on the hypervisor. [ 732.444233] env[63028]: DEBUG nova.compute.manager [None req-ac78c9bd-9fb2-4d81-94df-f75521abb985 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 0d96ba8e-b46b-48ae-957c-cdc49762c395] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 732.447926] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c220423c-b683-48cf-a82d-af9420489097 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.749280] env[63028]: DEBUG nova.network.neutron [None req-767adebe-0612-4cb5-9387-d45867077656 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] [instance: 352ac7c3-17a8-4d7e-a66f-47ea7614892c] Successfully created port: 07a0490c-4de0-43cb-be3e-6a99cf23e4bb {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 732.820349] env[63028]: DEBUG nova.compute.manager [req-059e931a-508d-4c0e-84ea-57c8d9753271 req-4fdc5e39-76e2-4504-911b-a2ad0b44b8b1 service nova] [instance: e2d39c43-6666-4fda-b8e2-485399c59e46] Received event network-changed-fbe61f8e-ac8c-487d-95a7-fa4740f61aa0 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 732.820599] env[63028]: DEBUG nova.compute.manager [req-059e931a-508d-4c0e-84ea-57c8d9753271 req-4fdc5e39-76e2-4504-911b-a2ad0b44b8b1 service nova] [instance: e2d39c43-6666-4fda-b8e2-485399c59e46] Refreshing instance network info cache due to event network-changed-fbe61f8e-ac8c-487d-95a7-fa4740f61aa0. 
{{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 732.820847] env[63028]: DEBUG oslo_concurrency.lockutils [req-059e931a-508d-4c0e-84ea-57c8d9753271 req-4fdc5e39-76e2-4504-911b-a2ad0b44b8b1 service nova] Acquiring lock "refresh_cache-e2d39c43-6666-4fda-b8e2-485399c59e46" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 732.820992] env[63028]: DEBUG oslo_concurrency.lockutils [req-059e931a-508d-4c0e-84ea-57c8d9753271 req-4fdc5e39-76e2-4504-911b-a2ad0b44b8b1 service nova] Acquired lock "refresh_cache-e2d39c43-6666-4fda-b8e2-485399c59e46" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 732.821448] env[63028]: DEBUG nova.network.neutron [req-059e931a-508d-4c0e-84ea-57c8d9753271 req-4fdc5e39-76e2-4504-911b-a2ad0b44b8b1 service nova] [instance: e2d39c43-6666-4fda-b8e2-485399c59e46] Refreshing network info cache for port fbe61f8e-ac8c-487d-95a7-fa4740f61aa0 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 732.868139] env[63028]: DEBUG nova.compute.manager [None req-767adebe-0612-4cb5-9387-d45867077656 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] [instance: 352ac7c3-17a8-4d7e-a66f-47ea7614892c] Start building block device mappings for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 732.888293] env[63028]: INFO nova.compute.manager [-] [instance: 4a782483-c24e-44db-b697-856c69cc4a13] Took 1.82 seconds to deallocate network for instance. [ 732.952389] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ce407e48-ce4a-4a11-b901-4c905a15470b tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] Acquiring lock "22713da1-ae53-4bbe-ae55-2490440cbd87" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 732.952617] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ce407e48-ce4a-4a11-b901-4c905a15470b tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] Lock "22713da1-ae53-4bbe-ae55-2490440cbd87" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 732.952827] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ce407e48-ce4a-4a11-b901-4c905a15470b tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] Acquiring lock "22713da1-ae53-4bbe-ae55-2490440cbd87-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 732.953092] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ce407e48-ce4a-4a11-b901-4c905a15470b tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] Lock "22713da1-ae53-4bbe-ae55-2490440cbd87-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 732.953180] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ce407e48-ce4a-4a11-b901-4c905a15470b tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] Lock "22713da1-ae53-4bbe-ae55-2490440cbd87-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 732.958574] env[63028]: INFO nova.compute.manager [None req-ce407e48-ce4a-4a11-b901-4c905a15470b tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] [instance: 22713da1-ae53-4bbe-ae55-2490440cbd87] Terminating instance [ 732.966549] env[63028]: DEBUG nova.network.neutron [None req-6e3a9737-fb71-42ad-adca-40f4f56c8c52 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Instance cache missing network info. {{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 732.970852] env[63028]: INFO nova.compute.manager [None req-ac78c9bd-9fb2-4d81-94df-f75521abb985 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 0d96ba8e-b46b-48ae-957c-cdc49762c395] Took 59.77 seconds to build instance. [ 733.397638] env[63028]: DEBUG oslo_concurrency.lockutils [None req-70b97bb8-6f63-4727-9ea4-16bca122cb7f tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 733.446335] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6232c9cb-1651-467b-b13c-f0eec81fc081 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.453318] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c427b5c1-4b30-469d-ad31-d8c8903188e2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.457732] env[63028]: DEBUG nova.network.neutron [req-059e931a-508d-4c0e-84ea-57c8d9753271 req-4fdc5e39-76e2-4504-911b-a2ad0b44b8b1 service nova] [instance: e2d39c43-6666-4fda-b8e2-485399c59e46] Instance cache missing network info. {{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 733.462892] env[63028]: DEBUG nova.compute.manager [None req-ce407e48-ce4a-4a11-b901-4c905a15470b tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] [instance: 22713da1-ae53-4bbe-ae55-2490440cbd87] Start destroying the instance on the hypervisor. 
{{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 733.463328] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-ce407e48-ce4a-4a11-b901-4c905a15470b tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] [instance: 22713da1-ae53-4bbe-ae55-2490440cbd87] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 733.489254] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2b84f34-f567-4217-91c0-9052983dff68 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.493489] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ac78c9bd-9fb2-4d81-94df-f75521abb985 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Lock "0d96ba8e-b46b-48ae-957c-cdc49762c395" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 92.505s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 733.495887] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-685b1b7f-01ee-4218-8442-e85808e590fe {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.505354] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4bff370-204f-47ee-a9a1-9057c22fad0a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.511262] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce407e48-ce4a-4a11-b901-4c905a15470b tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] [instance: 22713da1-ae53-4bbe-ae55-2490440cbd87] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 733.511963] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2c448698-457a-4fab-9482-01ed95ac9585 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.524103] env[63028]: DEBUG nova.compute.provider_tree [None req-a6e1cdda-f79c-4f40-b760-2ed696cbf7f7 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 733.526787] env[63028]: DEBUG nova.network.neutron [None req-6e3a9737-fb71-42ad-adca-40f4f56c8c52 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Updating instance_info_cache with network_info: [{"id": "e9be02f8-7ea6-45eb-a1cb-65fb95285caf", "address": "fa:16:3e:cc:b1:42", "network": {"id": "49670a42-3caa-4492-8b32-f16f3fe77f4b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1158317332-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], 
"version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b4dcaef840f940bda057d0371cdc5adb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4df917f7-847a-4c0e-b0e3-69a52e4a1554", "external-id": "cl2-zone-457", "segmentation_id": 457, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape9be02f8-7e", "ovs_interfaceid": "e9be02f8-7ea6-45eb-a1cb-65fb95285caf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 733.529024] env[63028]: DEBUG oslo_vmware.api [None req-ce407e48-ce4a-4a11-b901-4c905a15470b tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] Waiting for the task: (returnval){ [ 733.529024] env[63028]: value = "task-2735300" [ 733.529024] env[63028]: _type = "Task" [ 733.529024] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 733.537469] env[63028]: DEBUG oslo_vmware.api [None req-ce407e48-ce4a-4a11-b901-4c905a15470b tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] Task: {'id': task-2735300, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.759204] env[63028]: DEBUG nova.network.neutron [req-059e931a-508d-4c0e-84ea-57c8d9753271 req-4fdc5e39-76e2-4504-911b-a2ad0b44b8b1 service nova] [instance: e2d39c43-6666-4fda-b8e2-485399c59e46] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 733.811189] env[63028]: DEBUG nova.network.neutron [None req-3ea37b67-73e0-4219-b828-a126fbc237e2 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] [instance: e2d39c43-6666-4fda-b8e2-485399c59e46] Successfully updated port: e0e5a360-86c4-40e6-9e4a-06cd5da44d96 {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 733.883835] env[63028]: DEBUG nova.compute.manager [None req-767adebe-0612-4cb5-9387-d45867077656 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] [instance: 352ac7c3-17a8-4d7e-a66f-47ea7614892c] Start spawning the instance on the hypervisor. 
{{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 733.913441] env[63028]: DEBUG nova.virt.hardware [None req-767adebe-0612-4cb5-9387-d45867077656 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 733.913680] env[63028]: DEBUG nova.virt.hardware [None req-767adebe-0612-4cb5-9387-d45867077656 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 733.913832] env[63028]: DEBUG nova.virt.hardware [None req-767adebe-0612-4cb5-9387-d45867077656 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 733.914088] env[63028]: DEBUG nova.virt.hardware [None req-767adebe-0612-4cb5-9387-d45867077656 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 733.914163] env[63028]: DEBUG nova.virt.hardware [None req-767adebe-0612-4cb5-9387-d45867077656 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 733.914322] env[63028]: DEBUG nova.virt.hardware [None req-767adebe-0612-4cb5-9387-d45867077656 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 733.914515] env[63028]: DEBUG nova.virt.hardware [None req-767adebe-0612-4cb5-9387-d45867077656 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 733.914664] env[63028]: DEBUG nova.virt.hardware [None req-767adebe-0612-4cb5-9387-d45867077656 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] 
Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 733.914830] env[63028]: DEBUG nova.virt.hardware [None req-767adebe-0612-4cb5-9387-d45867077656 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 733.915188] env[63028]: DEBUG nova.virt.hardware [None req-767adebe-0612-4cb5-9387-d45867077656 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 733.916883] env[63028]: DEBUG nova.virt.hardware [None req-767adebe-0612-4cb5-9387-d45867077656 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 733.917777] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5e2d497-906d-45e1-b61c-d1d40fc2b653 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.927312] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2278ecca-073b-4b12-9816-768d904aa8db {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.999048] env[63028]: DEBUG nova.compute.manager [None req-5e5cf0a0-0a56-464b-9790-52f94aa8fe18 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: af87f1a5-b413-4b26-be91-474ad1f73df8] Starting instance... 
{{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 734.031259] env[63028]: DEBUG nova.scheduler.client.report [None req-a6e1cdda-f79c-4f40-b760-2ed696cbf7f7 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 734.035028] env[63028]: DEBUG oslo_concurrency.lockutils [None req-6e3a9737-fb71-42ad-adca-40f4f56c8c52 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Releasing lock "refresh_cache-c06813c4-472d-4bf9-84ec-0d01306bcd48" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 734.035124] env[63028]: DEBUG nova.compute.manager [None req-6e3a9737-fb71-42ad-adca-40f4f56c8c52 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Instance network_info: |[{"id": "e9be02f8-7ea6-45eb-a1cb-65fb95285caf", "address": "fa:16:3e:cc:b1:42", "network": {"id": "49670a42-3caa-4492-8b32-f16f3fe77f4b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1158317332-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b4dcaef840f940bda057d0371cdc5adb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4df917f7-847a-4c0e-b0e3-69a52e4a1554", "external-id": "cl2-zone-457", "segmentation_id": 457, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape9be02f8-7e", "ovs_interfaceid": "e9be02f8-7ea6-45eb-a1cb-65fb95285caf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 734.042028] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-6e3a9737-fb71-42ad-adca-40f4f56c8c52 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cc:b1:42', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4df917f7-847a-4c0e-b0e3-69a52e4a1554', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e9be02f8-7ea6-45eb-a1cb-65fb95285caf', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 734.047715] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e3a9737-fb71-42ad-adca-40f4f56c8c52 
tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Creating folder: Project (b4dcaef840f940bda057d0371cdc5adb). Parent ref: group-v550570. {{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 734.048796] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6c693f94-7fbd-4ced-9a81-203354155ee7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.063085] env[63028]: DEBUG oslo_vmware.api [None req-ce407e48-ce4a-4a11-b901-4c905a15470b tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] Task: {'id': task-2735300, 'name': PowerOffVM_Task, 'duration_secs': 0.188365} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 734.066024] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce407e48-ce4a-4a11-b901-4c905a15470b tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] [instance: 22713da1-ae53-4bbe-ae55-2490440cbd87] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 734.066191] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-ce407e48-ce4a-4a11-b901-4c905a15470b tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] [instance: 22713da1-ae53-4bbe-ae55-2490440cbd87] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 734.066725] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-6e3a9737-fb71-42ad-adca-40f4f56c8c52 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Created folder: Project (b4dcaef840f940bda057d0371cdc5adb) in parent group-v550570. [ 734.066893] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e3a9737-fb71-42ad-adca-40f4f56c8c52 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Creating folder: Instances. Parent ref: group-v550697. {{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 734.067318] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0d28b5bf-781c-441e-aae6-c2e8ed0b02b7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.069242] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9971a61b-4eb2-43b3-bc23-09b5257cb238 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.080101] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-6e3a9737-fb71-42ad-adca-40f4f56c8c52 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Created folder: Instances in parent group-v550697. [ 734.080423] env[63028]: DEBUG oslo.service.loopingcall [None req-6e3a9737-fb71-42ad-adca-40f4f56c8c52 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 734.080672] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 734.080887] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-85732be5-f554-4e4b-a089-14b71d918468 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.100253] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 734.100253] env[63028]: value = "task-2735304" [ 734.100253] env[63028]: _type = "Task" [ 734.100253] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.261850] env[63028]: DEBUG oslo_concurrency.lockutils [req-059e931a-508d-4c0e-84ea-57c8d9753271 req-4fdc5e39-76e2-4504-911b-a2ad0b44b8b1 service nova] Releasing lock "refresh_cache-e2d39c43-6666-4fda-b8e2-485399c59e46" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 734.262162] env[63028]: DEBUG nova.compute.manager [req-059e931a-508d-4c0e-84ea-57c8d9753271 req-4fdc5e39-76e2-4504-911b-a2ad0b44b8b1 service nova] [instance: 4a782483-c24e-44db-b697-856c69cc4a13] Received event network-vif-deleted-a290475c-c96a-4037-9a1f-e4340a86da15 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 734.262592] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-ce407e48-ce4a-4a11-b901-4c905a15470b tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] [instance: 22713da1-ae53-4bbe-ae55-2490440cbd87] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 734.262832] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-ce407e48-ce4a-4a11-b901-4c905a15470b tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] [instance: 22713da1-ae53-4bbe-ae55-2490440cbd87] Deleting contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 734.263076] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-ce407e48-ce4a-4a11-b901-4c905a15470b tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] Deleting the datastore file [datastore1] 22713da1-ae53-4bbe-ae55-2490440cbd87 {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 734.263391] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-de78df31-d008-4799-82cd-074881ed485a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.268583] env[63028]: DEBUG nova.compute.manager [req-f4069881-5f29-47d0-9753-62964b410772 req-dec59057-2556-401b-b5dd-2fe8c0698dcc service nova] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Received event network-changed-e9be02f8-7ea6-45eb-a1cb-65fb95285caf {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 734.268716] env[63028]: DEBUG nova.compute.manager [req-f4069881-5f29-47d0-9753-62964b410772 req-dec59057-2556-401b-b5dd-2fe8c0698dcc service nova] [instance: 
c06813c4-472d-4bf9-84ec-0d01306bcd48] Refreshing instance network info cache due to event network-changed-e9be02f8-7ea6-45eb-a1cb-65fb95285caf. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 734.268982] env[63028]: DEBUG oslo_concurrency.lockutils [req-f4069881-5f29-47d0-9753-62964b410772 req-dec59057-2556-401b-b5dd-2fe8c0698dcc service nova] Acquiring lock "refresh_cache-c06813c4-472d-4bf9-84ec-0d01306bcd48" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 734.269166] env[63028]: DEBUG oslo_concurrency.lockutils [req-f4069881-5f29-47d0-9753-62964b410772 req-dec59057-2556-401b-b5dd-2fe8c0698dcc service nova] Acquired lock "refresh_cache-c06813c4-472d-4bf9-84ec-0d01306bcd48" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 734.269311] env[63028]: DEBUG nova.network.neutron [req-f4069881-5f29-47d0-9753-62964b410772 req-dec59057-2556-401b-b5dd-2fe8c0698dcc service nova] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Refreshing network info cache for port e9be02f8-7ea6-45eb-a1cb-65fb95285caf {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 734.272739] env[63028]: DEBUG oslo_vmware.api [None req-ce407e48-ce4a-4a11-b901-4c905a15470b tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] Waiting for the task: (returnval){ [ 734.272739] env[63028]: value = "task-2735305" [ 734.272739] env[63028]: _type = "Task" [ 734.272739] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.286611] env[63028]: DEBUG oslo_vmware.api [None req-ce407e48-ce4a-4a11-b901-4c905a15470b tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] Task: {'id': task-2735305, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.414146] env[63028]: DEBUG nova.network.neutron [None req-767adebe-0612-4cb5-9387-d45867077656 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] [instance: 352ac7c3-17a8-4d7e-a66f-47ea7614892c] Successfully updated port: 07a0490c-4de0-43cb-be3e-6a99cf23e4bb {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 734.531014] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5e5cf0a0-0a56-464b-9790-52f94aa8fe18 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 734.540107] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a6e1cdda-f79c-4f40-b760-2ed696cbf7f7 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.686s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 734.540803] env[63028]: DEBUG nova.compute.manager [None req-a6e1cdda-f79c-4f40-b760-2ed696cbf7f7 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 50e4934b-b9b1-4887-b5d1-95a37fbf4c41] Start building networks asynchronously for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 734.545392] env[63028]: DEBUG oslo_concurrency.lockutils [None req-66952caa-4d08-4457-8103-f1724319cde7 tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.636s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 734.547026] env[63028]: INFO nova.compute.claims [None req-66952caa-4d08-4457-8103-f1724319cde7 tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 734.567753] env[63028]: DEBUG nova.network.neutron [-] [instance: 9b1cd3c1-5e9a-43b6-9efb-1baf879ae0c0] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 734.613049] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735304, 'name': CreateVM_Task, 'duration_secs': 0.341118} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 734.613302] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 734.614020] env[63028]: DEBUG oslo_concurrency.lockutils [None req-6e3a9737-fb71-42ad-adca-40f4f56c8c52 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 734.614130] env[63028]: DEBUG oslo_concurrency.lockutils [None req-6e3a9737-fb71-42ad-adca-40f4f56c8c52 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 734.614450] env[63028]: DEBUG oslo_concurrency.lockutils [None req-6e3a9737-fb71-42ad-adca-40f4f56c8c52 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 734.614966] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6570b693-c77c-424b-811e-86a493891739 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.620544] env[63028]: DEBUG oslo_vmware.api [None req-6e3a9737-fb71-42ad-adca-40f4f56c8c52 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Waiting for the task: (returnval){ [ 734.620544] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52a1989d-8de3-735e-cb3d-45d368e7eecf" [ 734.620544] env[63028]: _type = "Task" [ 734.620544] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.630451] env[63028]: DEBUG oslo_vmware.api [None req-6e3a9737-fb71-42ad-adca-40f4f56c8c52 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52a1989d-8de3-735e-cb3d-45d368e7eecf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.786623] env[63028]: DEBUG oslo_vmware.api [None req-ce407e48-ce4a-4a11-b901-4c905a15470b tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] Task: {'id': task-2735305, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.147919} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 734.786953] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-ce407e48-ce4a-4a11-b901-4c905a15470b tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 734.787364] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-ce407e48-ce4a-4a11-b901-4c905a15470b tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] [instance: 22713da1-ae53-4bbe-ae55-2490440cbd87] Deleted contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 734.787672] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-ce407e48-ce4a-4a11-b901-4c905a15470b tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] [instance: 22713da1-ae53-4bbe-ae55-2490440cbd87] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 734.787941] env[63028]: INFO nova.compute.manager [None req-ce407e48-ce4a-4a11-b901-4c905a15470b tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] [instance: 22713da1-ae53-4bbe-ae55-2490440cbd87] Took 1.32 seconds to destroy the instance on the hypervisor. [ 734.788206] env[63028]: DEBUG oslo.service.loopingcall [None req-ce407e48-ce4a-4a11-b901-4c905a15470b tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 734.788997] env[63028]: DEBUG nova.compute.manager [-] [instance: 22713da1-ae53-4bbe-ae55-2490440cbd87] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 734.788997] env[63028]: DEBUG nova.network.neutron [-] [instance: 22713da1-ae53-4bbe-ae55-2490440cbd87] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 734.921274] env[63028]: DEBUG oslo_concurrency.lockutils [None req-767adebe-0612-4cb5-9387-d45867077656 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] Acquiring lock "refresh_cache-352ac7c3-17a8-4d7e-a66f-47ea7614892c" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 734.921433] env[63028]: DEBUG oslo_concurrency.lockutils [None req-767adebe-0612-4cb5-9387-d45867077656 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] Acquired lock "refresh_cache-352ac7c3-17a8-4d7e-a66f-47ea7614892c" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 734.921576] env[63028]: DEBUG nova.network.neutron [None req-767adebe-0612-4cb5-9387-d45867077656 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] [instance: 352ac7c3-17a8-4d7e-a66f-47ea7614892c] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 734.971918] env[63028]: DEBUG nova.compute.manager [req-bbc93376-c6c5-4765-8fe1-19dd2e24f068 req-e5cc261c-9f6e-434d-b5b9-fe68ae7fbcfa service nova] [instance: e2d39c43-6666-4fda-b8e2-485399c59e46] Received event network-vif-plugged-e0e5a360-86c4-40e6-9e4a-06cd5da44d96 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 734.972166] env[63028]: DEBUG oslo_concurrency.lockutils [req-bbc93376-c6c5-4765-8fe1-19dd2e24f068 req-e5cc261c-9f6e-434d-b5b9-fe68ae7fbcfa service nova] Acquiring lock "e2d39c43-6666-4fda-b8e2-485399c59e46-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 734.972538] env[63028]: DEBUG oslo_concurrency.lockutils [req-bbc93376-c6c5-4765-8fe1-19dd2e24f068 req-e5cc261c-9f6e-434d-b5b9-fe68ae7fbcfa service nova] Lock "e2d39c43-6666-4fda-b8e2-485399c59e46-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 734.972538] env[63028]: DEBUG oslo_concurrency.lockutils [req-bbc93376-c6c5-4765-8fe1-19dd2e24f068 req-e5cc261c-9f6e-434d-b5b9-fe68ae7fbcfa service nova] Lock "e2d39c43-6666-4fda-b8e2-485399c59e46-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 734.972698] env[63028]: DEBUG nova.compute.manager [req-bbc93376-c6c5-4765-8fe1-19dd2e24f068 req-e5cc261c-9f6e-434d-b5b9-fe68ae7fbcfa service nova] [instance: e2d39c43-6666-4fda-b8e2-485399c59e46] No waiting events found dispatching 
network-vif-plugged-e0e5a360-86c4-40e6-9e4a-06cd5da44d96 {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 734.972844] env[63028]: WARNING nova.compute.manager [req-bbc93376-c6c5-4765-8fe1-19dd2e24f068 req-e5cc261c-9f6e-434d-b5b9-fe68ae7fbcfa service nova] [instance: e2d39c43-6666-4fda-b8e2-485399c59e46] Received unexpected event network-vif-plugged-e0e5a360-86c4-40e6-9e4a-06cd5da44d96 for instance with vm_state building and task_state spawning. [ 734.973011] env[63028]: DEBUG nova.compute.manager [req-bbc93376-c6c5-4765-8fe1-19dd2e24f068 req-e5cc261c-9f6e-434d-b5b9-fe68ae7fbcfa service nova] [instance: e2d39c43-6666-4fda-b8e2-485399c59e46] Received event network-changed-e0e5a360-86c4-40e6-9e4a-06cd5da44d96 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 734.973169] env[63028]: DEBUG nova.compute.manager [req-bbc93376-c6c5-4765-8fe1-19dd2e24f068 req-e5cc261c-9f6e-434d-b5b9-fe68ae7fbcfa service nova] [instance: e2d39c43-6666-4fda-b8e2-485399c59e46] Refreshing instance network info cache due to event network-changed-e0e5a360-86c4-40e6-9e4a-06cd5da44d96. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 734.973345] env[63028]: DEBUG oslo_concurrency.lockutils [req-bbc93376-c6c5-4765-8fe1-19dd2e24f068 req-e5cc261c-9f6e-434d-b5b9-fe68ae7fbcfa service nova] Acquiring lock "refresh_cache-e2d39c43-6666-4fda-b8e2-485399c59e46" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 734.973901] env[63028]: DEBUG oslo_concurrency.lockutils [req-bbc93376-c6c5-4765-8fe1-19dd2e24f068 req-e5cc261c-9f6e-434d-b5b9-fe68ae7fbcfa service nova] Acquired lock "refresh_cache-e2d39c43-6666-4fda-b8e2-485399c59e46" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 734.974097] env[63028]: DEBUG nova.network.neutron [req-bbc93376-c6c5-4765-8fe1-19dd2e24f068 req-e5cc261c-9f6e-434d-b5b9-fe68ae7fbcfa service nova] [instance: e2d39c43-6666-4fda-b8e2-485399c59e46] Refreshing network info cache for port e0e5a360-86c4-40e6-9e4a-06cd5da44d96 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 735.048581] env[63028]: DEBUG nova.compute.utils [None req-a6e1cdda-f79c-4f40-b760-2ed696cbf7f7 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 735.051592] env[63028]: DEBUG nova.network.neutron [req-f4069881-5f29-47d0-9753-62964b410772 req-dec59057-2556-401b-b5dd-2fe8c0698dcc service nova] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Updated VIF entry in instance network info cache for port e9be02f8-7ea6-45eb-a1cb-65fb95285caf. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 735.051813] env[63028]: DEBUG nova.network.neutron [req-f4069881-5f29-47d0-9753-62964b410772 req-dec59057-2556-401b-b5dd-2fe8c0698dcc service nova] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Updating instance_info_cache with network_info: [{"id": "e9be02f8-7ea6-45eb-a1cb-65fb95285caf", "address": "fa:16:3e:cc:b1:42", "network": {"id": "49670a42-3caa-4492-8b32-f16f3fe77f4b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1158317332-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b4dcaef840f940bda057d0371cdc5adb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4df917f7-847a-4c0e-b0e3-69a52e4a1554", "external-id": "cl2-zone-457", "segmentation_id": 457, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape9be02f8-7e", "ovs_interfaceid": "e9be02f8-7ea6-45eb-a1cb-65fb95285caf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 735.052942] env[63028]: DEBUG nova.compute.manager [None req-a6e1cdda-f79c-4f40-b760-2ed696cbf7f7 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 50e4934b-b9b1-4887-b5d1-95a37fbf4c41] Allocating IP information in the background. {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 735.053378] env[63028]: DEBUG nova.network.neutron [None req-a6e1cdda-f79c-4f40-b760-2ed696cbf7f7 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 50e4934b-b9b1-4887-b5d1-95a37fbf4c41] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 735.071160] env[63028]: INFO nova.compute.manager [-] [instance: 9b1cd3c1-5e9a-43b6-9efb-1baf879ae0c0] Took 2.73 seconds to deallocate network for instance. [ 735.137440] env[63028]: DEBUG oslo_vmware.api [None req-6e3a9737-fb71-42ad-adca-40f4f56c8c52 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52a1989d-8de3-735e-cb3d-45d368e7eecf, 'name': SearchDatastore_Task, 'duration_secs': 0.009071} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 735.137440] env[63028]: DEBUG oslo_concurrency.lockutils [None req-6e3a9737-fb71-42ad-adca-40f4f56c8c52 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 735.137440] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-6e3a9737-fb71-42ad-adca-40f4f56c8c52 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 735.137440] env[63028]: DEBUG oslo_concurrency.lockutils [None req-6e3a9737-fb71-42ad-adca-40f4f56c8c52 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 735.137816] env[63028]: DEBUG oslo_concurrency.lockutils [None req-6e3a9737-fb71-42ad-adca-40f4f56c8c52 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 735.137816] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-6e3a9737-fb71-42ad-adca-40f4f56c8c52 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 735.137816] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-12ce07f1-416d-491f-9c77-1b2e0a222786 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.147958] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-6e3a9737-fb71-42ad-adca-40f4f56c8c52 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 735.149657] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-6e3a9737-fb71-42ad-adca-40f4f56c8c52 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 735.149657] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-071311cc-1065-4695-b732-4b346d0878c3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.160478] env[63028]: DEBUG oslo_vmware.api [None req-6e3a9737-fb71-42ad-adca-40f4f56c8c52 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Waiting for the task: (returnval){ [ 735.160478] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52264df9-3e1c-51b5-30b4-7a6f307e8acf" [ 735.160478] env[63028]: _type = "Task" [ 735.160478] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 735.168670] env[63028]: DEBUG oslo_vmware.api [None req-6e3a9737-fb71-42ad-adca-40f4f56c8c52 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52264df9-3e1c-51b5-30b4-7a6f307e8acf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.180161] env[63028]: DEBUG nova.policy [None req-a6e1cdda-f79c-4f40-b760-2ed696cbf7f7 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8478e45562394a0d8fafc5e3e1218fd2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '05118b378b5e4d838962db2378b381bc', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 735.554023] env[63028]: DEBUG nova.compute.manager [None req-a6e1cdda-f79c-4f40-b760-2ed696cbf7f7 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 50e4934b-b9b1-4887-b5d1-95a37fbf4c41] Start building block device mappings for instance. 
{{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 735.561269] env[63028]: DEBUG oslo_concurrency.lockutils [req-f4069881-5f29-47d0-9753-62964b410772 req-dec59057-2556-401b-b5dd-2fe8c0698dcc service nova] Releasing lock "refresh_cache-c06813c4-472d-4bf9-84ec-0d01306bcd48" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 735.578850] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ed7e176c-ecb7-4708-9379-1888a2940075 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 735.678292] env[63028]: DEBUG oslo_vmware.api [None req-6e3a9737-fb71-42ad-adca-40f4f56c8c52 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52264df9-3e1c-51b5-30b4-7a6f307e8acf, 'name': SearchDatastore_Task, 'duration_secs': 0.009195} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 735.680076] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-380692c4-4220-4faf-869c-2c8605d9e06e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.683714] env[63028]: DEBUG nova.network.neutron [None req-a6e1cdda-f79c-4f40-b760-2ed696cbf7f7 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 50e4934b-b9b1-4887-b5d1-95a37fbf4c41] Successfully created port: b6ea40f2-2217-44b1-bf3f-727a6649149a {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 735.692804] env[63028]: DEBUG oslo_vmware.api [None req-6e3a9737-fb71-42ad-adca-40f4f56c8c52 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Waiting for the task: (returnval){ [ 735.692804] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]523810ad-25fd-928b-324b-9db556e92025" [ 735.692804] env[63028]: _type = "Task" [ 735.692804] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 735.702417] env[63028]: DEBUG oslo_vmware.api [None req-6e3a9737-fb71-42ad-adca-40f4f56c8c52 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]523810ad-25fd-928b-324b-9db556e92025, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.721978] env[63028]: DEBUG nova.network.neutron [None req-767adebe-0612-4cb5-9387-d45867077656 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] [instance: 352ac7c3-17a8-4d7e-a66f-47ea7614892c] Instance cache missing network info. 
{{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 735.735964] env[63028]: DEBUG nova.network.neutron [req-bbc93376-c6c5-4765-8fe1-19dd2e24f068 req-e5cc261c-9f6e-434d-b5b9-fe68ae7fbcfa service nova] [instance: e2d39c43-6666-4fda-b8e2-485399c59e46] Instance cache missing network info. {{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 735.843876] env[63028]: DEBUG nova.network.neutron [-] [instance: 22713da1-ae53-4bbe-ae55-2490440cbd87] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 735.943367] env[63028]: DEBUG nova.network.neutron [req-bbc93376-c6c5-4765-8fe1-19dd2e24f068 req-e5cc261c-9f6e-434d-b5b9-fe68ae7fbcfa service nova] [instance: e2d39c43-6666-4fda-b8e2-485399c59e46] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 736.045999] env[63028]: DEBUG nova.network.neutron [None req-767adebe-0612-4cb5-9387-d45867077656 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] [instance: 352ac7c3-17a8-4d7e-a66f-47ea7614892c] Updating instance_info_cache with network_info: [{"id": "07a0490c-4de0-43cb-be3e-6a99cf23e4bb", "address": "fa:16:3e:0c:7a:e9", "network": {"id": "a5e50c4a-1512-4be7-a5c0-319c140faf3e", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-2005706294-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0e59b9c07aa74a1dad6c966255904cff", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0c293d47-74c0-49d7-a474-cdb643080f6f", "external-id": "nsx-vlan-transportzone-172", "segmentation_id": 172, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap07a0490c-4d", "ovs_interfaceid": "07a0490c-4de0-43cb-be3e-6a99cf23e4bb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 736.141558] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34f71df0-b36d-46e7-817d-495de5baacc7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.149625] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24e79412-109b-46ed-aeb0-7cc601844341 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.185398] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74045faa-f6bd-4a1d-80f0-e24b3d75db75 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.193867] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-e43dba69-b18c-4229-9ac7-11796c7ca493 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.209240] env[63028]: DEBUG oslo_vmware.api [None req-6e3a9737-fb71-42ad-adca-40f4f56c8c52 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]523810ad-25fd-928b-324b-9db556e92025, 'name': SearchDatastore_Task, 'duration_secs': 0.009767} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 736.216499] env[63028]: DEBUG oslo_concurrency.lockutils [None req-6e3a9737-fb71-42ad-adca-40f4f56c8c52 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 736.216845] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e3a9737-fb71-42ad-adca-40f4f56c8c52 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] c06813c4-472d-4bf9-84ec-0d01306bcd48/c06813c4-472d-4bf9-84ec-0d01306bcd48.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 736.217338] env[63028]: DEBUG nova.compute.provider_tree [None req-66952caa-4d08-4457-8103-f1724319cde7 tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 736.218656] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c9da65e8-14a0-4676-83ef-5117e6cd4325 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.227435] env[63028]: DEBUG oslo_vmware.api [None req-6e3a9737-fb71-42ad-adca-40f4f56c8c52 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Waiting for the task: (returnval){ [ 736.227435] env[63028]: value = "task-2735306" [ 736.227435] env[63028]: _type = "Task" [ 736.227435] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 736.235392] env[63028]: DEBUG oslo_vmware.api [None req-6e3a9737-fb71-42ad-adca-40f4f56c8c52 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2735306, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 736.347637] env[63028]: INFO nova.compute.manager [-] [instance: 22713da1-ae53-4bbe-ae55-2490440cbd87] Took 1.56 seconds to deallocate network for instance. 
[ 736.447178] env[63028]: DEBUG oslo_concurrency.lockutils [req-bbc93376-c6c5-4765-8fe1-19dd2e24f068 req-e5cc261c-9f6e-434d-b5b9-fe68ae7fbcfa service nova] Releasing lock "refresh_cache-e2d39c43-6666-4fda-b8e2-485399c59e46" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 736.447178] env[63028]: DEBUG nova.compute.manager [req-bbc93376-c6c5-4765-8fe1-19dd2e24f068 req-e5cc261c-9f6e-434d-b5b9-fe68ae7fbcfa service nova] [instance: 9b1cd3c1-5e9a-43b6-9efb-1baf879ae0c0] Received event network-vif-deleted-8f21ea1a-db48-4941-98ae-796e9dfcf6b1 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 736.512097] env[63028]: DEBUG nova.network.neutron [None req-3ea37b67-73e0-4219-b828-a126fbc237e2 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] [instance: e2d39c43-6666-4fda-b8e2-485399c59e46] Successfully updated port: d4e7f627-9b53-4218-b7e5-a5c06ee4d53e {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 736.546322] env[63028]: DEBUG oslo_concurrency.lockutils [None req-767adebe-0612-4cb5-9387-d45867077656 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] Releasing lock "refresh_cache-352ac7c3-17a8-4d7e-a66f-47ea7614892c" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 736.546695] env[63028]: DEBUG nova.compute.manager [None req-767adebe-0612-4cb5-9387-d45867077656 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] [instance: 352ac7c3-17a8-4d7e-a66f-47ea7614892c] Instance network_info: |[{"id": "07a0490c-4de0-43cb-be3e-6a99cf23e4bb", "address": "fa:16:3e:0c:7a:e9", "network": {"id": "a5e50c4a-1512-4be7-a5c0-319c140faf3e", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-2005706294-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0e59b9c07aa74a1dad6c966255904cff", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0c293d47-74c0-49d7-a474-cdb643080f6f", "external-id": "nsx-vlan-transportzone-172", "segmentation_id": 172, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap07a0490c-4d", "ovs_interfaceid": "07a0490c-4de0-43cb-be3e-6a99cf23e4bb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 736.548244] env[63028]: DEBUG nova.compute.manager [req-cab41a81-bc15-4fa5-afd6-7373e809318d req-2e762031-dc41-48cf-aaa8-a0128d1d04a6 service nova] [instance: 352ac7c3-17a8-4d7e-a66f-47ea7614892c] Received event network-vif-plugged-07a0490c-4de0-43cb-be3e-6a99cf23e4bb {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 736.548593] env[63028]: DEBUG oslo_concurrency.lockutils [req-cab41a81-bc15-4fa5-afd6-7373e809318d req-2e762031-dc41-48cf-aaa8-a0128d1d04a6 service nova] 
Acquiring lock "352ac7c3-17a8-4d7e-a66f-47ea7614892c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 736.548845] env[63028]: DEBUG oslo_concurrency.lockutils [req-cab41a81-bc15-4fa5-afd6-7373e809318d req-2e762031-dc41-48cf-aaa8-a0128d1d04a6 service nova] Lock "352ac7c3-17a8-4d7e-a66f-47ea7614892c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 736.549030] env[63028]: DEBUG oslo_concurrency.lockutils [req-cab41a81-bc15-4fa5-afd6-7373e809318d req-2e762031-dc41-48cf-aaa8-a0128d1d04a6 service nova] Lock "352ac7c3-17a8-4d7e-a66f-47ea7614892c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 736.549248] env[63028]: DEBUG nova.compute.manager [req-cab41a81-bc15-4fa5-afd6-7373e809318d req-2e762031-dc41-48cf-aaa8-a0128d1d04a6 service nova] [instance: 352ac7c3-17a8-4d7e-a66f-47ea7614892c] No waiting events found dispatching network-vif-plugged-07a0490c-4de0-43cb-be3e-6a99cf23e4bb {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 736.549433] env[63028]: WARNING nova.compute.manager [req-cab41a81-bc15-4fa5-afd6-7373e809318d req-2e762031-dc41-48cf-aaa8-a0128d1d04a6 service nova] [instance: 352ac7c3-17a8-4d7e-a66f-47ea7614892c] Received unexpected event network-vif-plugged-07a0490c-4de0-43cb-be3e-6a99cf23e4bb for instance with vm_state building and task_state spawning. [ 736.549605] env[63028]: DEBUG nova.compute.manager [req-cab41a81-bc15-4fa5-afd6-7373e809318d req-2e762031-dc41-48cf-aaa8-a0128d1d04a6 service nova] [instance: 352ac7c3-17a8-4d7e-a66f-47ea7614892c] Received event network-changed-07a0490c-4de0-43cb-be3e-6a99cf23e4bb {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 736.549806] env[63028]: DEBUG nova.compute.manager [req-cab41a81-bc15-4fa5-afd6-7373e809318d req-2e762031-dc41-48cf-aaa8-a0128d1d04a6 service nova] [instance: 352ac7c3-17a8-4d7e-a66f-47ea7614892c] Refreshing instance network info cache due to event network-changed-07a0490c-4de0-43cb-be3e-6a99cf23e4bb. 
{{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 736.549996] env[63028]: DEBUG oslo_concurrency.lockutils [req-cab41a81-bc15-4fa5-afd6-7373e809318d req-2e762031-dc41-48cf-aaa8-a0128d1d04a6 service nova] Acquiring lock "refresh_cache-352ac7c3-17a8-4d7e-a66f-47ea7614892c" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 736.550586] env[63028]: DEBUG oslo_concurrency.lockutils [req-cab41a81-bc15-4fa5-afd6-7373e809318d req-2e762031-dc41-48cf-aaa8-a0128d1d04a6 service nova] Acquired lock "refresh_cache-352ac7c3-17a8-4d7e-a66f-47ea7614892c" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 736.550586] env[63028]: DEBUG nova.network.neutron [req-cab41a81-bc15-4fa5-afd6-7373e809318d req-2e762031-dc41-48cf-aaa8-a0128d1d04a6 service nova] [instance: 352ac7c3-17a8-4d7e-a66f-47ea7614892c] Refreshing network info cache for port 07a0490c-4de0-43cb-be3e-6a99cf23e4bb {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 736.552072] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-767adebe-0612-4cb5-9387-d45867077656 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] [instance: 352ac7c3-17a8-4d7e-a66f-47ea7614892c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0c:7a:e9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0c293d47-74c0-49d7-a474-cdb643080f6f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '07a0490c-4de0-43cb-be3e-6a99cf23e4bb', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 736.562172] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-767adebe-0612-4cb5-9387-d45867077656 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] Creating folder: Project (0e59b9c07aa74a1dad6c966255904cff). Parent ref: group-v550570. {{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 736.566148] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-78a6bad4-3128-4315-9878-4dcb9799242c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.569029] env[63028]: DEBUG nova.compute.manager [None req-a6e1cdda-f79c-4f40-b760-2ed696cbf7f7 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 50e4934b-b9b1-4887-b5d1-95a37fbf4c41] Start spawning the instance on the hypervisor. {{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 736.583026] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-767adebe-0612-4cb5-9387-d45867077656 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] Created folder: Project (0e59b9c07aa74a1dad6c966255904cff) in parent group-v550570. [ 736.583026] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-767adebe-0612-4cb5-9387-d45867077656 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] Creating folder: Instances. Parent ref: group-v550700. 
{{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 736.583026] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-57419632-da7c-491c-abcb-c7532d6e7158 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.598183] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-767adebe-0612-4cb5-9387-d45867077656 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] Created folder: Instances in parent group-v550700. [ 736.598183] env[63028]: DEBUG oslo.service.loopingcall [None req-767adebe-0612-4cb5-9387-d45867077656 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 736.598183] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 352ac7c3-17a8-4d7e-a66f-47ea7614892c] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 736.598183] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4c05b329-033f-4613-9a3f-5a11b6bc97f9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.626322] env[63028]: DEBUG nova.virt.hardware [None req-a6e1cdda-f79c-4f40-b760-2ed696cbf7f7 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 736.626322] env[63028]: DEBUG nova.virt.hardware [None req-a6e1cdda-f79c-4f40-b760-2ed696cbf7f7 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 736.626322] env[63028]: DEBUG nova.virt.hardware [None req-a6e1cdda-f79c-4f40-b760-2ed696cbf7f7 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 736.626498] env[63028]: DEBUG nova.virt.hardware [None req-a6e1cdda-f79c-4f40-b760-2ed696cbf7f7 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 736.626498] env[63028]: DEBUG nova.virt.hardware 
[None req-a6e1cdda-f79c-4f40-b760-2ed696cbf7f7 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 736.626498] env[63028]: DEBUG nova.virt.hardware [None req-a6e1cdda-f79c-4f40-b760-2ed696cbf7f7 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 736.626498] env[63028]: DEBUG nova.virt.hardware [None req-a6e1cdda-f79c-4f40-b760-2ed696cbf7f7 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 736.626498] env[63028]: DEBUG nova.virt.hardware [None req-a6e1cdda-f79c-4f40-b760-2ed696cbf7f7 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 736.626670] env[63028]: DEBUG nova.virt.hardware [None req-a6e1cdda-f79c-4f40-b760-2ed696cbf7f7 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 736.626670] env[63028]: DEBUG nova.virt.hardware [None req-a6e1cdda-f79c-4f40-b760-2ed696cbf7f7 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 736.626670] env[63028]: DEBUG nova.virt.hardware [None req-a6e1cdda-f79c-4f40-b760-2ed696cbf7f7 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 736.628514] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fb61f63-0f96-43f2-a715-309c4891dbbf {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.640935] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fff24d05-9b8b-431b-a14d-a3e7323229fd {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.647082] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 736.647082] env[63028]: value = "task-2735309" [ 736.647082] env[63028]: _type = "Task" [ 736.647082] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 736.669785] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735309, 'name': CreateVM_Task} progress is 10%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 736.722706] env[63028]: DEBUG nova.scheduler.client.report [None req-66952caa-4d08-4457-8103-f1724319cde7 tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 736.741137] env[63028]: DEBUG oslo_vmware.api [None req-6e3a9737-fb71-42ad-adca-40f4f56c8c52 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2735306, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.510608} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 736.743982] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e3a9737-fb71-42ad-adca-40f4f56c8c52 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] c06813c4-472d-4bf9-84ec-0d01306bcd48/c06813c4-472d-4bf9-84ec-0d01306bcd48.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 736.745409] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-6e3a9737-fb71-42ad-adca-40f4f56c8c52 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 736.745409] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ea41c5b5-55fe-4b0f-acb3-0bda77fee0c5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.751894] env[63028]: DEBUG oslo_vmware.api [None req-6e3a9737-fb71-42ad-adca-40f4f56c8c52 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Waiting for the task: (returnval){ [ 736.751894] env[63028]: value = "task-2735310" [ 736.751894] env[63028]: _type = "Task" [ 736.751894] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 736.762824] env[63028]: DEBUG oslo_vmware.api [None req-6e3a9737-fb71-42ad-adca-40f4f56c8c52 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2735310, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 736.854166] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ce407e48-ce4a-4a11-b901-4c905a15470b tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 736.909415] env[63028]: DEBUG nova.network.neutron [req-cab41a81-bc15-4fa5-afd6-7373e809318d req-2e762031-dc41-48cf-aaa8-a0128d1d04a6 service nova] [instance: 352ac7c3-17a8-4d7e-a66f-47ea7614892c] Updated VIF entry in instance network info cache for port 07a0490c-4de0-43cb-be3e-6a99cf23e4bb. {{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 736.909415] env[63028]: DEBUG nova.network.neutron [req-cab41a81-bc15-4fa5-afd6-7373e809318d req-2e762031-dc41-48cf-aaa8-a0128d1d04a6 service nova] [instance: 352ac7c3-17a8-4d7e-a66f-47ea7614892c] Updating instance_info_cache with network_info: [{"id": "07a0490c-4de0-43cb-be3e-6a99cf23e4bb", "address": "fa:16:3e:0c:7a:e9", "network": {"id": "a5e50c4a-1512-4be7-a5c0-319c140faf3e", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-2005706294-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0e59b9c07aa74a1dad6c966255904cff", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0c293d47-74c0-49d7-a474-cdb643080f6f", "external-id": "nsx-vlan-transportzone-172", "segmentation_id": 172, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap07a0490c-4d", "ovs_interfaceid": "07a0490c-4de0-43cb-be3e-6a99cf23e4bb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 737.015156] env[63028]: DEBUG oslo_concurrency.lockutils [None req-3ea37b67-73e0-4219-b828-a126fbc237e2 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Acquiring lock "refresh_cache-e2d39c43-6666-4fda-b8e2-485399c59e46" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 737.015283] env[63028]: DEBUG oslo_concurrency.lockutils [None req-3ea37b67-73e0-4219-b828-a126fbc237e2 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Acquired lock "refresh_cache-e2d39c43-6666-4fda-b8e2-485399c59e46" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 737.015442] env[63028]: DEBUG nova.network.neutron [None req-3ea37b67-73e0-4219-b828-a126fbc237e2 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] [instance: e2d39c43-6666-4fda-b8e2-485399c59e46] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 
737.161070] env[63028]: DEBUG nova.compute.manager [req-208cacbf-5a94-4e6c-bc4c-0087f53ece4d req-2abe6743-9822-46be-a69a-62357401cbdd service nova] [instance: 22713da1-ae53-4bbe-ae55-2490440cbd87] Received event network-vif-deleted-13472b65-beb6-4fe6-82c2-f1a6516c48a3 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 737.161344] env[63028]: DEBUG nova.compute.manager [req-208cacbf-5a94-4e6c-bc4c-0087f53ece4d req-2abe6743-9822-46be-a69a-62357401cbdd service nova] [instance: e2d39c43-6666-4fda-b8e2-485399c59e46] Received event network-vif-plugged-d4e7f627-9b53-4218-b7e5-a5c06ee4d53e {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 737.161668] env[63028]: DEBUG oslo_concurrency.lockutils [req-208cacbf-5a94-4e6c-bc4c-0087f53ece4d req-2abe6743-9822-46be-a69a-62357401cbdd service nova] Acquiring lock "e2d39c43-6666-4fda-b8e2-485399c59e46-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 737.161875] env[63028]: DEBUG oslo_concurrency.lockutils [req-208cacbf-5a94-4e6c-bc4c-0087f53ece4d req-2abe6743-9822-46be-a69a-62357401cbdd service nova] Lock "e2d39c43-6666-4fda-b8e2-485399c59e46-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 737.162104] env[63028]: DEBUG oslo_concurrency.lockutils [req-208cacbf-5a94-4e6c-bc4c-0087f53ece4d req-2abe6743-9822-46be-a69a-62357401cbdd service nova] Lock "e2d39c43-6666-4fda-b8e2-485399c59e46-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 737.162339] env[63028]: DEBUG nova.compute.manager [req-208cacbf-5a94-4e6c-bc4c-0087f53ece4d req-2abe6743-9822-46be-a69a-62357401cbdd service nova] [instance: e2d39c43-6666-4fda-b8e2-485399c59e46] No waiting events found dispatching network-vif-plugged-d4e7f627-9b53-4218-b7e5-a5c06ee4d53e {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 737.162519] env[63028]: WARNING nova.compute.manager [req-208cacbf-5a94-4e6c-bc4c-0087f53ece4d req-2abe6743-9822-46be-a69a-62357401cbdd service nova] [instance: e2d39c43-6666-4fda-b8e2-485399c59e46] Received unexpected event network-vif-plugged-d4e7f627-9b53-4218-b7e5-a5c06ee4d53e for instance with vm_state building and task_state spawning. [ 737.162710] env[63028]: DEBUG nova.compute.manager [req-208cacbf-5a94-4e6c-bc4c-0087f53ece4d req-2abe6743-9822-46be-a69a-62357401cbdd service nova] [instance: e2d39c43-6666-4fda-b8e2-485399c59e46] Received event network-changed-d4e7f627-9b53-4218-b7e5-a5c06ee4d53e {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 737.162899] env[63028]: DEBUG nova.compute.manager [req-208cacbf-5a94-4e6c-bc4c-0087f53ece4d req-2abe6743-9822-46be-a69a-62357401cbdd service nova] [instance: e2d39c43-6666-4fda-b8e2-485399c59e46] Refreshing instance network info cache due to event network-changed-d4e7f627-9b53-4218-b7e5-a5c06ee4d53e. 
{{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 737.163106] env[63028]: DEBUG oslo_concurrency.lockutils [req-208cacbf-5a94-4e6c-bc4c-0087f53ece4d req-2abe6743-9822-46be-a69a-62357401cbdd service nova] Acquiring lock "refresh_cache-e2d39c43-6666-4fda-b8e2-485399c59e46" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 737.167805] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735309, 'name': CreateVM_Task} progress is 25%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.231248] env[63028]: DEBUG oslo_concurrency.lockutils [None req-66952caa-4d08-4457-8103-f1724319cde7 tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.686s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 737.231686] env[63028]: DEBUG nova.compute.manager [None req-66952caa-4d08-4457-8103-f1724319cde7 tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] Start building networks asynchronously for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 737.234488] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c3a09395-4017-42a7-a909-5c9706ea7012 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.884s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 737.238108] env[63028]: INFO nova.compute.claims [None req-c3a09395-4017-42a7-a909-5c9706ea7012 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 1316318e-8dcf-4ac2-b40a-6a3ab6964997] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 737.268122] env[63028]: DEBUG oslo_vmware.api [None req-6e3a9737-fb71-42ad-adca-40f4f56c8c52 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2735310, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.167555} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 737.268717] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-6e3a9737-fb71-42ad-adca-40f4f56c8c52 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 737.269941] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e68f877b-6393-498f-ad10-97706398fce7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.304852] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-6e3a9737-fb71-42ad-adca-40f4f56c8c52 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Reconfiguring VM instance instance-00000029 to attach disk [datastore2] c06813c4-472d-4bf9-84ec-0d01306bcd48/c06813c4-472d-4bf9-84ec-0d01306bcd48.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 737.305990] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6e1f5511-4282-46ab-ac3f-648a08669ee3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.327509] env[63028]: DEBUG oslo_vmware.api [None req-6e3a9737-fb71-42ad-adca-40f4f56c8c52 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Waiting for the task: (returnval){ [ 737.327509] env[63028]: value = "task-2735311" [ 737.327509] env[63028]: _type = "Task" [ 737.327509] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 737.337054] env[63028]: DEBUG oslo_vmware.api [None req-6e3a9737-fb71-42ad-adca-40f4f56c8c52 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2735311, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.416618] env[63028]: DEBUG oslo_concurrency.lockutils [req-cab41a81-bc15-4fa5-afd6-7373e809318d req-2e762031-dc41-48cf-aaa8-a0128d1d04a6 service nova] Releasing lock "refresh_cache-352ac7c3-17a8-4d7e-a66f-47ea7614892c" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 737.580840] env[63028]: DEBUG nova.network.neutron [None req-3ea37b67-73e0-4219-b828-a126fbc237e2 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] [instance: e2d39c43-6666-4fda-b8e2-485399c59e46] Instance cache missing network info. {{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 737.661584] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735309, 'name': CreateVM_Task, 'duration_secs': 0.760831} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 737.661862] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 352ac7c3-17a8-4d7e-a66f-47ea7614892c] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 737.662691] env[63028]: DEBUG oslo_concurrency.lockutils [None req-767adebe-0612-4cb5-9387-d45867077656 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 737.662812] env[63028]: DEBUG oslo_concurrency.lockutils [None req-767adebe-0612-4cb5-9387-d45867077656 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 737.663178] env[63028]: DEBUG oslo_concurrency.lockutils [None req-767adebe-0612-4cb5-9387-d45867077656 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 737.664438] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-60591bff-c7e3-404d-a015-25f26a7ae93b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.671668] env[63028]: DEBUG oslo_vmware.api [None req-767adebe-0612-4cb5-9387-d45867077656 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] Waiting for the task: (returnval){ [ 737.671668] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5260e7da-b10c-a5d9-2b2c-232109fd1db2" [ 737.671668] env[63028]: _type = "Task" [ 737.671668] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 737.681934] env[63028]: DEBUG oslo_vmware.api [None req-767adebe-0612-4cb5-9387-d45867077656 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5260e7da-b10c-a5d9-2b2c-232109fd1db2, 'name': SearchDatastore_Task} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 737.682203] env[63028]: DEBUG oslo_concurrency.lockutils [None req-767adebe-0612-4cb5-9387-d45867077656 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 737.684164] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-767adebe-0612-4cb5-9387-d45867077656 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] [instance: 352ac7c3-17a8-4d7e-a66f-47ea7614892c] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 737.684164] env[63028]: DEBUG oslo_concurrency.lockutils [None req-767adebe-0612-4cb5-9387-d45867077656 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 737.684164] env[63028]: DEBUG oslo_concurrency.lockutils [None req-767adebe-0612-4cb5-9387-d45867077656 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 737.684164] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-767adebe-0612-4cb5-9387-d45867077656 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 737.684164] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4cf8e530-9f9b-4596-8d72-067f8147019f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.702834] env[63028]: DEBUG oslo_concurrency.lockutils [None req-bcb9cdb6-1a08-4676-8d51-ba444b4f20e0 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Acquiring lock "6e0959ac-8fca-47eb-b501-b50a3e9f025a" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 737.703110] env[63028]: DEBUG oslo_concurrency.lockutils [None req-bcb9cdb6-1a08-4676-8d51-ba444b4f20e0 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Lock "6e0959ac-8fca-47eb-b501-b50a3e9f025a" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 737.703287] env[63028]: DEBUG nova.compute.manager [None req-bcb9cdb6-1a08-4676-8d51-ba444b4f20e0 
tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 6e0959ac-8fca-47eb-b501-b50a3e9f025a] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 737.703591] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-767adebe-0612-4cb5-9387-d45867077656 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 737.703800] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-767adebe-0612-4cb5-9387-d45867077656 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 737.705443] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa21941c-ea44-44b5-8068-4d0aabcfb664 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.709674] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3f7ba352-fd48-4ca3-ac54-895afcdeab33 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.720640] env[63028]: DEBUG oslo_vmware.api [None req-767adebe-0612-4cb5-9387-d45867077656 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] Waiting for the task: (returnval){ [ 737.720640] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5230d4a0-2a20-5626-cee9-ecd11105f7ac" [ 737.720640] env[63028]: _type = "Task" [ 737.720640] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 737.725553] env[63028]: DEBUG nova.compute.manager [None req-bcb9cdb6-1a08-4676-8d51-ba444b4f20e0 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 6e0959ac-8fca-47eb-b501-b50a3e9f025a] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=63028) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 737.726224] env[63028]: DEBUG nova.objects.instance [None req-bcb9cdb6-1a08-4676-8d51-ba444b4f20e0 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Lazy-loading 'flavor' on Instance uuid 6e0959ac-8fca-47eb-b501-b50a3e9f025a {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 737.737397] env[63028]: DEBUG oslo_vmware.api [None req-767adebe-0612-4cb5-9387-d45867077656 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5230d4a0-2a20-5626-cee9-ecd11105f7ac, 'name': SearchDatastore_Task, 'duration_secs': 0.014019} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 737.738379] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-62290db4-31e8-4820-bb30-b072b00a5f5c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.744272] env[63028]: DEBUG nova.compute.utils [None req-66952caa-4d08-4457-8103-f1724319cde7 tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 737.745235] env[63028]: DEBUG nova.compute.manager [None req-66952caa-4d08-4457-8103-f1724319cde7 tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] Allocating IP information in the background. {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 737.745460] env[63028]: DEBUG nova.network.neutron [None req-66952caa-4d08-4457-8103-f1724319cde7 tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 737.751838] env[63028]: DEBUG oslo_vmware.api [None req-767adebe-0612-4cb5-9387-d45867077656 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] Waiting for the task: (returnval){ [ 737.751838] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52649280-147a-a467-32d7-01731f464ac4" [ 737.751838] env[63028]: _type = "Task" [ 737.751838] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 737.764209] env[63028]: DEBUG oslo_vmware.api [None req-767adebe-0612-4cb5-9387-d45867077656 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52649280-147a-a467-32d7-01731f464ac4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.839540] env[63028]: DEBUG oslo_vmware.api [None req-6e3a9737-fb71-42ad-adca-40f4f56c8c52 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2735311, 'name': ReconfigVM_Task, 'duration_secs': 0.320661} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 737.839806] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-6e3a9737-fb71-42ad-adca-40f4f56c8c52 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Reconfigured VM instance instance-00000029 to attach disk [datastore2] c06813c4-472d-4bf9-84ec-0d01306bcd48/c06813c4-472d-4bf9-84ec-0d01306bcd48.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 737.840461] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a43571fe-715c-4f0f-9190-0b72fd16f5f7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.849331] env[63028]: DEBUG oslo_vmware.api [None req-6e3a9737-fb71-42ad-adca-40f4f56c8c52 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Waiting for the task: (returnval){ [ 737.849331] env[63028]: value = "task-2735312" [ 737.849331] env[63028]: _type = "Task" [ 737.849331] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 737.858267] env[63028]: DEBUG oslo_vmware.api [None req-6e3a9737-fb71-42ad-adca-40f4f56c8c52 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2735312, 'name': Rename_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.881455] env[63028]: DEBUG nova.network.neutron [None req-a6e1cdda-f79c-4f40-b760-2ed696cbf7f7 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 50e4934b-b9b1-4887-b5d1-95a37fbf4c41] Successfully updated port: b6ea40f2-2217-44b1-bf3f-727a6649149a {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 737.903706] env[63028]: DEBUG nova.policy [None req-66952caa-4d08-4457-8103-f1724319cde7 tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ef119219bd554fd1b6339dda873bb6a2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7f16a87f7b9b420eb1310ad086f4124c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 738.246402] env[63028]: DEBUG nova.compute.manager [None req-66952caa-4d08-4457-8103-f1724319cde7 tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] Start building block device mappings for instance. 
{{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 738.270032] env[63028]: DEBUG oslo_vmware.api [None req-767adebe-0612-4cb5-9387-d45867077656 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52649280-147a-a467-32d7-01731f464ac4, 'name': SearchDatastore_Task, 'duration_secs': 0.0104} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.270032] env[63028]: DEBUG oslo_concurrency.lockutils [None req-767adebe-0612-4cb5-9387-d45867077656 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 738.270317] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-767adebe-0612-4cb5-9387-d45867077656 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] 352ac7c3-17a8-4d7e-a66f-47ea7614892c/352ac7c3-17a8-4d7e-a66f-47ea7614892c.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 738.270416] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-74b6e2c8-f970-4eea-a3d5-1e837e2a7777 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.281413] env[63028]: DEBUG oslo_vmware.api [None req-767adebe-0612-4cb5-9387-d45867077656 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] Waiting for the task: (returnval){ [ 738.281413] env[63028]: value = "task-2735313" [ 738.281413] env[63028]: _type = "Task" [ 738.281413] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.291175] env[63028]: DEBUG oslo_vmware.api [None req-767adebe-0612-4cb5-9387-d45867077656 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] Task: {'id': task-2735313, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.359325] env[63028]: DEBUG oslo_vmware.api [None req-6e3a9737-fb71-42ad-adca-40f4f56c8c52 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2735312, 'name': Rename_Task, 'duration_secs': 0.174453} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.362245] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e3a9737-fb71-42ad-adca-40f4f56c8c52 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 738.362245] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fc51c844-7c19-4b8c-8994-2ba990d6e259 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.368924] env[63028]: DEBUG oslo_vmware.api [None req-6e3a9737-fb71-42ad-adca-40f4f56c8c52 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Waiting for the task: (returnval){ [ 738.368924] env[63028]: value = "task-2735314" [ 738.368924] env[63028]: _type = "Task" [ 738.368924] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.381614] env[63028]: DEBUG oslo_vmware.api [None req-6e3a9737-fb71-42ad-adca-40f4f56c8c52 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2735314, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.384345] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a6e1cdda-f79c-4f40-b760-2ed696cbf7f7 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Acquiring lock "refresh_cache-50e4934b-b9b1-4887-b5d1-95a37fbf4c41" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 738.384490] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a6e1cdda-f79c-4f40-b760-2ed696cbf7f7 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Acquired lock "refresh_cache-50e4934b-b9b1-4887-b5d1-95a37fbf4c41" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 738.384662] env[63028]: DEBUG nova.network.neutron [None req-a6e1cdda-f79c-4f40-b760-2ed696cbf7f7 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 50e4934b-b9b1-4887-b5d1-95a37fbf4c41] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 738.698063] env[63028]: DEBUG nova.compute.manager [req-2b4331b9-31ed-45fd-aac1-5abb81af6a3b req-8ef1362d-606c-4cb8-bc0d-6b56490f28b7 service nova] [instance: 50e4934b-b9b1-4887-b5d1-95a37fbf4c41] Received event network-vif-plugged-b6ea40f2-2217-44b1-bf3f-727a6649149a {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 738.698456] env[63028]: DEBUG oslo_concurrency.lockutils [req-2b4331b9-31ed-45fd-aac1-5abb81af6a3b req-8ef1362d-606c-4cb8-bc0d-6b56490f28b7 service nova] Acquiring lock "50e4934b-b9b1-4887-b5d1-95a37fbf4c41-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 738.698672] env[63028]: DEBUG oslo_concurrency.lockutils [req-2b4331b9-31ed-45fd-aac1-5abb81af6a3b 
req-8ef1362d-606c-4cb8-bc0d-6b56490f28b7 service nova] Lock "50e4934b-b9b1-4887-b5d1-95a37fbf4c41-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 738.698800] env[63028]: DEBUG oslo_concurrency.lockutils [req-2b4331b9-31ed-45fd-aac1-5abb81af6a3b req-8ef1362d-606c-4cb8-bc0d-6b56490f28b7 service nova] Lock "50e4934b-b9b1-4887-b5d1-95a37fbf4c41-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 738.698985] env[63028]: DEBUG nova.compute.manager [req-2b4331b9-31ed-45fd-aac1-5abb81af6a3b req-8ef1362d-606c-4cb8-bc0d-6b56490f28b7 service nova] [instance: 50e4934b-b9b1-4887-b5d1-95a37fbf4c41] No waiting events found dispatching network-vif-plugged-b6ea40f2-2217-44b1-bf3f-727a6649149a {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 738.699783] env[63028]: WARNING nova.compute.manager [req-2b4331b9-31ed-45fd-aac1-5abb81af6a3b req-8ef1362d-606c-4cb8-bc0d-6b56490f28b7 service nova] [instance: 50e4934b-b9b1-4887-b5d1-95a37fbf4c41] Received unexpected event network-vif-plugged-b6ea40f2-2217-44b1-bf3f-727a6649149a for instance with vm_state building and task_state spawning. [ 738.700119] env[63028]: DEBUG nova.compute.manager [req-2b4331b9-31ed-45fd-aac1-5abb81af6a3b req-8ef1362d-606c-4cb8-bc0d-6b56490f28b7 service nova] [instance: 50e4934b-b9b1-4887-b5d1-95a37fbf4c41] Received event network-changed-b6ea40f2-2217-44b1-bf3f-727a6649149a {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 738.700335] env[63028]: DEBUG nova.compute.manager [req-2b4331b9-31ed-45fd-aac1-5abb81af6a3b req-8ef1362d-606c-4cb8-bc0d-6b56490f28b7 service nova] [instance: 50e4934b-b9b1-4887-b5d1-95a37fbf4c41] Refreshing instance network info cache due to event network-changed-b6ea40f2-2217-44b1-bf3f-727a6649149a. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 738.700635] env[63028]: DEBUG oslo_concurrency.lockutils [req-2b4331b9-31ed-45fd-aac1-5abb81af6a3b req-8ef1362d-606c-4cb8-bc0d-6b56490f28b7 service nova] Acquiring lock "refresh_cache-50e4934b-b9b1-4887-b5d1-95a37fbf4c41" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 738.738463] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-bcb9cdb6-1a08-4676-8d51-ba444b4f20e0 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 6e0959ac-8fca-47eb-b501-b50a3e9f025a] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 738.739116] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3eeb7f40-0d0c-4af0-8387-5042dcdd7b71 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.750087] env[63028]: DEBUG oslo_vmware.api [None req-bcb9cdb6-1a08-4676-8d51-ba444b4f20e0 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Waiting for the task: (returnval){ [ 738.750087] env[63028]: value = "task-2735315" [ 738.750087] env[63028]: _type = "Task" [ 738.750087] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.760287] env[63028]: INFO nova.virt.block_device [None req-66952caa-4d08-4457-8103-f1724319cde7 tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] Booting with volume 4246155e-0977-4f2a-b135-72a3849826ce at /dev/sda [ 738.768690] env[63028]: DEBUG oslo_vmware.api [None req-bcb9cdb6-1a08-4676-8d51-ba444b4f20e0 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Task: {'id': task-2735315, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.795466] env[63028]: DEBUG oslo_vmware.api [None req-767adebe-0612-4cb5-9387-d45867077656 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] Task: {'id': task-2735313, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.817883] env[63028]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-def7b344-e93b-4b1f-8761-58ad4c803ffb {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.830189] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1af07ab-bf18-4212-be57-4e6e2881992b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.880957] env[63028]: DEBUG nova.network.neutron [None req-3ea37b67-73e0-4219-b828-a126fbc237e2 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] [instance: e2d39c43-6666-4fda-b8e2-485399c59e46] Updating instance_info_cache with network_info: [{"id": "fbe61f8e-ac8c-487d-95a7-fa4740f61aa0", "address": "fa:16:3e:b6:4e:03", "network": {"id": "4f362bbc-35c9-4673-b6a6-8a7ea5638da5", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-92092290", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "97060d5fb7e8454eadaf5dc9b426a248", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e547d234-640c-449b-8279-0b16f75d6627", "external-id": "nsx-vlan-transportzone-539", "segmentation_id": 539, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfbe61f8e-ac", "ovs_interfaceid": "fbe61f8e-ac8c-487d-95a7-fa4740f61aa0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "e0e5a360-86c4-40e6-9e4a-06cd5da44d96", "address": "fa:16:3e:b7:6d:92", "network": {"id": "f31308ae-79dd-4944-81e2-153058a3ea34", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1582446931", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": 
"192.168.129.139", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "97060d5fb7e8454eadaf5dc9b426a248", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2624812a-9f9c-461d-8b5f-79bea90c7ad3", "external-id": "nsx-vlan-transportzone-123", "segmentation_id": 123, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape0e5a360-86", "ovs_interfaceid": "e0e5a360-86c4-40e6-9e4a-06cd5da44d96", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "d4e7f627-9b53-4218-b7e5-a5c06ee4d53e", "address": "fa:16:3e:1b:c7:dc", "network": {"id": "4f362bbc-35c9-4673-b6a6-8a7ea5638da5", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-92092290", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.54", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "97060d5fb7e8454eadaf5dc9b426a248", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e547d234-640c-449b-8279-0b16f75d6627", "external-id": "nsx-vlan-transportzone-539", "segmentation_id": 539, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd4e7f627-9b", "ovs_interfaceid": "d4e7f627-9b53-4218-b7e5-a5c06ee4d53e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 738.896859] env[63028]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fdbd8395-496b-44f0-82e8-0ea9eed0eb84 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.913488] env[63028]: DEBUG oslo_vmware.api [None req-6e3a9737-fb71-42ad-adca-40f4f56c8c52 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2735314, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.918243] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93a60db1-c7d7-4c8c-af33-28a0d6f9da7f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.967148] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93d895f1-e2c2-4417-8b7f-b90abdfb5330 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.975468] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aab82c97-fd2e-45ef-aa3d-58573beea1f8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.983024] env[63028]: DEBUG nova.network.neutron [None req-66952caa-4d08-4457-8103-f1724319cde7 tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] Successfully created port: f8cad445-9a0b-4d25-84a3-df0521f45d9f {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 739.011478] env[63028]: DEBUG nova.virt.block_device [None req-66952caa-4d08-4457-8103-f1724319cde7 tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] Updating existing volume attachment record: c29e7349-1680-4a80-8385-c29f4f3ae172 {{(pid=63028) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 739.014865] env[63028]: DEBUG nova.network.neutron [None req-a6e1cdda-f79c-4f40-b760-2ed696cbf7f7 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 50e4934b-b9b1-4887-b5d1-95a37fbf4c41] Instance cache missing network info. 
{{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 739.031139] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86a6aa1e-9f98-4ead-ba5c-4d187ddd8ebf {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.041662] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcb296d1-bb24-48b1-be20-f88851a53d23 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.082198] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5234f51-c41d-4ad7-bf59-6266de1153a1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.098857] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23e96b31-7447-4aa7-bac4-58a511c50abb {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.114069] env[63028]: DEBUG nova.compute.provider_tree [None req-c3a09395-4017-42a7-a909-5c9706ea7012 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 739.263131] env[63028]: DEBUG oslo_vmware.api [None req-bcb9cdb6-1a08-4676-8d51-ba444b4f20e0 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Task: {'id': task-2735315, 'name': PowerOffVM_Task, 'duration_secs': 0.349578} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.263630] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-bcb9cdb6-1a08-4676-8d51-ba444b4f20e0 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 6e0959ac-8fca-47eb-b501-b50a3e9f025a] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 739.264245] env[63028]: DEBUG nova.compute.manager [None req-bcb9cdb6-1a08-4676-8d51-ba444b4f20e0 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 6e0959ac-8fca-47eb-b501-b50a3e9f025a] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 739.266810] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99883347-1e16-44c8-8ebf-a8f5ee489e09 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.295206] env[63028]: DEBUG oslo_vmware.api [None req-767adebe-0612-4cb5-9387-d45867077656 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] Task: {'id': task-2735313, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.713099} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.295496] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-767adebe-0612-4cb5-9387-d45867077656 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] 352ac7c3-17a8-4d7e-a66f-47ea7614892c/352ac7c3-17a8-4d7e-a66f-47ea7614892c.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 739.295766] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-767adebe-0612-4cb5-9387-d45867077656 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] [instance: 352ac7c3-17a8-4d7e-a66f-47ea7614892c] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 739.295954] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8f59fc55-e7fa-418b-9f95-310b813e0089 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.305354] env[63028]: DEBUG oslo_vmware.api [None req-767adebe-0612-4cb5-9387-d45867077656 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] Waiting for the task: (returnval){ [ 739.305354] env[63028]: value = "task-2735316" [ 739.305354] env[63028]: _type = "Task" [ 739.305354] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.319301] env[63028]: DEBUG oslo_vmware.api [None req-767adebe-0612-4cb5-9387-d45867077656 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] Task: {'id': task-2735316, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.386339] env[63028]: DEBUG oslo_concurrency.lockutils [None req-3ea37b67-73e0-4219-b828-a126fbc237e2 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Releasing lock "refresh_cache-e2d39c43-6666-4fda-b8e2-485399c59e46" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 739.386783] env[63028]: DEBUG nova.compute.manager [None req-3ea37b67-73e0-4219-b828-a126fbc237e2 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] [instance: e2d39c43-6666-4fda-b8e2-485399c59e46] Instance network_info: |[{"id": "fbe61f8e-ac8c-487d-95a7-fa4740f61aa0", "address": "fa:16:3e:b6:4e:03", "network": {"id": "4f362bbc-35c9-4673-b6a6-8a7ea5638da5", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-92092290", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "97060d5fb7e8454eadaf5dc9b426a248", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e547d234-640c-449b-8279-0b16f75d6627", "external-id": "nsx-vlan-transportzone-539", "segmentation_id": 539, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfbe61f8e-ac", "ovs_interfaceid": "fbe61f8e-ac8c-487d-95a7-fa4740f61aa0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "e0e5a360-86c4-40e6-9e4a-06cd5da44d96", "address": "fa:16:3e:b7:6d:92", "network": {"id": "f31308ae-79dd-4944-81e2-153058a3ea34", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1582446931", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.139", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "97060d5fb7e8454eadaf5dc9b426a248", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2624812a-9f9c-461d-8b5f-79bea90c7ad3", "external-id": "nsx-vlan-transportzone-123", "segmentation_id": 123, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape0e5a360-86", "ovs_interfaceid": "e0e5a360-86c4-40e6-9e4a-06cd5da44d96", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "d4e7f627-9b53-4218-b7e5-a5c06ee4d53e", "address": "fa:16:3e:1b:c7:dc", "network": {"id": "4f362bbc-35c9-4673-b6a6-8a7ea5638da5", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-92092290", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.54", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, 
"meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "97060d5fb7e8454eadaf5dc9b426a248", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e547d234-640c-449b-8279-0b16f75d6627", "external-id": "nsx-vlan-transportzone-539", "segmentation_id": 539, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd4e7f627-9b", "ovs_interfaceid": "d4e7f627-9b53-4218-b7e5-a5c06ee4d53e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 739.387111] env[63028]: DEBUG oslo_concurrency.lockutils [req-208cacbf-5a94-4e6c-bc4c-0087f53ece4d req-2abe6743-9822-46be-a69a-62357401cbdd service nova] Acquired lock "refresh_cache-e2d39c43-6666-4fda-b8e2-485399c59e46" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 739.387299] env[63028]: DEBUG nova.network.neutron [req-208cacbf-5a94-4e6c-bc4c-0087f53ece4d req-2abe6743-9822-46be-a69a-62357401cbdd service nova] [instance: e2d39c43-6666-4fda-b8e2-485399c59e46] Refreshing network info cache for port d4e7f627-9b53-4218-b7e5-a5c06ee4d53e {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 739.388508] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-3ea37b67-73e0-4219-b828-a126fbc237e2 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] [instance: e2d39c43-6666-4fda-b8e2-485399c59e46] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b6:4e:03', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e547d234-640c-449b-8279-0b16f75d6627', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fbe61f8e-ac8c-487d-95a7-fa4740f61aa0', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:b7:6d:92', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2624812a-9f9c-461d-8b5f-79bea90c7ad3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e0e5a360-86c4-40e6-9e4a-06cd5da44d96', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:1b:c7:dc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e547d234-640c-449b-8279-0b16f75d6627', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd4e7f627-9b53-4218-b7e5-a5c06ee4d53e', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 739.400975] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ea37b67-73e0-4219-b828-a126fbc237e2 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Creating folder: Project (97060d5fb7e8454eadaf5dc9b426a248). Parent ref: group-v550570. 
{{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 739.402572] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d59a5c76-7ff6-4e0f-8484-0377d220d04d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.408987] env[63028]: DEBUG oslo_vmware.api [None req-6e3a9737-fb71-42ad-adca-40f4f56c8c52 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2735314, 'name': PowerOnVM_Task, 'duration_secs': 0.710206} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.409170] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e3a9737-fb71-42ad-adca-40f4f56c8c52 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 739.409359] env[63028]: INFO nova.compute.manager [None req-6e3a9737-fb71-42ad-adca-40f4f56c8c52 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Took 8.33 seconds to spawn the instance on the hypervisor. [ 739.409791] env[63028]: DEBUG nova.compute.manager [None req-6e3a9737-fb71-42ad-adca-40f4f56c8c52 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 739.410361] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3701cf1b-f72a-4470-a298-f17f10066bd1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.425958] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-3ea37b67-73e0-4219-b828-a126fbc237e2 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Created folder: Project (97060d5fb7e8454eadaf5dc9b426a248) in parent group-v550570. [ 739.426685] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ea37b67-73e0-4219-b828-a126fbc237e2 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Creating folder: Instances. Parent ref: group-v550703. 
{{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 739.426685] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3c623cc6-83ff-497c-b266-42e9d28b737c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.430336] env[63028]: DEBUG nova.network.neutron [None req-a6e1cdda-f79c-4f40-b760-2ed696cbf7f7 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 50e4934b-b9b1-4887-b5d1-95a37fbf4c41] Updating instance_info_cache with network_info: [{"id": "b6ea40f2-2217-44b1-bf3f-727a6649149a", "address": "fa:16:3e:1e:ca:d2", "network": {"id": "e8109779-7eb7-4751-b6e5-c1e3b007cb9a", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1892394824-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "05118b378b5e4d838962db2378b381bc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5446413d-c3b0-4cd2-a962-62240db178ac", "external-id": "nsx-vlan-transportzone-528", "segmentation_id": 528, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb6ea40f2-22", "ovs_interfaceid": "b6ea40f2-2217-44b1-bf3f-727a6649149a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 739.447964] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-3ea37b67-73e0-4219-b828-a126fbc237e2 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Created folder: Instances in parent group-v550703. [ 739.450028] env[63028]: DEBUG oslo.service.loopingcall [None req-3ea37b67-73e0-4219-b828-a126fbc237e2 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 739.450028] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e2d39c43-6666-4fda-b8e2-485399c59e46] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 739.450028] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4a64aa1b-9ab3-4b4c-a6aa-3d501fc95d76 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.479394] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 739.479394] env[63028]: value = "task-2735319" [ 739.479394] env[63028]: _type = "Task" [ 739.479394] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.493710] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735319, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.617409] env[63028]: DEBUG nova.scheduler.client.report [None req-c3a09395-4017-42a7-a909-5c9706ea7012 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 739.781136] env[63028]: DEBUG oslo_concurrency.lockutils [None req-bcb9cdb6-1a08-4676-8d51-ba444b4f20e0 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Lock "6e0959ac-8fca-47eb-b501-b50a3e9f025a" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.078s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 739.824559] env[63028]: DEBUG oslo_vmware.api [None req-767adebe-0612-4cb5-9387-d45867077656 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] Task: {'id': task-2735316, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070559} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.824875] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-767adebe-0612-4cb5-9387-d45867077656 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] [instance: 352ac7c3-17a8-4d7e-a66f-47ea7614892c] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 739.825763] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb83dc7c-e099-482b-97af-ab06d3fea7b8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.859138] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-767adebe-0612-4cb5-9387-d45867077656 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] [instance: 352ac7c3-17a8-4d7e-a66f-47ea7614892c] Reconfiguring VM instance instance-0000002a to attach disk [datastore2] 352ac7c3-17a8-4d7e-a66f-47ea7614892c/352ac7c3-17a8-4d7e-a66f-47ea7614892c.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 739.859496] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-03fae122-7564-411a-9b93-431518abbe1e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.883201] env[63028]: DEBUG oslo_vmware.api [None req-767adebe-0612-4cb5-9387-d45867077656 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] Waiting for the task: (returnval){ [ 739.883201] env[63028]: value = "task-2735320" [ 739.883201] env[63028]: _type = 
"Task" [ 739.883201] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.894170] env[63028]: DEBUG oslo_vmware.api [None req-767adebe-0612-4cb5-9387-d45867077656 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] Task: {'id': task-2735320, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.934477] env[63028]: INFO nova.compute.manager [None req-6e3a9737-fb71-42ad-adca-40f4f56c8c52 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Took 48.27 seconds to build instance. [ 739.934477] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a6e1cdda-f79c-4f40-b760-2ed696cbf7f7 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Releasing lock "refresh_cache-50e4934b-b9b1-4887-b5d1-95a37fbf4c41" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 739.934612] env[63028]: DEBUG nova.compute.manager [None req-a6e1cdda-f79c-4f40-b760-2ed696cbf7f7 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 50e4934b-b9b1-4887-b5d1-95a37fbf4c41] Instance network_info: |[{"id": "b6ea40f2-2217-44b1-bf3f-727a6649149a", "address": "fa:16:3e:1e:ca:d2", "network": {"id": "e8109779-7eb7-4751-b6e5-c1e3b007cb9a", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1892394824-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "05118b378b5e4d838962db2378b381bc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5446413d-c3b0-4cd2-a962-62240db178ac", "external-id": "nsx-vlan-transportzone-528", "segmentation_id": 528, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb6ea40f2-22", "ovs_interfaceid": "b6ea40f2-2217-44b1-bf3f-727a6649149a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 739.936134] env[63028]: DEBUG oslo_concurrency.lockutils [req-2b4331b9-31ed-45fd-aac1-5abb81af6a3b req-8ef1362d-606c-4cb8-bc0d-6b56490f28b7 service nova] Acquired lock "refresh_cache-50e4934b-b9b1-4887-b5d1-95a37fbf4c41" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 739.936134] env[63028]: DEBUG nova.network.neutron [req-2b4331b9-31ed-45fd-aac1-5abb81af6a3b req-8ef1362d-606c-4cb8-bc0d-6b56490f28b7 service nova] [instance: 50e4934b-b9b1-4887-b5d1-95a37fbf4c41] Refreshing network info cache for port b6ea40f2-2217-44b1-bf3f-727a6649149a {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 739.936679] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-a6e1cdda-f79c-4f40-b760-2ed696cbf7f7 
tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 50e4934b-b9b1-4887-b5d1-95a37fbf4c41] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1e:ca:d2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5446413d-c3b0-4cd2-a962-62240db178ac', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b6ea40f2-2217-44b1-bf3f-727a6649149a', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 739.945165] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6e1cdda-f79c-4f40-b760-2ed696cbf7f7 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Creating folder: Project (05118b378b5e4d838962db2378b381bc). Parent ref: group-v550570. {{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 739.947309] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ca8db294-c2cf-4d8c-9ef0-8e671ec84141 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.953934] env[63028]: DEBUG oslo_concurrency.lockutils [None req-72a36364-d2e4-49f2-bc10-dbe3424d39a6 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Acquiring lock "63524cd8-81de-419f-bb07-0326f3cb393f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 739.954173] env[63028]: DEBUG oslo_concurrency.lockutils [None req-72a36364-d2e4-49f2-bc10-dbe3424d39a6 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Lock "63524cd8-81de-419f-bb07-0326f3cb393f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 739.966538] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-a6e1cdda-f79c-4f40-b760-2ed696cbf7f7 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Created folder: Project (05118b378b5e4d838962db2378b381bc) in parent group-v550570. [ 739.966538] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6e1cdda-f79c-4f40-b760-2ed696cbf7f7 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Creating folder: Instances. Parent ref: group-v550706. {{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 739.966538] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e59da097-a3f4-4393-9ce2-e91d8f4abf56 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.978451] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-a6e1cdda-f79c-4f40-b760-2ed696cbf7f7 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Created folder: Instances in parent group-v550706. 
[ 739.978451] env[63028]: DEBUG oslo.service.loopingcall [None req-a6e1cdda-f79c-4f40-b760-2ed696cbf7f7 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 739.978451] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 50e4934b-b9b1-4887-b5d1-95a37fbf4c41] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 739.978451] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-427dc835-708d-4c95-bb10-61a4aad21826 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.011821] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735319, 'name': CreateVM_Task} progress is 25%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.013627] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 740.013627] env[63028]: value = "task-2735323" [ 740.013627] env[63028]: _type = "Task" [ 740.013627] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.036310] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735323, 'name': CreateVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.123073] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c3a09395-4017-42a7-a909-5c9706ea7012 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.888s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 740.124973] env[63028]: DEBUG nova.compute.manager [None req-c3a09395-4017-42a7-a909-5c9706ea7012 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 1316318e-8dcf-4ac2-b40a-6a3ab6964997] Start building networks asynchronously for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 740.129878] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8a306740-83e5-417a-bf6b-e5a0c8b16cc8 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.533s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 740.130400] env[63028]: DEBUG nova.objects.instance [None req-8a306740-83e5-417a-bf6b-e5a0c8b16cc8 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Lazy-loading 'resources' on Instance uuid b9db75ba-6832-45e8-8faf-d1cdaa7dabdd {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 740.391637] env[63028]: DEBUG nova.network.neutron [req-208cacbf-5a94-4e6c-bc4c-0087f53ece4d req-2abe6743-9822-46be-a69a-62357401cbdd service nova] [instance: e2d39c43-6666-4fda-b8e2-485399c59e46] Updated VIF entry in instance network info cache for port d4e7f627-9b53-4218-b7e5-a5c06ee4d53e. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 740.392082] env[63028]: DEBUG nova.network.neutron [req-208cacbf-5a94-4e6c-bc4c-0087f53ece4d req-2abe6743-9822-46be-a69a-62357401cbdd service nova] [instance: e2d39c43-6666-4fda-b8e2-485399c59e46] Updating instance_info_cache with network_info: [{"id": "fbe61f8e-ac8c-487d-95a7-fa4740f61aa0", "address": "fa:16:3e:b6:4e:03", "network": {"id": "4f362bbc-35c9-4673-b6a6-8a7ea5638da5", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-92092290", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "97060d5fb7e8454eadaf5dc9b426a248", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e547d234-640c-449b-8279-0b16f75d6627", "external-id": "nsx-vlan-transportzone-539", "segmentation_id": 539, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfbe61f8e-ac", "ovs_interfaceid": "fbe61f8e-ac8c-487d-95a7-fa4740f61aa0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "e0e5a360-86c4-40e6-9e4a-06cd5da44d96", "address": "fa:16:3e:b7:6d:92", "network": {"id": "f31308ae-79dd-4944-81e2-153058a3ea34", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1582446931", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.139", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "97060d5fb7e8454eadaf5dc9b426a248", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2624812a-9f9c-461d-8b5f-79bea90c7ad3", "external-id": "nsx-vlan-transportzone-123", "segmentation_id": 123, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape0e5a360-86", "ovs_interfaceid": "e0e5a360-86c4-40e6-9e4a-06cd5da44d96", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "d4e7f627-9b53-4218-b7e5-a5c06ee4d53e", "address": "fa:16:3e:1b:c7:dc", "network": {"id": "4f362bbc-35c9-4673-b6a6-8a7ea5638da5", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-92092290", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.54", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "97060d5fb7e8454eadaf5dc9b426a248", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e547d234-640c-449b-8279-0b16f75d6627", "external-id": "nsx-vlan-transportzone-539", 
"segmentation_id": 539, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd4e7f627-9b", "ovs_interfaceid": "d4e7f627-9b53-4218-b7e5-a5c06ee4d53e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 740.400697] env[63028]: DEBUG oslo_vmware.api [None req-767adebe-0612-4cb5-9387-d45867077656 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] Task: {'id': task-2735320, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.449079] env[63028]: DEBUG oslo_concurrency.lockutils [None req-6e3a9737-fb71-42ad-adca-40f4f56c8c52 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Lock "c06813c4-472d-4bf9-84ec-0d01306bcd48" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 92.803s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 740.516839] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735319, 'name': CreateVM_Task, 'duration_secs': 0.860786} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 740.522458] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e2d39c43-6666-4fda-b8e2-485399c59e46] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 740.523744] env[63028]: DEBUG oslo_concurrency.lockutils [None req-3ea37b67-73e0-4219-b828-a126fbc237e2 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 740.523744] env[63028]: DEBUG oslo_concurrency.lockutils [None req-3ea37b67-73e0-4219-b828-a126fbc237e2 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 740.524217] env[63028]: DEBUG oslo_concurrency.lockutils [None req-3ea37b67-73e0-4219-b828-a126fbc237e2 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 740.524926] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-71ca79e5-70fc-432d-b7bb-3a56a241943b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.531050] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735323, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.535378] env[63028]: DEBUG oslo_vmware.api [None req-3ea37b67-73e0-4219-b828-a126fbc237e2 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Waiting for the task: (returnval){ [ 740.535378] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]528c7038-37b1-d4e1-a48b-54edfbe3f3da" [ 740.535378] env[63028]: _type = "Task" [ 740.535378] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.550377] env[63028]: DEBUG oslo_vmware.api [None req-3ea37b67-73e0-4219-b828-a126fbc237e2 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]528c7038-37b1-d4e1-a48b-54edfbe3f3da, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.639208] env[63028]: DEBUG nova.compute.utils [None req-c3a09395-4017-42a7-a909-5c9706ea7012 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 740.643808] env[63028]: DEBUG nova.compute.manager [None req-c3a09395-4017-42a7-a909-5c9706ea7012 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 1316318e-8dcf-4ac2-b40a-6a3ab6964997] Allocating IP information in the background. {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 740.644094] env[63028]: DEBUG nova.network.neutron [None req-c3a09395-4017-42a7-a909-5c9706ea7012 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 1316318e-8dcf-4ac2-b40a-6a3ab6964997] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 740.747022] env[63028]: DEBUG nova.policy [None req-c3a09395-4017-42a7-a909-5c9706ea7012 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '70f27358d4184787bd66379bf75fc4ed', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '10a088f86d7a4b4db39113824aec83b7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 740.895526] env[63028]: DEBUG oslo_concurrency.lockutils [req-208cacbf-5a94-4e6c-bc4c-0087f53ece4d req-2abe6743-9822-46be-a69a-62357401cbdd service nova] Releasing lock "refresh_cache-e2d39c43-6666-4fda-b8e2-485399c59e46" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 740.901942] env[63028]: DEBUG oslo_vmware.api [None req-767adebe-0612-4cb5-9387-d45867077656 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] Task: {'id': task-2735320, 'name': ReconfigVM_Task, 'duration_secs': 0.750934} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 740.902485] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-767adebe-0612-4cb5-9387-d45867077656 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] [instance: 352ac7c3-17a8-4d7e-a66f-47ea7614892c] Reconfigured VM instance instance-0000002a to attach disk [datastore2] 352ac7c3-17a8-4d7e-a66f-47ea7614892c/352ac7c3-17a8-4d7e-a66f-47ea7614892c.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 740.903179] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-40e841fa-bc6e-4763-9b12-075087f90637 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.911613] env[63028]: DEBUG oslo_vmware.api [None req-767adebe-0612-4cb5-9387-d45867077656 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] Waiting for the task: (returnval){ [ 740.911613] env[63028]: value = "task-2735324" [ 740.911613] env[63028]: _type = "Task" [ 740.911613] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.925926] env[63028]: DEBUG oslo_vmware.api [None req-767adebe-0612-4cb5-9387-d45867077656 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] Task: {'id': task-2735324, 'name': Rename_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.955302] env[63028]: DEBUG nova.compute.manager [None req-f33d3f0b-c388-40bb-bf9b-780b1fd7657c tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] [instance: da88308f-ce62-40af-adae-e38aa506bdd9] Starting instance... {{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 741.037324] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735323, 'name': CreateVM_Task, 'duration_secs': 0.651068} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.041803] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 50e4934b-b9b1-4887-b5d1-95a37fbf4c41] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 741.045558] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a6e1cdda-f79c-4f40-b760-2ed696cbf7f7 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 741.045558] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a6e1cdda-f79c-4f40-b760-2ed696cbf7f7 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 741.045558] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a6e1cdda-f79c-4f40-b760-2ed696cbf7f7 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 741.045558] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a5ceca1c-4bbe-4bd7-ba6c-f2b71309f170 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.053136] env[63028]: DEBUG oslo_vmware.api [None req-3ea37b67-73e0-4219-b828-a126fbc237e2 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]528c7038-37b1-d4e1-a48b-54edfbe3f3da, 'name': SearchDatastore_Task, 'duration_secs': 0.040454} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.053371] env[63028]: DEBUG oslo_concurrency.lockutils [None req-3ea37b67-73e0-4219-b828-a126fbc237e2 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 741.053648] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-3ea37b67-73e0-4219-b828-a126fbc237e2 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] [instance: e2d39c43-6666-4fda-b8e2-485399c59e46] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 741.053891] env[63028]: DEBUG oslo_concurrency.lockutils [None req-3ea37b67-73e0-4219-b828-a126fbc237e2 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 741.055145] env[63028]: DEBUG oslo_concurrency.lockutils [None req-3ea37b67-73e0-4219-b828-a126fbc237e2 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 741.060108] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ea37b67-73e0-4219-b828-a126fbc237e2 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 741.060108] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c12a10f7-fc38-4517-ba65-051952544f3b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.060322] env[63028]: DEBUG oslo_vmware.api [None req-a6e1cdda-f79c-4f40-b760-2ed696cbf7f7 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Waiting for the task: (returnval){ [ 741.060322] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]529f1b22-048c-7b1d-02c1-7b6bc708761f" [ 741.060322] env[63028]: _type = "Task" [ 741.060322] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.068582] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ea37b67-73e0-4219-b828-a126fbc237e2 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 741.068795] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-3ea37b67-73e0-4219-b828-a126fbc237e2 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 741.070348] env[63028]: DEBUG nova.network.neutron [req-2b4331b9-31ed-45fd-aac1-5abb81af6a3b req-8ef1362d-606c-4cb8-bc0d-6b56490f28b7 service nova] [instance: 50e4934b-b9b1-4887-b5d1-95a37fbf4c41] Updated VIF entry in instance network info cache for port b6ea40f2-2217-44b1-bf3f-727a6649149a. {{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 741.074647] env[63028]: DEBUG nova.network.neutron [req-2b4331b9-31ed-45fd-aac1-5abb81af6a3b req-8ef1362d-606c-4cb8-bc0d-6b56490f28b7 service nova] [instance: 50e4934b-b9b1-4887-b5d1-95a37fbf4c41] Updating instance_info_cache with network_info: [{"id": "b6ea40f2-2217-44b1-bf3f-727a6649149a", "address": "fa:16:3e:1e:ca:d2", "network": {"id": "e8109779-7eb7-4751-b6e5-c1e3b007cb9a", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1892394824-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "05118b378b5e4d838962db2378b381bc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5446413d-c3b0-4cd2-a962-62240db178ac", "external-id": "nsx-vlan-transportzone-528", "segmentation_id": 528, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb6ea40f2-22", "ovs_interfaceid": "b6ea40f2-2217-44b1-bf3f-727a6649149a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 741.074647] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9568774c-4f8d-4c66-8c38-5b1817ff9de7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.079411] env[63028]: DEBUG oslo_vmware.api [None req-a6e1cdda-f79c-4f40-b760-2ed696cbf7f7 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]529f1b22-048c-7b1d-02c1-7b6bc708761f, 'name': SearchDatastore_Task, 'duration_secs': 0.011556} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.081352] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a6e1cdda-f79c-4f40-b760-2ed696cbf7f7 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 741.081352] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-a6e1cdda-f79c-4f40-b760-2ed696cbf7f7 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 50e4934b-b9b1-4887-b5d1-95a37fbf4c41] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 741.081352] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a6e1cdda-f79c-4f40-b760-2ed696cbf7f7 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 741.081352] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a6e1cdda-f79c-4f40-b760-2ed696cbf7f7 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 741.081352] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-a6e1cdda-f79c-4f40-b760-2ed696cbf7f7 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 741.081570] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-34b99b3d-046c-475e-ae6c-754b4f9f7c35 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.104091] env[63028]: DEBUG oslo_vmware.api [None req-3ea37b67-73e0-4219-b828-a126fbc237e2 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Waiting for the task: (returnval){ [ 741.104091] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]529ae513-5217-8019-f650-ea9e061d6dbf" [ 741.104091] env[63028]: _type = "Task" [ 741.104091] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.120878] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-a6e1cdda-f79c-4f40-b760-2ed696cbf7f7 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 741.121075] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-a6e1cdda-f79c-4f40-b760-2ed696cbf7f7 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 741.123016] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c658acf6-7389-426a-a2d6-ac46a27d3a41 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.132216] env[63028]: DEBUG oslo_vmware.api [None req-3ea37b67-73e0-4219-b828-a126fbc237e2 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]529ae513-5217-8019-f650-ea9e061d6dbf, 'name': SearchDatastore_Task, 'duration_secs': 0.012123} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.133953] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-15359752-c18b-4160-8ebe-3c20da1bbca1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.140356] env[63028]: DEBUG oslo_vmware.api [None req-a6e1cdda-f79c-4f40-b760-2ed696cbf7f7 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Waiting for the task: (returnval){ [ 741.140356] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5285f4f2-2669-a539-ef3a-ce15368ac571" [ 741.140356] env[63028]: _type = "Task" [ 741.140356] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.148851] env[63028]: DEBUG nova.compute.manager [None req-66952caa-4d08-4457-8103-f1724319cde7 tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] Start spawning the instance on the hypervisor. 
{{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 741.149636] env[63028]: DEBUG nova.virt.hardware [None req-66952caa-4d08-4457-8103-f1724319cde7 tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 741.150044] env[63028]: DEBUG nova.virt.hardware [None req-66952caa-4d08-4457-8103-f1724319cde7 tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 741.150323] env[63028]: DEBUG nova.virt.hardware [None req-66952caa-4d08-4457-8103-f1724319cde7 tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 741.150623] env[63028]: DEBUG nova.virt.hardware [None req-66952caa-4d08-4457-8103-f1724319cde7 tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 741.150906] env[63028]: DEBUG nova.virt.hardware [None req-66952caa-4d08-4457-8103-f1724319cde7 tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 741.151396] env[63028]: DEBUG nova.virt.hardware [None req-66952caa-4d08-4457-8103-f1724319cde7 tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 741.151724] env[63028]: DEBUG nova.virt.hardware [None req-66952caa-4d08-4457-8103-f1724319cde7 tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 741.152694] env[63028]: DEBUG nova.virt.hardware [None req-66952caa-4d08-4457-8103-f1724319cde7 tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 741.152694] env[63028]: DEBUG nova.virt.hardware [None req-66952caa-4d08-4457-8103-f1724319cde7 tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] Got 1 possible 
topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 741.152694] env[63028]: DEBUG nova.virt.hardware [None req-66952caa-4d08-4457-8103-f1724319cde7 tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 741.152694] env[63028]: DEBUG nova.virt.hardware [None req-66952caa-4d08-4457-8103-f1724319cde7 tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 741.153500] env[63028]: DEBUG nova.compute.manager [None req-c3a09395-4017-42a7-a909-5c9706ea7012 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 1316318e-8dcf-4ac2-b40a-6a3ab6964997] Start building block device mappings for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 741.158395] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae74ef87-b7bc-4fe8-8582-a33aba50ba31 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.162147] env[63028]: DEBUG oslo_vmware.api [None req-3ea37b67-73e0-4219-b828-a126fbc237e2 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Waiting for the task: (returnval){ [ 741.162147] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52563e21-603d-913b-9254-c0874ba6c508" [ 741.162147] env[63028]: _type = "Task" [ 741.162147] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.173464] env[63028]: DEBUG oslo_vmware.api [None req-a6e1cdda-f79c-4f40-b760-2ed696cbf7f7 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5285f4f2-2669-a539-ef3a-ce15368ac571, 'name': SearchDatastore_Task, 'duration_secs': 0.012448} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.178347] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df7cef88-1cd1-4f95-9846-c0359cd4b9cf {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.186892] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9fe4c4e5-1bc3-4987-a662-f46653917f07 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.189811] env[63028]: DEBUG oslo_vmware.api [None req-3ea37b67-73e0-4219-b828-a126fbc237e2 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52563e21-603d-913b-9254-c0874ba6c508, 'name': SearchDatastore_Task, 'duration_secs': 0.010857} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.190083] env[63028]: DEBUG oslo_concurrency.lockutils [None req-3ea37b67-73e0-4219-b828-a126fbc237e2 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 741.190305] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ea37b67-73e0-4219-b828-a126fbc237e2 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] e2d39c43-6666-4fda-b8e2-485399c59e46/e2d39c43-6666-4fda-b8e2-485399c59e46.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 741.190826] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5a35bb5d-531a-4d90-bfef-6b3009b61c1b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.204471] env[63028]: DEBUG oslo_vmware.api [None req-a6e1cdda-f79c-4f40-b760-2ed696cbf7f7 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Waiting for the task: (returnval){ [ 741.204471] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]527d04c0-5ec2-19be-980e-41951978846a" [ 741.204471] env[63028]: _type = "Task" [ 741.204471] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.214542] env[63028]: DEBUG oslo_vmware.api [None req-3ea37b67-73e0-4219-b828-a126fbc237e2 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Waiting for the task: (returnval){ [ 741.214542] env[63028]: value = "task-2735325" [ 741.214542] env[63028]: _type = "Task" [ 741.214542] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.223904] env[63028]: DEBUG oslo_vmware.api [None req-a6e1cdda-f79c-4f40-b760-2ed696cbf7f7 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]527d04c0-5ec2-19be-980e-41951978846a, 'name': SearchDatastore_Task, 'duration_secs': 0.010079} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.227864] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a6e1cdda-f79c-4f40-b760-2ed696cbf7f7 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 741.228125] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6e1cdda-f79c-4f40-b760-2ed696cbf7f7 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] 50e4934b-b9b1-4887-b5d1-95a37fbf4c41/50e4934b-b9b1-4887-b5d1-95a37fbf4c41.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 741.228612] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-183bc7cc-97d6-4c01-84e1-420b90aed7d3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.234844] env[63028]: DEBUG oslo_vmware.api [None req-3ea37b67-73e0-4219-b828-a126fbc237e2 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Task: {'id': task-2735325, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.240852] env[63028]: DEBUG oslo_vmware.api [None req-a6e1cdda-f79c-4f40-b760-2ed696cbf7f7 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Waiting for the task: (returnval){ [ 741.240852] env[63028]: value = "task-2735326" [ 741.240852] env[63028]: _type = "Task" [ 741.240852] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.252453] env[63028]: DEBUG oslo_vmware.api [None req-a6e1cdda-f79c-4f40-b760-2ed696cbf7f7 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2735326, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.344276] env[63028]: DEBUG nova.objects.instance [None req-74e9af90-8e58-4741-baf4-974da139bc79 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Lazy-loading 'flavor' on Instance uuid 6e0959ac-8fca-47eb-b501-b50a3e9f025a {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 741.389954] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c067a8a-a324-4649-82a9-dcc4f17b0165 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.406751] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f563859d-2461-407d-a539-f3388d63d344 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.445783] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31b7cf8c-f2e2-440a-8c5e-0ff2eae275d7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.451468] env[63028]: DEBUG nova.network.neutron [None req-c3a09395-4017-42a7-a909-5c9706ea7012 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 1316318e-8dcf-4ac2-b40a-6a3ab6964997] Successfully created port: 9917de69-098c-41fd-8a7e-63885001786a {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 741.457411] env[63028]: DEBUG oslo_vmware.api [None req-767adebe-0612-4cb5-9387-d45867077656 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] Task: {'id': task-2735324, 'name': Rename_Task, 'duration_secs': 0.195564} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.458634] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-767adebe-0612-4cb5-9387-d45867077656 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] [instance: 352ac7c3-17a8-4d7e-a66f-47ea7614892c] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 741.461685] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-705d77ee-0629-4b79-94a4-e9fb1d2b6d5d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.471875] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4204062b-d0d3-4d59-aa42-e07b9c999cb6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.479324] env[63028]: DEBUG oslo_vmware.api [None req-767adebe-0612-4cb5-9387-d45867077656 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] Waiting for the task: (returnval){ [ 741.479324] env[63028]: value = "task-2735327" [ 741.479324] env[63028]: _type = "Task" [ 741.479324] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.499266] env[63028]: DEBUG nova.compute.provider_tree [None req-8a306740-83e5-417a-bf6b-e5a0c8b16cc8 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 741.500842] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f33d3f0b-c388-40bb-bf9b-780b1fd7657c tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 741.510591] env[63028]: DEBUG oslo_vmware.api [None req-767adebe-0612-4cb5-9387-d45867077656 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] Task: {'id': task-2735327, 'name': PowerOnVM_Task} progress is 33%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.580629] env[63028]: DEBUG oslo_concurrency.lockutils [req-2b4331b9-31ed-45fd-aac1-5abb81af6a3b req-8ef1362d-606c-4cb8-bc0d-6b56490f28b7 service nova] Releasing lock "refresh_cache-50e4934b-b9b1-4887-b5d1-95a37fbf4c41" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 741.738359] env[63028]: DEBUG oslo_vmware.api [None req-3ea37b67-73e0-4219-b828-a126fbc237e2 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Task: {'id': task-2735325, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.761846] env[63028]: DEBUG oslo_vmware.api [None req-a6e1cdda-f79c-4f40-b760-2ed696cbf7f7 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2735326, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.851277] env[63028]: DEBUG oslo_concurrency.lockutils [None req-74e9af90-8e58-4741-baf4-974da139bc79 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Acquiring lock "refresh_cache-6e0959ac-8fca-47eb-b501-b50a3e9f025a" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 741.851597] env[63028]: DEBUG oslo_concurrency.lockutils [None req-74e9af90-8e58-4741-baf4-974da139bc79 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Acquired lock "refresh_cache-6e0959ac-8fca-47eb-b501-b50a3e9f025a" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 741.851801] env[63028]: DEBUG nova.network.neutron [None req-74e9af90-8e58-4741-baf4-974da139bc79 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 6e0959ac-8fca-47eb-b501-b50a3e9f025a] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 741.852148] env[63028]: DEBUG nova.objects.instance [None req-74e9af90-8e58-4741-baf4-974da139bc79 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Lazy-loading 'info_cache' on Instance uuid 6e0959ac-8fca-47eb-b501-b50a3e9f025a {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 741.995620] env[63028]: DEBUG oslo_vmware.api [None req-767adebe-0612-4cb5-9387-d45867077656 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] Task: {'id': task-2735327, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.003107] env[63028]: DEBUG nova.scheduler.client.report [None req-8a306740-83e5-417a-bf6b-e5a0c8b16cc8 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 742.046614] env[63028]: DEBUG nova.compute.manager [req-07d7d296-d1ea-4436-89d2-a840076d3a86 req-2242a8df-b891-4555-be62-2b63b1d1b5ff service nova] [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] Received event network-vif-plugged-f8cad445-9a0b-4d25-84a3-df0521f45d9f {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 742.046888] env[63028]: DEBUG oslo_concurrency.lockutils [req-07d7d296-d1ea-4436-89d2-a840076d3a86 req-2242a8df-b891-4555-be62-2b63b1d1b5ff service nova] Acquiring lock "600195de-ceb4-41a6-9ade-dda8b898e4db-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 742.047063] env[63028]: DEBUG oslo_concurrency.lockutils [req-07d7d296-d1ea-4436-89d2-a840076d3a86 req-2242a8df-b891-4555-be62-2b63b1d1b5ff service nova] Lock "600195de-ceb4-41a6-9ade-dda8b898e4db-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 742.047237] env[63028]: DEBUG oslo_concurrency.lockutils [req-07d7d296-d1ea-4436-89d2-a840076d3a86 req-2242a8df-b891-4555-be62-2b63b1d1b5ff service nova] Lock "600195de-ceb4-41a6-9ade-dda8b898e4db-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 742.047406] env[63028]: DEBUG nova.compute.manager [req-07d7d296-d1ea-4436-89d2-a840076d3a86 req-2242a8df-b891-4555-be62-2b63b1d1b5ff service nova] [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] No waiting events found dispatching network-vif-plugged-f8cad445-9a0b-4d25-84a3-df0521f45d9f {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 742.047576] env[63028]: WARNING nova.compute.manager [req-07d7d296-d1ea-4436-89d2-a840076d3a86 req-2242a8df-b891-4555-be62-2b63b1d1b5ff service nova] [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] Received unexpected event network-vif-plugged-f8cad445-9a0b-4d25-84a3-df0521f45d9f for instance with vm_state building and task_state spawning. 
[ 742.080431] env[63028]: DEBUG nova.network.neutron [None req-66952caa-4d08-4457-8103-f1724319cde7 tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] Successfully updated port: f8cad445-9a0b-4d25-84a3-df0521f45d9f {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 742.171607] env[63028]: DEBUG nova.compute.manager [None req-c3a09395-4017-42a7-a909-5c9706ea7012 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 1316318e-8dcf-4ac2-b40a-6a3ab6964997] Start spawning the instance on the hypervisor. {{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 742.201735] env[63028]: DEBUG nova.virt.hardware [None req-c3a09395-4017-42a7-a909-5c9706ea7012 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 742.201980] env[63028]: DEBUG nova.virt.hardware [None req-c3a09395-4017-42a7-a909-5c9706ea7012 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 742.202159] env[63028]: DEBUG nova.virt.hardware [None req-c3a09395-4017-42a7-a909-5c9706ea7012 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 742.202377] env[63028]: DEBUG nova.virt.hardware [None req-c3a09395-4017-42a7-a909-5c9706ea7012 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 742.202493] env[63028]: DEBUG nova.virt.hardware [None req-c3a09395-4017-42a7-a909-5c9706ea7012 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 742.202711] env[63028]: DEBUG nova.virt.hardware [None req-c3a09395-4017-42a7-a909-5c9706ea7012 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 742.203234] env[63028]: DEBUG nova.virt.hardware [None 
req-c3a09395-4017-42a7-a909-5c9706ea7012 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 742.203410] env[63028]: DEBUG nova.virt.hardware [None req-c3a09395-4017-42a7-a909-5c9706ea7012 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 742.203585] env[63028]: DEBUG nova.virt.hardware [None req-c3a09395-4017-42a7-a909-5c9706ea7012 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 742.203748] env[63028]: DEBUG nova.virt.hardware [None req-c3a09395-4017-42a7-a909-5c9706ea7012 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 742.203919] env[63028]: DEBUG nova.virt.hardware [None req-c3a09395-4017-42a7-a909-5c9706ea7012 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 742.204978] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-870b35c3-c9f8-4574-8bcd-692bd235e273 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.223758] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9329e5a4-810d-456d-a3aa-f1a348177614 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.235808] env[63028]: DEBUG oslo_vmware.api [None req-3ea37b67-73e0-4219-b828-a126fbc237e2 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Task: {'id': task-2735325, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.70878} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 742.244446] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ea37b67-73e0-4219-b828-a126fbc237e2 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] e2d39c43-6666-4fda-b8e2-485399c59e46/e2d39c43-6666-4fda-b8e2-485399c59e46.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 742.248019] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-3ea37b67-73e0-4219-b828-a126fbc237e2 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] [instance: e2d39c43-6666-4fda-b8e2-485399c59e46] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 742.248019] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6e9379c7-f54c-440b-9f0c-148406f8cf2d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.260368] env[63028]: DEBUG oslo_vmware.api [None req-a6e1cdda-f79c-4f40-b760-2ed696cbf7f7 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2735326, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.670853} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 742.261860] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6e1cdda-f79c-4f40-b760-2ed696cbf7f7 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] 50e4934b-b9b1-4887-b5d1-95a37fbf4c41/50e4934b-b9b1-4887-b5d1-95a37fbf4c41.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 742.262224] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-a6e1cdda-f79c-4f40-b760-2ed696cbf7f7 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 50e4934b-b9b1-4887-b5d1-95a37fbf4c41] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 742.262559] env[63028]: DEBUG oslo_vmware.api [None req-3ea37b67-73e0-4219-b828-a126fbc237e2 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Waiting for the task: (returnval){ [ 742.262559] env[63028]: value = "task-2735328" [ 742.262559] env[63028]: _type = "Task" [ 742.262559] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.262771] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-551af212-ba42-41d5-a240-ea3d3ab7c1bb {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.275135] env[63028]: DEBUG oslo_vmware.api [None req-3ea37b67-73e0-4219-b828-a126fbc237e2 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Task: {'id': task-2735328, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.276643] env[63028]: DEBUG oslo_vmware.api [None req-a6e1cdda-f79c-4f40-b760-2ed696cbf7f7 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Waiting for the task: (returnval){ [ 742.276643] env[63028]: value = "task-2735329" [ 742.276643] env[63028]: _type = "Task" [ 742.276643] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.287040] env[63028]: DEBUG oslo_vmware.api [None req-a6e1cdda-f79c-4f40-b760-2ed696cbf7f7 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2735329, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.357774] env[63028]: DEBUG nova.objects.base [None req-74e9af90-8e58-4741-baf4-974da139bc79 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Object Instance<6e0959ac-8fca-47eb-b501-b50a3e9f025a> lazy-loaded attributes: flavor,info_cache {{(pid=63028) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 742.494939] env[63028]: DEBUG oslo_vmware.api [None req-767adebe-0612-4cb5-9387-d45867077656 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] Task: {'id': task-2735327, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.511346] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8a306740-83e5-417a-bf6b-e5a0c8b16cc8 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.381s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 742.514774] env[63028]: DEBUG oslo_concurrency.lockutils [None req-0845dbb5-9e8a-45d4-8122-206916da704e tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.021s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 742.520054] env[63028]: INFO nova.compute.claims [None req-0845dbb5-9e8a-45d4-8122-206916da704e tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 5982cd5d-abf1-42d4-bb44-8d79de599f11] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 742.556684] env[63028]: INFO nova.scheduler.client.report [None req-8a306740-83e5-417a-bf6b-e5a0c8b16cc8 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Deleted allocations for instance b9db75ba-6832-45e8-8faf-d1cdaa7dabdd [ 742.590147] env[63028]: DEBUG oslo_concurrency.lockutils [None req-66952caa-4d08-4457-8103-f1724319cde7 tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] Acquiring lock "refresh_cache-600195de-ceb4-41a6-9ade-dda8b898e4db" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 742.590147] env[63028]: DEBUG oslo_concurrency.lockutils [None req-66952caa-4d08-4457-8103-f1724319cde7 tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] Acquired lock "refresh_cache-600195de-ceb4-41a6-9ade-dda8b898e4db" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 742.590147] env[63028]: DEBUG nova.network.neutron [None req-66952caa-4d08-4457-8103-f1724319cde7 tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 742.775307] env[63028]: DEBUG oslo_vmware.api [None req-3ea37b67-73e0-4219-b828-a126fbc237e2 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Task: {'id': task-2735328, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.229696} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 742.775609] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-3ea37b67-73e0-4219-b828-a126fbc237e2 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] [instance: e2d39c43-6666-4fda-b8e2-485399c59e46] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 742.776537] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f9da3f9-47f3-40c0-bf37-48de2f9f4a34 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.802817] env[63028]: DEBUG oslo_vmware.api [None req-a6e1cdda-f79c-4f40-b760-2ed696cbf7f7 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2735329, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077822} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 742.813402] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-3ea37b67-73e0-4219-b828-a126fbc237e2 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] [instance: e2d39c43-6666-4fda-b8e2-485399c59e46] Reconfiguring VM instance instance-00000028 to attach disk [datastore1] e2d39c43-6666-4fda-b8e2-485399c59e46/e2d39c43-6666-4fda-b8e2-485399c59e46.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 742.813762] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-a6e1cdda-f79c-4f40-b760-2ed696cbf7f7 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 50e4934b-b9b1-4887-b5d1-95a37fbf4c41] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 742.814206] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c6b83f86-40dc-4521-a3a0-325330b076ec {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.830370] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39e111cc-e6c8-4027-8392-095b073cf604 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.856500] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-a6e1cdda-f79c-4f40-b760-2ed696cbf7f7 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 50e4934b-b9b1-4887-b5d1-95a37fbf4c41] Reconfiguring VM instance instance-0000002b to attach disk [datastore2] 50e4934b-b9b1-4887-b5d1-95a37fbf4c41/50e4934b-b9b1-4887-b5d1-95a37fbf4c41.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 742.858152] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5f075007-9f38-4fe5-820b-9db7836cf9f3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.873906] env[63028]: DEBUG oslo_vmware.api [None req-3ea37b67-73e0-4219-b828-a126fbc237e2 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] 
Waiting for the task: (returnval){ [ 742.873906] env[63028]: value = "task-2735330" [ 742.873906] env[63028]: _type = "Task" [ 742.873906] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.884904] env[63028]: DEBUG oslo_vmware.api [None req-a6e1cdda-f79c-4f40-b760-2ed696cbf7f7 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Waiting for the task: (returnval){ [ 742.884904] env[63028]: value = "task-2735331" [ 742.884904] env[63028]: _type = "Task" [ 742.884904] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.889239] env[63028]: DEBUG oslo_vmware.api [None req-3ea37b67-73e0-4219-b828-a126fbc237e2 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Task: {'id': task-2735330, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.900270] env[63028]: DEBUG oslo_vmware.api [None req-a6e1cdda-f79c-4f40-b760-2ed696cbf7f7 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2735331, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.991976] env[63028]: DEBUG oslo_vmware.api [None req-767adebe-0612-4cb5-9387-d45867077656 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] Task: {'id': task-2735327, 'name': PowerOnVM_Task, 'duration_secs': 1.332084} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 742.992168] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-767adebe-0612-4cb5-9387-d45867077656 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] [instance: 352ac7c3-17a8-4d7e-a66f-47ea7614892c] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 742.992397] env[63028]: INFO nova.compute.manager [None req-767adebe-0612-4cb5-9387-d45867077656 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] [instance: 352ac7c3-17a8-4d7e-a66f-47ea7614892c] Took 9.11 seconds to spawn the instance on the hypervisor. 
[ 742.992583] env[63028]: DEBUG nova.compute.manager [None req-767adebe-0612-4cb5-9387-d45867077656 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] [instance: 352ac7c3-17a8-4d7e-a66f-47ea7614892c] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 742.993403] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12a14033-55da-45b1-b59e-a0fa3c406dba {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.035190] env[63028]: DEBUG oslo_concurrency.lockutils [None req-093af5c2-3354-4012-ab41-e1a65935f3a3 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] Acquiring lock "8c7c8713-d5d7-490e-aba5-25d98bfbfaa0" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 743.035190] env[63028]: DEBUG oslo_concurrency.lockutils [None req-093af5c2-3354-4012-ab41-e1a65935f3a3 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] Lock "8c7c8713-d5d7-490e-aba5-25d98bfbfaa0" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 743.035190] env[63028]: DEBUG oslo_concurrency.lockutils [None req-093af5c2-3354-4012-ab41-e1a65935f3a3 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] Acquiring lock "8c7c8713-d5d7-490e-aba5-25d98bfbfaa0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 743.035707] env[63028]: DEBUG oslo_concurrency.lockutils [None req-093af5c2-3354-4012-ab41-e1a65935f3a3 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] Lock "8c7c8713-d5d7-490e-aba5-25d98bfbfaa0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 743.036117] env[63028]: DEBUG oslo_concurrency.lockutils [None req-093af5c2-3354-4012-ab41-e1a65935f3a3 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] Lock "8c7c8713-d5d7-490e-aba5-25d98bfbfaa0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 743.038020] env[63028]: INFO nova.compute.manager [None req-093af5c2-3354-4012-ab41-e1a65935f3a3 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] [instance: 8c7c8713-d5d7-490e-aba5-25d98bfbfaa0] Terminating instance [ 743.066745] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8a306740-83e5-417a-bf6b-e5a0c8b16cc8 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Lock "b9db75ba-6832-45e8-8faf-d1cdaa7dabdd" "released" by 
"nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 38.406s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 743.867090] env[63028]: DEBUG nova.network.neutron [None req-c3a09395-4017-42a7-a909-5c9706ea7012 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 1316318e-8dcf-4ac2-b40a-6a3ab6964997] Successfully updated port: 9917de69-098c-41fd-8a7e-63885001786a {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 743.879270] env[63028]: DEBUG nova.compute.manager [None req-093af5c2-3354-4012-ab41-e1a65935f3a3 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] [instance: 8c7c8713-d5d7-490e-aba5-25d98bfbfaa0] Start destroying the instance on the hypervisor. {{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 743.879861] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-093af5c2-3354-4012-ab41-e1a65935f3a3 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] [instance: 8c7c8713-d5d7-490e-aba5-25d98bfbfaa0] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 743.881069] env[63028]: DEBUG nova.compute.manager [req-0ea1523c-6567-46e2-9e8d-9692946b0559 req-0d4665c9-7901-4c25-9ee3-dfe2411ba97d service nova] [instance: 1316318e-8dcf-4ac2-b40a-6a3ab6964997] Received event network-vif-plugged-9917de69-098c-41fd-8a7e-63885001786a {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 743.881214] env[63028]: DEBUG oslo_concurrency.lockutils [req-0ea1523c-6567-46e2-9e8d-9692946b0559 req-0d4665c9-7901-4c25-9ee3-dfe2411ba97d service nova] Acquiring lock "1316318e-8dcf-4ac2-b40a-6a3ab6964997-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 743.881415] env[63028]: DEBUG oslo_concurrency.lockutils [req-0ea1523c-6567-46e2-9e8d-9692946b0559 req-0d4665c9-7901-4c25-9ee3-dfe2411ba97d service nova] Lock "1316318e-8dcf-4ac2-b40a-6a3ab6964997-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 743.881780] env[63028]: DEBUG oslo_concurrency.lockutils [req-0ea1523c-6567-46e2-9e8d-9692946b0559 req-0d4665c9-7901-4c25-9ee3-dfe2411ba97d service nova] Lock "1316318e-8dcf-4ac2-b40a-6a3ab6964997-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 743.881780] env[63028]: DEBUG nova.compute.manager [req-0ea1523c-6567-46e2-9e8d-9692946b0559 req-0d4665c9-7901-4c25-9ee3-dfe2411ba97d service nova] [instance: 1316318e-8dcf-4ac2-b40a-6a3ab6964997] No waiting events found dispatching network-vif-plugged-9917de69-098c-41fd-8a7e-63885001786a {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 743.881897] env[63028]: WARNING nova.compute.manager [req-0ea1523c-6567-46e2-9e8d-9692946b0559 req-0d4665c9-7901-4c25-9ee3-dfe2411ba97d service nova] [instance: 1316318e-8dcf-4ac2-b40a-6a3ab6964997] Received unexpected event 
network-vif-plugged-9917de69-098c-41fd-8a7e-63885001786a for instance with vm_state building and task_state spawning. [ 743.892306] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ce0b6c34-7da3-4971-bcf1-d57e573aa364 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.894132] env[63028]: DEBUG oslo_concurrency.lockutils [None req-114b3219-d7c5-4835-8ed6-9b7d0ce0f081 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] Acquiring lock "e346c31b-ef1b-4f75-8564-cefe26bd672f" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 743.894363] env[63028]: DEBUG oslo_concurrency.lockutils [None req-114b3219-d7c5-4835-8ed6-9b7d0ce0f081 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] Lock "e346c31b-ef1b-4f75-8564-cefe26bd672f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 743.894810] env[63028]: INFO nova.compute.manager [None req-767adebe-0612-4cb5-9387-d45867077656 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] [instance: 352ac7c3-17a8-4d7e-a66f-47ea7614892c] Took 51.93 seconds to build instance. [ 743.904904] env[63028]: DEBUG oslo_vmware.api [None req-3ea37b67-73e0-4219-b828-a126fbc237e2 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Task: {'id': task-2735330, 'name': ReconfigVM_Task, 'duration_secs': 0.958428} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.909249] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-3ea37b67-73e0-4219-b828-a126fbc237e2 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] [instance: e2d39c43-6666-4fda-b8e2-485399c59e46] Reconfigured VM instance instance-00000028 to attach disk [datastore1] e2d39c43-6666-4fda-b8e2-485399c59e46/e2d39c43-6666-4fda-b8e2-485399c59e46.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 743.909656] env[63028]: DEBUG oslo_vmware.api [None req-a6e1cdda-f79c-4f40-b760-2ed696cbf7f7 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2735331, 'name': ReconfigVM_Task, 'duration_secs': 0.303127} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.909992] env[63028]: DEBUG oslo_vmware.api [None req-093af5c2-3354-4012-ab41-e1a65935f3a3 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] Waiting for the task: (returnval){ [ 743.909992] env[63028]: value = "task-2735332" [ 743.909992] env[63028]: _type = "Task" [ 743.909992] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 743.910210] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-72cd2e58-0a58-4c08-b417-f50f689c486f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.911924] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-a6e1cdda-f79c-4f40-b760-2ed696cbf7f7 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 50e4934b-b9b1-4887-b5d1-95a37fbf4c41] Reconfigured VM instance instance-0000002b to attach disk [datastore2] 50e4934b-b9b1-4887-b5d1-95a37fbf4c41/50e4934b-b9b1-4887-b5d1-95a37fbf4c41.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 743.913396] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7d3fdd42-3c59-4a75-b7cc-a173f63fef4f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.935473] env[63028]: DEBUG oslo_vmware.api [None req-093af5c2-3354-4012-ab41-e1a65935f3a3 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] Task: {'id': task-2735332, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.935798] env[63028]: DEBUG oslo_vmware.api [None req-3ea37b67-73e0-4219-b828-a126fbc237e2 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Waiting for the task: (returnval){ [ 743.935798] env[63028]: value = "task-2735333" [ 743.935798] env[63028]: _type = "Task" [ 743.935798] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 743.936051] env[63028]: DEBUG oslo_vmware.api [None req-a6e1cdda-f79c-4f40-b760-2ed696cbf7f7 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Waiting for the task: (returnval){ [ 743.936051] env[63028]: value = "task-2735334" [ 743.936051] env[63028]: _type = "Task" [ 743.936051] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 743.936936] env[63028]: DEBUG nova.network.neutron [None req-66952caa-4d08-4457-8103-f1724319cde7 tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] Instance cache missing network info. {{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 743.960521] env[63028]: DEBUG oslo_vmware.api [None req-a6e1cdda-f79c-4f40-b760-2ed696cbf7f7 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2735334, 'name': Rename_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.960814] env[63028]: DEBUG oslo_vmware.api [None req-3ea37b67-73e0-4219-b828-a126fbc237e2 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Task: {'id': task-2735333, 'name': Rename_Task} progress is 14%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.232231] env[63028]: DEBUG nova.network.neutron [None req-74e9af90-8e58-4741-baf4-974da139bc79 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 6e0959ac-8fca-47eb-b501-b50a3e9f025a] Updating instance_info_cache with network_info: [{"id": "8f389aaf-a460-47ed-862a-e45d83b3d9e3", "address": "fa:16:3e:fe:2b:36", "network": {"id": "f8ae7dcc-bddb-41e4-b0eb-ae1fe84858f2", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1381703882-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a1a2ecd6338148e6a90a71bf1fc5c778", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "00a15667-7ca5-4dc9-be92-164750d87988", "external-id": "nsx-vlan-transportzone-933", "segmentation_id": 933, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8f389aaf-a4", "ovs_interfaceid": "8f389aaf-a460-47ed-862a-e45d83b3d9e3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 744.328915] env[63028]: DEBUG nova.compute.manager [req-735b5e7d-553a-455d-90cd-ded12a1bb8de req-b585002f-f91a-4308-b30e-e94de65a50ce service nova] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Received event network-changed-e9be02f8-7ea6-45eb-a1cb-65fb95285caf {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 744.329345] env[63028]: DEBUG nova.compute.manager [req-735b5e7d-553a-455d-90cd-ded12a1bb8de req-b585002f-f91a-4308-b30e-e94de65a50ce service nova] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Refreshing instance network info cache due to event network-changed-e9be02f8-7ea6-45eb-a1cb-65fb95285caf. 
{{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 744.329634] env[63028]: DEBUG oslo_concurrency.lockutils [req-735b5e7d-553a-455d-90cd-ded12a1bb8de req-b585002f-f91a-4308-b30e-e94de65a50ce service nova] Acquiring lock "refresh_cache-c06813c4-472d-4bf9-84ec-0d01306bcd48" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 744.329816] env[63028]: DEBUG oslo_concurrency.lockutils [req-735b5e7d-553a-455d-90cd-ded12a1bb8de req-b585002f-f91a-4308-b30e-e94de65a50ce service nova] Acquired lock "refresh_cache-c06813c4-472d-4bf9-84ec-0d01306bcd48" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 744.329949] env[63028]: DEBUG nova.network.neutron [req-735b5e7d-553a-455d-90cd-ded12a1bb8de req-b585002f-f91a-4308-b30e-e94de65a50ce service nova] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Refreshing network info cache for port e9be02f8-7ea6-45eb-a1cb-65fb95285caf {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 744.386529] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c3a09395-4017-42a7-a909-5c9706ea7012 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Acquiring lock "refresh_cache-1316318e-8dcf-4ac2-b40a-6a3ab6964997" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 744.386706] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c3a09395-4017-42a7-a909-5c9706ea7012 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Acquired lock "refresh_cache-1316318e-8dcf-4ac2-b40a-6a3ab6964997" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 744.386913] env[63028]: DEBUG nova.network.neutron [None req-c3a09395-4017-42a7-a909-5c9706ea7012 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 1316318e-8dcf-4ac2-b40a-6a3ab6964997] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 744.391281] env[63028]: DEBUG nova.network.neutron [None req-66952caa-4d08-4457-8103-f1724319cde7 tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] Updating instance_info_cache with network_info: [{"id": "f8cad445-9a0b-4d25-84a3-df0521f45d9f", "address": "fa:16:3e:35:4f:e7", "network": {"id": "c71964ff-29fc-4ff0-af97-0c3a5c1b0f0f", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-584692412-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7f16a87f7b9b420eb1310ad086f4124c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "11032cc2-b275-48d2-9c40-9455ea7d49e3", "external-id": "nsx-vlan-transportzone-226", "segmentation_id": 226, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf8cad445-9a", "ovs_interfaceid": 
"f8cad445-9a0b-4d25-84a3-df0521f45d9f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 744.397814] env[63028]: DEBUG oslo_concurrency.lockutils [None req-767adebe-0612-4cb5-9387-d45867077656 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] Lock "352ac7c3-17a8-4d7e-a66f-47ea7614892c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 90.701s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 744.425529] env[63028]: DEBUG oslo_vmware.api [None req-093af5c2-3354-4012-ab41-e1a65935f3a3 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] Task: {'id': task-2735332, 'name': PowerOffVM_Task, 'duration_secs': 0.228576} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 744.425821] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-093af5c2-3354-4012-ab41-e1a65935f3a3 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] [instance: 8c7c8713-d5d7-490e-aba5-25d98bfbfaa0] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 744.426030] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-093af5c2-3354-4012-ab41-e1a65935f3a3 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] [instance: 8c7c8713-d5d7-490e-aba5-25d98bfbfaa0] Volume detach. 
Driver type: vmdk {{(pid=63028) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 744.426414] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-093af5c2-3354-4012-ab41-e1a65935f3a3 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] [instance: 8c7c8713-d5d7-490e-aba5-25d98bfbfaa0] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-550626', 'volume_id': '39116d21-d007-4c27-9ce1-9f92bb99f75c', 'name': 'volume-39116d21-d007-4c27-9ce1-9f92bb99f75c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '8c7c8713-d5d7-490e-aba5-25d98bfbfaa0', 'attached_at': '', 'detached_at': '', 'volume_id': '39116d21-d007-4c27-9ce1-9f92bb99f75c', 'serial': '39116d21-d007-4c27-9ce1-9f92bb99f75c'} {{(pid=63028) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 744.427434] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7b8ee3c-0ca7-413d-a319-e45f8e1799e0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.459420] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64ae347e-ec42-4f48-87e9-509552702ef8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.468060] env[63028]: DEBUG oslo_vmware.api [None req-3ea37b67-73e0-4219-b828-a126fbc237e2 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Task: {'id': task-2735333, 'name': Rename_Task, 'duration_secs': 0.190604} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 744.472214] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ea37b67-73e0-4219-b828-a126fbc237e2 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] [instance: e2d39c43-6666-4fda-b8e2-485399c59e46] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 744.472593] env[63028]: DEBUG oslo_vmware.api [None req-a6e1cdda-f79c-4f40-b760-2ed696cbf7f7 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2735334, 'name': Rename_Task, 'duration_secs': 0.155231} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 744.474981] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-68e6de19-750f-47d3-b9b8-7fcdf6a217db {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.476822] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98f065d6-12f7-4277-aba4-4e773ecaf774 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.479083] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6e1cdda-f79c-4f40-b760-2ed696cbf7f7 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 50e4934b-b9b1-4887-b5d1-95a37fbf4c41] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 744.480251] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c945e8d9-8044-4faf-a1f2-d3bcbcd11a47 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.501883] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0e1840b-50e7-4662-8e5f-942e2afe1433 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.505101] env[63028]: DEBUG oslo_vmware.api [None req-a6e1cdda-f79c-4f40-b760-2ed696cbf7f7 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Waiting for the task: (returnval){ [ 744.505101] env[63028]: value = "task-2735336" [ 744.505101] env[63028]: _type = "Task" [ 744.505101] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 744.505574] env[63028]: DEBUG oslo_vmware.api [None req-3ea37b67-73e0-4219-b828-a126fbc237e2 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Waiting for the task: (returnval){ [ 744.505574] env[63028]: value = "task-2735335" [ 744.505574] env[63028]: _type = "Task" [ 744.505574] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 744.523028] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-093af5c2-3354-4012-ab41-e1a65935f3a3 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] The volume has not been displaced from its original location: [datastore1] volume-39116d21-d007-4c27-9ce1-9f92bb99f75c/volume-39116d21-d007-4c27-9ce1-9f92bb99f75c.vmdk. No consolidation needed. 
{{(pid=63028) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 744.528778] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-093af5c2-3354-4012-ab41-e1a65935f3a3 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] [instance: 8c7c8713-d5d7-490e-aba5-25d98bfbfaa0] Reconfiguring VM instance instance-00000020 to detach disk 2000 {{(pid=63028) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 744.530604] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-67b74cfd-7fd3-4e74-a41e-482a85706377 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.543854] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb230e97-3f9b-4bd1-a92b-65c811e5ded8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.552661] env[63028]: DEBUG oslo_vmware.api [None req-a6e1cdda-f79c-4f40-b760-2ed696cbf7f7 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2735336, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.553145] env[63028]: DEBUG oslo_vmware.api [None req-3ea37b67-73e0-4219-b828-a126fbc237e2 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Task: {'id': task-2735335, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.558806] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10ab642a-7ac4-4c2e-ab5f-2b067d2badd6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.563295] env[63028]: DEBUG oslo_vmware.api [None req-093af5c2-3354-4012-ab41-e1a65935f3a3 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] Waiting for the task: (returnval){ [ 744.563295] env[63028]: value = "task-2735337" [ 744.563295] env[63028]: _type = "Task" [ 744.563295] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 744.597158] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c91203a-9259-4fc1-b9cf-b7463daf56d7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.600213] env[63028]: DEBUG oslo_vmware.api [None req-093af5c2-3354-4012-ab41-e1a65935f3a3 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] Task: {'id': task-2735337, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.606348] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b554a10c-9b4a-451f-8514-090208dfe717 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.621336] env[63028]: DEBUG nova.compute.provider_tree [None req-0845dbb5-9e8a-45d4-8122-206916da704e tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 744.737936] env[63028]: DEBUG oslo_concurrency.lockutils [None req-74e9af90-8e58-4741-baf4-974da139bc79 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Releasing lock "refresh_cache-6e0959ac-8fca-47eb-b501-b50a3e9f025a" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 744.895814] env[63028]: DEBUG oslo_concurrency.lockutils [None req-66952caa-4d08-4457-8103-f1724319cde7 tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] Releasing lock "refresh_cache-600195de-ceb4-41a6-9ade-dda8b898e4db" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 744.896179] env[63028]: DEBUG nova.compute.manager [None req-66952caa-4d08-4457-8103-f1724319cde7 tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] Instance network_info: |[{"id": "f8cad445-9a0b-4d25-84a3-df0521f45d9f", "address": "fa:16:3e:35:4f:e7", "network": {"id": "c71964ff-29fc-4ff0-af97-0c3a5c1b0f0f", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-584692412-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7f16a87f7b9b420eb1310ad086f4124c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "11032cc2-b275-48d2-9c40-9455ea7d49e3", "external-id": "nsx-vlan-transportzone-226", "segmentation_id": 226, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf8cad445-9a", "ovs_interfaceid": "f8cad445-9a0b-4d25-84a3-df0521f45d9f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 744.896634] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-66952caa-4d08-4457-8103-f1724319cde7 tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:35:4f:e7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '11032cc2-b275-48d2-9c40-9455ea7d49e3', 'network-type': 'nsx.LogicalSwitch', 
'use-external-id': True}, 'iface_id': 'f8cad445-9a0b-4d25-84a3-df0521f45d9f', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 744.908467] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-66952caa-4d08-4457-8103-f1724319cde7 tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] Creating folder: Project (7f16a87f7b9b420eb1310ad086f4124c). Parent ref: group-v550570. {{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 744.909163] env[63028]: DEBUG nova.compute.manager [None req-d602f814-ca27-4ecc-a846-b4d4de9caa30 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: cd11b318-9158-4f1d-8aa8-1c9d565bb5d5] Starting instance... {{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 744.915983] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-71539fc0-bada-4fd0-b0b4-3b8a318f422b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.930915] env[63028]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 744.931188] env[63028]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=63028) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 744.931552] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-66952caa-4d08-4457-8103-f1724319cde7 tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] Folder already exists: Project (7f16a87f7b9b420eb1310ad086f4124c). Parent ref: group-v550570. {{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1609}} [ 744.931810] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-66952caa-4d08-4457-8103-f1724319cde7 tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] Creating folder: Instances. Parent ref: group-v550659. {{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 744.932014] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-22e48ee7-f60d-47aa-b911-b09683091a02 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.945805] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-66952caa-4d08-4457-8103-f1724319cde7 tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] Created folder: Instances in parent group-v550659. [ 744.946241] env[63028]: DEBUG oslo.service.loopingcall [None req-66952caa-4d08-4457-8103-f1724319cde7 tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 744.946623] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 744.946867] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f58e3d52-d6ba-4d1c-ab0e-be62a89b0e5b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.967643] env[63028]: DEBUG nova.network.neutron [None req-c3a09395-4017-42a7-a909-5c9706ea7012 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 1316318e-8dcf-4ac2-b40a-6a3ab6964997] Instance cache missing network info. {{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 744.977116] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 744.977116] env[63028]: value = "task-2735340" [ 744.977116] env[63028]: _type = "Task" [ 744.977116] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 744.989186] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735340, 'name': CreateVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.019998] env[63028]: DEBUG oslo_vmware.api [None req-3ea37b67-73e0-4219-b828-a126fbc237e2 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Task: {'id': task-2735335, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.023498] env[63028]: DEBUG oslo_vmware.api [None req-a6e1cdda-f79c-4f40-b760-2ed696cbf7f7 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2735336, 'name': PowerOnVM_Task, 'duration_secs': 0.468252} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 745.023747] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6e1cdda-f79c-4f40-b760-2ed696cbf7f7 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 50e4934b-b9b1-4887-b5d1-95a37fbf4c41] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 745.023948] env[63028]: INFO nova.compute.manager [None req-a6e1cdda-f79c-4f40-b760-2ed696cbf7f7 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 50e4934b-b9b1-4887-b5d1-95a37fbf4c41] Took 8.46 seconds to spawn the instance on the hypervisor. 
[ 745.024138] env[63028]: DEBUG nova.compute.manager [None req-a6e1cdda-f79c-4f40-b760-2ed696cbf7f7 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 50e4934b-b9b1-4887-b5d1-95a37fbf4c41] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 745.024913] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f65c4707-e011-4636-954c-f9dff65f1f8a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.081027] env[63028]: DEBUG oslo_vmware.api [None req-093af5c2-3354-4012-ab41-e1a65935f3a3 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] Task: {'id': task-2735337, 'name': ReconfigVM_Task, 'duration_secs': 0.197702} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 745.081027] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-093af5c2-3354-4012-ab41-e1a65935f3a3 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] [instance: 8c7c8713-d5d7-490e-aba5-25d98bfbfaa0] Reconfigured VM instance instance-00000020 to detach disk 2000 {{(pid=63028) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 745.087429] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ce851ee7-11f6-48fc-9f08-33a3557040d2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.106364] env[63028]: DEBUG oslo_vmware.api [None req-093af5c2-3354-4012-ab41-e1a65935f3a3 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] Waiting for the task: (returnval){ [ 745.106364] env[63028]: value = "task-2735341" [ 745.106364] env[63028]: _type = "Task" [ 745.106364] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.117986] env[63028]: DEBUG oslo_vmware.api [None req-093af5c2-3354-4012-ab41-e1a65935f3a3 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] Task: {'id': task-2735341, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.125431] env[63028]: DEBUG nova.scheduler.client.report [None req-0845dbb5-9e8a-45d4-8122-206916da704e tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 745.337031] env[63028]: DEBUG nova.network.neutron [None req-c3a09395-4017-42a7-a909-5c9706ea7012 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 1316318e-8dcf-4ac2-b40a-6a3ab6964997] Updating instance_info_cache with network_info: [{"id": "9917de69-098c-41fd-8a7e-63885001786a", "address": "fa:16:3e:06:ad:05", "network": {"id": "47c482bc-2ff1-431d-8910-0bf36def79a2", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.196", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "96661ac8d4f04d6e97eea4809b444133", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56136ef6-99d7-4562-9a9f-d66fec951c5c", "external-id": "nsx-vlan-transportzone-32", "segmentation_id": 32, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9917de69-09", "ovs_interfaceid": "9917de69-098c-41fd-8a7e-63885001786a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 745.354433] env[63028]: DEBUG nova.objects.instance [None req-43b6e421-50ad-4acf-9972-89c555d7d902 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] Lazy-loading 'flavor' on Instance uuid 0e07a6cd-8c99-408d-95ba-63f7839c327f {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 745.356313] env[63028]: DEBUG nova.network.neutron [req-735b5e7d-553a-455d-90cd-ded12a1bb8de req-b585002f-f91a-4308-b30e-e94de65a50ce service nova] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Updated VIF entry in instance network info cache for port e9be02f8-7ea6-45eb-a1cb-65fb95285caf. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 745.356313] env[63028]: DEBUG nova.network.neutron [req-735b5e7d-553a-455d-90cd-ded12a1bb8de req-b585002f-f91a-4308-b30e-e94de65a50ce service nova] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Updating instance_info_cache with network_info: [{"id": "e9be02f8-7ea6-45eb-a1cb-65fb95285caf", "address": "fa:16:3e:cc:b1:42", "network": {"id": "49670a42-3caa-4492-8b32-f16f3fe77f4b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1158317332-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.229", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b4dcaef840f940bda057d0371cdc5adb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4df917f7-847a-4c0e-b0e3-69a52e4a1554", "external-id": "cl2-zone-457", "segmentation_id": 457, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape9be02f8-7e", "ovs_interfaceid": "e9be02f8-7ea6-45eb-a1cb-65fb95285caf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 745.399347] env[63028]: DEBUG nova.compute.manager [req-fdf3333f-0a4f-437c-89e9-af53b9e1fac1 req-c9fed868-6019-4a77-9a7d-d298635d1a23 service nova] [instance: 1316318e-8dcf-4ac2-b40a-6a3ab6964997] Received event network-changed-9917de69-098c-41fd-8a7e-63885001786a {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 745.399551] env[63028]: DEBUG nova.compute.manager [req-fdf3333f-0a4f-437c-89e9-af53b9e1fac1 req-c9fed868-6019-4a77-9a7d-d298635d1a23 service nova] [instance: 1316318e-8dcf-4ac2-b40a-6a3ab6964997] Refreshing instance network info cache due to event network-changed-9917de69-098c-41fd-8a7e-63885001786a. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 745.399747] env[63028]: DEBUG oslo_concurrency.lockutils [req-fdf3333f-0a4f-437c-89e9-af53b9e1fac1 req-c9fed868-6019-4a77-9a7d-d298635d1a23 service nova] Acquiring lock "refresh_cache-1316318e-8dcf-4ac2-b40a-6a3ab6964997" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 745.437968] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d602f814-ca27-4ecc-a846-b4d4de9caa30 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 745.488720] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735340, 'name': CreateVM_Task, 'duration_secs': 0.44879} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 745.488898] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 745.491715] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-66952caa-4d08-4457-8103-f1724319cde7 tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'delete_on_termination': True, 'guest_format': None, 'disk_bus': None, 'mount_device': '/dev/sda', 'attachment_id': 'c29e7349-1680-4a80-8385-c29f4f3ae172', 'boot_index': 0, 'device_type': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-550662', 'volume_id': '4246155e-0977-4f2a-b135-72a3849826ce', 'name': 'volume-4246155e-0977-4f2a-b135-72a3849826ce', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '600195de-ceb4-41a6-9ade-dda8b898e4db', 'attached_at': '', 'detached_at': '', 'volume_id': '4246155e-0977-4f2a-b135-72a3849826ce', 'serial': '4246155e-0977-4f2a-b135-72a3849826ce'}, 'volume_type': None}], 'swap': None} {{(pid=63028) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 745.491715] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-66952caa-4d08-4457-8103-f1724319cde7 tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] Root volume attach. Driver type: vmdk {{(pid=63028) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 745.491715] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ba273b7-2841-4a7b-b245-6658117c1e1d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.499582] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b18ee47-7a6d-4fbd-af19-48e264255db1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.506613] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6588c68-89d2-4d10-b120-374e843d725e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.517072] env[63028]: DEBUG oslo_vmware.api [None req-3ea37b67-73e0-4219-b828-a126fbc237e2 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Task: {'id': task-2735335, 'name': PowerOnVM_Task, 'duration_secs': 0.594054} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 745.518748] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ea37b67-73e0-4219-b828-a126fbc237e2 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] [instance: e2d39c43-6666-4fda-b8e2-485399c59e46] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 745.518955] env[63028]: INFO nova.compute.manager [None req-3ea37b67-73e0-4219-b828-a126fbc237e2 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] [instance: e2d39c43-6666-4fda-b8e2-485399c59e46] Took 17.15 seconds to spawn the instance on the hypervisor. [ 745.519143] env[63028]: DEBUG nova.compute.manager [None req-3ea37b67-73e0-4219-b828-a126fbc237e2 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] [instance: e2d39c43-6666-4fda-b8e2-485399c59e46] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 745.520502] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-602cd71f-34e6-469c-927b-e42907be0224 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.522869] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-c4a44d70-12ba-42e4-aa4d-200a9627cde4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.535154] env[63028]: DEBUG oslo_vmware.api [None req-66952caa-4d08-4457-8103-f1724319cde7 tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] Waiting for the task: (returnval){ [ 745.535154] env[63028]: value = "task-2735342" [ 745.535154] env[63028]: _type = "Task" [ 745.535154] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.549941] env[63028]: DEBUG oslo_vmware.api [None req-66952caa-4d08-4457-8103-f1724319cde7 tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] Task: {'id': task-2735342, 'name': RelocateVM_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.552099] env[63028]: INFO nova.compute.manager [None req-a6e1cdda-f79c-4f40-b760-2ed696cbf7f7 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 50e4934b-b9b1-4887-b5d1-95a37fbf4c41] Took 46.83 seconds to build instance. [ 745.616023] env[63028]: DEBUG oslo_vmware.api [None req-093af5c2-3354-4012-ab41-e1a65935f3a3 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] Task: {'id': task-2735341, 'name': ReconfigVM_Task, 'duration_secs': 0.260028} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 745.617307] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-093af5c2-3354-4012-ab41-e1a65935f3a3 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] [instance: 8c7c8713-d5d7-490e-aba5-25d98bfbfaa0] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-550626', 'volume_id': '39116d21-d007-4c27-9ce1-9f92bb99f75c', 'name': 'volume-39116d21-d007-4c27-9ce1-9f92bb99f75c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '8c7c8713-d5d7-490e-aba5-25d98bfbfaa0', 'attached_at': '', 'detached_at': '', 'volume_id': '39116d21-d007-4c27-9ce1-9f92bb99f75c', 'serial': '39116d21-d007-4c27-9ce1-9f92bb99f75c'} {{(pid=63028) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 745.617307] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-093af5c2-3354-4012-ab41-e1a65935f3a3 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] [instance: 8c7c8713-d5d7-490e-aba5-25d98bfbfaa0] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 745.618042] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-762d990d-4c20-4836-9b2c-2eb904d3a26a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.626331] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-093af5c2-3354-4012-ab41-e1a65935f3a3 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] [instance: 8c7c8713-d5d7-490e-aba5-25d98bfbfaa0] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 745.626610] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-69224443-4cc2-4f9f-b3f8-c793a4f481de {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.635192] env[63028]: DEBUG oslo_concurrency.lockutils [None req-0845dbb5-9e8a-45d4-8122-206916da704e tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.121s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 745.635526] env[63028]: DEBUG nova.compute.manager [None req-0845dbb5-9e8a-45d4-8122-206916da704e tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 5982cd5d-abf1-42d4-bb44-8d79de599f11] Start building networks asynchronously for instance. 
{{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 745.638205] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c2083659-4645-4440-9683-4af8a1564415 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.369s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 745.640114] env[63028]: INFO nova.compute.claims [None req-c2083659-4645-4440-9683-4af8a1564415 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: 3e45e7f3-a34f-4eab-9fff-1c874c832e2a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 745.744945] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-74e9af90-8e58-4741-baf4-974da139bc79 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 6e0959ac-8fca-47eb-b501-b50a3e9f025a] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 745.745221] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-eead90bc-8068-44bd-a333-f9ca1def5a86 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.753568] env[63028]: DEBUG oslo_vmware.api [None req-74e9af90-8e58-4741-baf4-974da139bc79 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Waiting for the task: (returnval){ [ 745.753568] env[63028]: value = "task-2735344" [ 745.753568] env[63028]: _type = "Task" [ 745.753568] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.762760] env[63028]: DEBUG oslo_vmware.api [None req-74e9af90-8e58-4741-baf4-974da139bc79 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Task: {'id': task-2735344, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.816525] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4b397209-e0b0-4739-afc2-198208fd9eb6 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] Acquiring lock "352ac7c3-17a8-4d7e-a66f-47ea7614892c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 745.816810] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4b397209-e0b0-4739-afc2-198208fd9eb6 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] Lock "352ac7c3-17a8-4d7e-a66f-47ea7614892c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 745.817034] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4b397209-e0b0-4739-afc2-198208fd9eb6 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] Acquiring lock "352ac7c3-17a8-4d7e-a66f-47ea7614892c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 745.817216] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4b397209-e0b0-4739-afc2-198208fd9eb6 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] Lock "352ac7c3-17a8-4d7e-a66f-47ea7614892c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 745.817390] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4b397209-e0b0-4739-afc2-198208fd9eb6 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] Lock "352ac7c3-17a8-4d7e-a66f-47ea7614892c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 745.820023] env[63028]: INFO nova.compute.manager [None req-4b397209-e0b0-4739-afc2-198208fd9eb6 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] [instance: 352ac7c3-17a8-4d7e-a66f-47ea7614892c] Terminating instance [ 745.840309] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c3a09395-4017-42a7-a909-5c9706ea7012 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Releasing lock "refresh_cache-1316318e-8dcf-4ac2-b40a-6a3ab6964997" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 745.840644] env[63028]: DEBUG nova.compute.manager [None req-c3a09395-4017-42a7-a909-5c9706ea7012 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 1316318e-8dcf-4ac2-b40a-6a3ab6964997] Instance network_info: |[{"id": "9917de69-098c-41fd-8a7e-63885001786a", "address": "fa:16:3e:06:ad:05", 
"network": {"id": "47c482bc-2ff1-431d-8910-0bf36def79a2", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.196", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "96661ac8d4f04d6e97eea4809b444133", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56136ef6-99d7-4562-9a9f-d66fec951c5c", "external-id": "nsx-vlan-transportzone-32", "segmentation_id": 32, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9917de69-09", "ovs_interfaceid": "9917de69-098c-41fd-8a7e-63885001786a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 745.841137] env[63028]: DEBUG oslo_concurrency.lockutils [req-fdf3333f-0a4f-437c-89e9-af53b9e1fac1 req-c9fed868-6019-4a77-9a7d-d298635d1a23 service nova] Acquired lock "refresh_cache-1316318e-8dcf-4ac2-b40a-6a3ab6964997" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 745.841325] env[63028]: DEBUG nova.network.neutron [req-fdf3333f-0a4f-437c-89e9-af53b9e1fac1 req-c9fed868-6019-4a77-9a7d-d298635d1a23 service nova] [instance: 1316318e-8dcf-4ac2-b40a-6a3ab6964997] Refreshing network info cache for port 9917de69-098c-41fd-8a7e-63885001786a {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 745.842538] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c3a09395-4017-42a7-a909-5c9706ea7012 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 1316318e-8dcf-4ac2-b40a-6a3ab6964997] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:06:ad:05', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '56136ef6-99d7-4562-9a9f-d66fec951c5c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9917de69-098c-41fd-8a7e-63885001786a', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 745.854521] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3a09395-4017-42a7-a909-5c9706ea7012 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Creating folder: Project (10a088f86d7a4b4db39113824aec83b7). Parent ref: group-v550570. 
{{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 745.855967] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-eb474997-a0b9-4f8b-bbcf-366acf3a96af {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.861751] env[63028]: DEBUG oslo_concurrency.lockutils [req-735b5e7d-553a-455d-90cd-ded12a1bb8de req-b585002f-f91a-4308-b30e-e94de65a50ce service nova] Releasing lock "refresh_cache-c06813c4-472d-4bf9-84ec-0d01306bcd48" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 745.862225] env[63028]: DEBUG nova.compute.manager [req-735b5e7d-553a-455d-90cd-ded12a1bb8de req-b585002f-f91a-4308-b30e-e94de65a50ce service nova] [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] Received event network-changed-f8cad445-9a0b-4d25-84a3-df0521f45d9f {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 745.862553] env[63028]: DEBUG nova.compute.manager [req-735b5e7d-553a-455d-90cd-ded12a1bb8de req-b585002f-f91a-4308-b30e-e94de65a50ce service nova] [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] Refreshing instance network info cache due to event network-changed-f8cad445-9a0b-4d25-84a3-df0521f45d9f. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 745.862933] env[63028]: DEBUG oslo_concurrency.lockutils [req-735b5e7d-553a-455d-90cd-ded12a1bb8de req-b585002f-f91a-4308-b30e-e94de65a50ce service nova] Acquiring lock "refresh_cache-600195de-ceb4-41a6-9ade-dda8b898e4db" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 745.863257] env[63028]: DEBUG oslo_concurrency.lockutils [req-735b5e7d-553a-455d-90cd-ded12a1bb8de req-b585002f-f91a-4308-b30e-e94de65a50ce service nova] Acquired lock "refresh_cache-600195de-ceb4-41a6-9ade-dda8b898e4db" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 745.863590] env[63028]: DEBUG nova.network.neutron [req-735b5e7d-553a-455d-90cd-ded12a1bb8de req-b585002f-f91a-4308-b30e-e94de65a50ce service nova] [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] Refreshing network info cache for port f8cad445-9a0b-4d25-84a3-df0521f45d9f {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 745.871392] env[63028]: DEBUG oslo_concurrency.lockutils [None req-43b6e421-50ad-4acf-9972-89c555d7d902 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] Acquiring lock "refresh_cache-0e07a6cd-8c99-408d-95ba-63f7839c327f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 745.874885] env[63028]: DEBUG oslo_concurrency.lockutils [None req-43b6e421-50ad-4acf-9972-89c555d7d902 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] Acquired lock "refresh_cache-0e07a6cd-8c99-408d-95ba-63f7839c327f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 745.888289] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-c3a09395-4017-42a7-a909-5c9706ea7012 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Created folder: Project (10a088f86d7a4b4db39113824aec83b7) in parent group-v550570. 
[ 745.888289] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3a09395-4017-42a7-a909-5c9706ea7012 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Creating folder: Instances. Parent ref: group-v550711. {{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 745.888289] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-aff91ac8-5330-4844-aee6-eb4e9da78cd9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.904020] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-c3a09395-4017-42a7-a909-5c9706ea7012 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Created folder: Instances in parent group-v550711. [ 745.904020] env[63028]: DEBUG oslo.service.loopingcall [None req-c3a09395-4017-42a7-a909-5c9706ea7012 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 745.904020] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1316318e-8dcf-4ac2-b40a-6a3ab6964997] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 745.904020] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-971a407f-6aee-4758-9674-6016e0cf7c53 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.927942] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 745.927942] env[63028]: value = "task-2735347" [ 745.927942] env[63028]: _type = "Task" [ 745.927942] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.938968] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735347, 'name': CreateVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.048698] env[63028]: INFO nova.compute.manager [None req-3ea37b67-73e0-4219-b828-a126fbc237e2 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] [instance: e2d39c43-6666-4fda-b8e2-485399c59e46] Took 57.77 seconds to build instance. [ 746.058303] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a6e1cdda-f79c-4f40-b760-2ed696cbf7f7 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Lock "50e4934b-b9b1-4887-b5d1-95a37fbf4c41" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 86.354s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 746.058303] env[63028]: DEBUG oslo_vmware.api [None req-66952caa-4d08-4457-8103-f1724319cde7 tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] Task: {'id': task-2735342, 'name': RelocateVM_Task} progress is 20%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.148861] env[63028]: DEBUG nova.compute.utils [None req-0845dbb5-9e8a-45d4-8122-206916da704e tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 746.150379] env[63028]: DEBUG nova.compute.manager [None req-0845dbb5-9e8a-45d4-8122-206916da704e tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 5982cd5d-abf1-42d4-bb44-8d79de599f11] Allocating IP information in the background. {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 746.150635] env[63028]: DEBUG nova.network.neutron [None req-0845dbb5-9e8a-45d4-8122-206916da704e tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 5982cd5d-abf1-42d4-bb44-8d79de599f11] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 746.266989] env[63028]: DEBUG oslo_vmware.api [None req-74e9af90-8e58-4741-baf4-974da139bc79 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Task: {'id': task-2735344, 'name': PowerOnVM_Task, 'duration_secs': 0.471142} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 746.266989] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-74e9af90-8e58-4741-baf4-974da139bc79 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 6e0959ac-8fca-47eb-b501-b50a3e9f025a] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 746.266989] env[63028]: DEBUG nova.compute.manager [None req-74e9af90-8e58-4741-baf4-974da139bc79 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 6e0959ac-8fca-47eb-b501-b50a3e9f025a] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 746.267931] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f04645a0-1e74-4c3d-aaf4-34654d02e9eb {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.273062] env[63028]: DEBUG nova.policy [None req-0845dbb5-9e8a-45d4-8122-206916da704e tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '70f27358d4184787bd66379bf75fc4ed', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '10a088f86d7a4b4db39113824aec83b7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 746.324219] env[63028]: DEBUG nova.compute.manager [None req-4b397209-e0b0-4739-afc2-198208fd9eb6 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] [instance: 352ac7c3-17a8-4d7e-a66f-47ea7614892c] 
Start destroying the instance on the hypervisor. {{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 746.324508] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-4b397209-e0b0-4739-afc2-198208fd9eb6 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] [instance: 352ac7c3-17a8-4d7e-a66f-47ea7614892c] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 746.325393] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d7e17f0-5d57-4e1a-833e-a81c1b7d0520 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.334490] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b397209-e0b0-4739-afc2-198208fd9eb6 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] [instance: 352ac7c3-17a8-4d7e-a66f-47ea7614892c] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 746.334810] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-95317efb-fdf7-4673-ac48-54de46ccd059 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.342282] env[63028]: DEBUG oslo_vmware.api [None req-4b397209-e0b0-4739-afc2-198208fd9eb6 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] Waiting for the task: (returnval){ [ 746.342282] env[63028]: value = "task-2735348" [ 746.342282] env[63028]: _type = "Task" [ 746.342282] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 746.353784] env[63028]: DEBUG oslo_vmware.api [None req-4b397209-e0b0-4739-afc2-198208fd9eb6 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] Task: {'id': task-2735348, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.402530] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-093af5c2-3354-4012-ab41-e1a65935f3a3 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] [instance: 8c7c8713-d5d7-490e-aba5-25d98bfbfaa0] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 746.402756] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-093af5c2-3354-4012-ab41-e1a65935f3a3 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] [instance: 8c7c8713-d5d7-490e-aba5-25d98bfbfaa0] Deleting contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 746.402975] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-093af5c2-3354-4012-ab41-e1a65935f3a3 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] Deleting the datastore file [datastore1] 8c7c8713-d5d7-490e-aba5-25d98bfbfaa0 {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 746.403321] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a3cbc3a1-ac8d-40e6-8f9a-3f3ca5b54de9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.412464] env[63028]: DEBUG oslo_vmware.api [None req-093af5c2-3354-4012-ab41-e1a65935f3a3 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] Waiting for the task: (returnval){ [ 746.412464] env[63028]: value = "task-2735349" [ 746.412464] env[63028]: _type = "Task" [ 746.412464] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 746.422307] env[63028]: DEBUG oslo_vmware.api [None req-093af5c2-3354-4012-ab41-e1a65935f3a3 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] Task: {'id': task-2735349, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.437860] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735347, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.519753] env[63028]: INFO nova.compute.manager [None req-8e94b42e-cf5b-4db5-8cf9-7f06a8d06718 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 50e4934b-b9b1-4887-b5d1-95a37fbf4c41] Rebuilding instance [ 746.557933] env[63028]: DEBUG oslo_concurrency.lockutils [None req-3ea37b67-73e0-4219-b828-a126fbc237e2 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Lock "e2d39c43-6666-4fda-b8e2-485399c59e46" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 104.010s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 746.558259] env[63028]: DEBUG oslo_vmware.api [None req-66952caa-4d08-4457-8103-f1724319cde7 tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] Task: {'id': task-2735342, 'name': RelocateVM_Task, 'duration_secs': 0.572329} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 746.560614] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-66952caa-4d08-4457-8103-f1724319cde7 tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] Volume attach. Driver type: vmdk {{(pid=63028) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 746.560614] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-66952caa-4d08-4457-8103-f1724319cde7 tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-550662', 'volume_id': '4246155e-0977-4f2a-b135-72a3849826ce', 'name': 'volume-4246155e-0977-4f2a-b135-72a3849826ce', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '600195de-ceb4-41a6-9ade-dda8b898e4db', 'attached_at': '', 'detached_at': '', 'volume_id': '4246155e-0977-4f2a-b135-72a3849826ce', 'serial': '4246155e-0977-4f2a-b135-72a3849826ce'} {{(pid=63028) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 746.561310] env[63028]: DEBUG nova.compute.manager [None req-be8438c3-6eec-42fd-9020-8898f3ea9278 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] [instance: 7e914e49-0d70-4024-940b-ad2a15e9dff7] Starting instance... 
{{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 746.564513] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e91fe88f-603d-40df-95b6-aec200d0f515 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.599720] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-148ea499-2166-4e03-862c-8374b96b56e2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.603824] env[63028]: DEBUG nova.compute.manager [None req-8e94b42e-cf5b-4db5-8cf9-7f06a8d06718 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 50e4934b-b9b1-4887-b5d1-95a37fbf4c41] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 746.604664] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33a6329d-310a-46ee-b7e3-e3871cffdff8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.640099] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-66952caa-4d08-4457-8103-f1724319cde7 tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] Reconfiguring VM instance instance-0000002c to attach disk [datastore2] volume-4246155e-0977-4f2a-b135-72a3849826ce/volume-4246155e-0977-4f2a-b135-72a3849826ce.vmdk or device None with type thin {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 746.644178] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ffe4f366-39f3-4f5a-bb15-b4174d7a34f7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.673020] env[63028]: DEBUG nova.compute.manager [None req-0845dbb5-9e8a-45d4-8122-206916da704e tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 5982cd5d-abf1-42d4-bb44-8d79de599f11] Start building block device mappings for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 746.683581] env[63028]: DEBUG oslo_vmware.api [None req-66952caa-4d08-4457-8103-f1724319cde7 tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] Waiting for the task: (returnval){ [ 746.683581] env[63028]: value = "task-2735350" [ 746.683581] env[63028]: _type = "Task" [ 746.683581] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 746.696285] env[63028]: DEBUG oslo_vmware.api [None req-66952caa-4d08-4457-8103-f1724319cde7 tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] Task: {'id': task-2735350, 'name': ReconfigVM_Task} progress is 5%. 
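Note: the _attach_volume_vmdk record above prints the Cinder connection_info dict verbatim, and the ReconfigVM_Task then attaches the vmdk it names. A small sketch (an assumption, not Nova's code; vmdk_path is a hypothetical helper) of pulling out the fields that attach step uses:

    # Sketch (assumption, not Nova's code) of the fields the ReconfigVM_Task
    # uses from the connection_info dict printed by _attach_volume_vmdk above.
    connection_info = {
        'driver_volume_type': 'vmdk',
        'data': {
            'volume': 'vm-550662',
            'volume_id': '4246155e-0977-4f2a-b135-72a3849826ce',
            'name': 'volume-4246155e-0977-4f2a-b135-72a3849826ce',
            'access_mode': 'rw',
        },
    }

    def vmdk_path(datastore, data):
        # hypothetical helper: "[datastore2] volume-<id>/volume-<id>.vmdk"
        return '[%s] %s/%s.vmdk' % (datastore, data['name'], data['name'])

    assert connection_info['driver_volume_type'] == 'vmdk'
    print(vmdk_path('datastore2', connection_info['data']))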
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.700148] env[63028]: DEBUG nova.network.neutron [None req-43b6e421-50ad-4acf-9972-89c555d7d902 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] [instance: 0e07a6cd-8c99-408d-95ba-63f7839c327f] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 746.780160] env[63028]: DEBUG nova.network.neutron [req-fdf3333f-0a4f-437c-89e9-af53b9e1fac1 req-c9fed868-6019-4a77-9a7d-d298635d1a23 service nova] [instance: 1316318e-8dcf-4ac2-b40a-6a3ab6964997] Updated VIF entry in instance network info cache for port 9917de69-098c-41fd-8a7e-63885001786a. {{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 746.780642] env[63028]: DEBUG nova.network.neutron [req-fdf3333f-0a4f-437c-89e9-af53b9e1fac1 req-c9fed868-6019-4a77-9a7d-d298635d1a23 service nova] [instance: 1316318e-8dcf-4ac2-b40a-6a3ab6964997] Updating instance_info_cache with network_info: [{"id": "9917de69-098c-41fd-8a7e-63885001786a", "address": "fa:16:3e:06:ad:05", "network": {"id": "47c482bc-2ff1-431d-8910-0bf36def79a2", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.196", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "96661ac8d4f04d6e97eea4809b444133", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56136ef6-99d7-4562-9a9f-d66fec951c5c", "external-id": "nsx-vlan-transportzone-32", "segmentation_id": 32, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9917de69-09", "ovs_interfaceid": "9917de69-098c-41fd-8a7e-63885001786a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 746.854152] env[63028]: DEBUG oslo_vmware.api [None req-4b397209-e0b0-4739-afc2-198208fd9eb6 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] Task: {'id': task-2735348, 'name': PowerOffVM_Task, 'duration_secs': 0.307173} completed successfully. 
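Note: the instance_info_cache entries above are serialized VIF models. A standalone sketch of reading the fixed IPs and the OVS interface id out of one entry (the vif literal below is trimmed from the record above; the access pattern is an illustration, not Nova's network model code):

    # Standalone sketch: reading the fixed IPs and OVS interface id out of one
    # VIF entry; the literal below is trimmed from the record above.
    vif = {
        "id": "9917de69-098c-41fd-8a7e-63885001786a",
        "address": "fa:16:3e:06:ad:05",
        "network": {
            "subnets": [{
                "cidr": "192.168.233.0/24",
                "ips": [{"address": "192.168.233.196", "type": "fixed",
                         "floating_ips": []}],
            }],
        },
        "type": "ovs",
        "devname": "tap9917de69-09",
        "ovs_interfaceid": "9917de69-098c-41fd-8a7e-63885001786a",
    }

    fixed_ips = [ip["address"]
                 for subnet in vif["network"]["subnets"]
                 for ip in subnet["ips"]
                 if ip["type"] == "fixed"]
    print(fixed_ips, vif["ovs_interfaceid"])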
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 746.854422] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b397209-e0b0-4739-afc2-198208fd9eb6 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] [instance: 352ac7c3-17a8-4d7e-a66f-47ea7614892c] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 746.854590] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-4b397209-e0b0-4739-afc2-198208fd9eb6 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] [instance: 352ac7c3-17a8-4d7e-a66f-47ea7614892c] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 746.854876] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1a80269f-62c7-4e8f-a5ba-2c0cdf5fce0e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.925744] env[63028]: DEBUG oslo_vmware.api [None req-093af5c2-3354-4012-ab41-e1a65935f3a3 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] Task: {'id': task-2735349, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.136996} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 746.925992] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-093af5c2-3354-4012-ab41-e1a65935f3a3 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 746.926196] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-093af5c2-3354-4012-ab41-e1a65935f3a3 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] [instance: 8c7c8713-d5d7-490e-aba5-25d98bfbfaa0] Deleted contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 746.926416] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-093af5c2-3354-4012-ab41-e1a65935f3a3 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] [instance: 8c7c8713-d5d7-490e-aba5-25d98bfbfaa0] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 746.926664] env[63028]: INFO nova.compute.manager [None req-093af5c2-3354-4012-ab41-e1a65935f3a3 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] [instance: 8c7c8713-d5d7-490e-aba5-25d98bfbfaa0] Took 3.05 seconds to destroy the instance on the hypervisor. [ 746.926923] env[63028]: DEBUG oslo.service.loopingcall [None req-093af5c2-3354-4012-ab41-e1a65935f3a3 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
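Note: the records above trace the VMware destroy path (power off, unregister, delete the datastore contents, then wait on network deallocation). A compressed sketch of that ordering; the helpers below are hypothetical stand-ins, not Nova's vmops/vm_util functions, and the UUID is only an example argument:

    # Compressed sketch of the destroy ordering visible above; the helpers are
    # hypothetical stand-ins, not Nova's vmops/vm_util functions.
    def power_off(vm): print('PowerOffVM_Task for %s' % vm)
    def unregister(vm): print('UnregisterVM for %s' % vm)
    def delete_files(vm, ds): print('DeleteDatastoreFile_Task [%s] %s' % (ds, vm))
    def deallocate_net(vm): print('deallocate_for_instance() for %s' % vm)

    def destroy(vm, datastore):
        power_off(vm)                # skipped when the VM is already off
        unregister(vm)               # remove it from the vCenter inventory
        delete_files(vm, datastore)  # drop the instance directory contents
        deallocate_net(vm)           # finally release the Neutron ports

    destroy('352ac7c3-17a8-4d7e-a66f-47ea7614892c', 'datastore2')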
{{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 746.927123] env[63028]: DEBUG nova.compute.manager [-] [instance: 8c7c8713-d5d7-490e-aba5-25d98bfbfaa0] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 746.929262] env[63028]: DEBUG nova.network.neutron [-] [instance: 8c7c8713-d5d7-490e-aba5-25d98bfbfaa0] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 746.933655] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-4b397209-e0b0-4739-afc2-198208fd9eb6 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] [instance: 352ac7c3-17a8-4d7e-a66f-47ea7614892c] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 746.933704] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-4b397209-e0b0-4739-afc2-198208fd9eb6 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] [instance: 352ac7c3-17a8-4d7e-a66f-47ea7614892c] Deleting contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 746.933939] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-4b397209-e0b0-4739-afc2-198208fd9eb6 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] Deleting the datastore file [datastore2] 352ac7c3-17a8-4d7e-a66f-47ea7614892c {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 746.934942] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-37b7edec-b23d-4867-9790-02587ac0811e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.942696] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735347, 'name': CreateVM_Task, 'duration_secs': 0.748058} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 746.943469] env[63028]: DEBUG nova.network.neutron [req-735b5e7d-553a-455d-90cd-ded12a1bb8de req-b585002f-f91a-4308-b30e-e94de65a50ce service nova] [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] Updated VIF entry in instance network info cache for port f8cad445-9a0b-4d25-84a3-df0521f45d9f. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 746.943798] env[63028]: DEBUG nova.network.neutron [req-735b5e7d-553a-455d-90cd-ded12a1bb8de req-b585002f-f91a-4308-b30e-e94de65a50ce service nova] [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] Updating instance_info_cache with network_info: [{"id": "f8cad445-9a0b-4d25-84a3-df0521f45d9f", "address": "fa:16:3e:35:4f:e7", "network": {"id": "c71964ff-29fc-4ff0-af97-0c3a5c1b0f0f", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-584692412-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7f16a87f7b9b420eb1310ad086f4124c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "11032cc2-b275-48d2-9c40-9455ea7d49e3", "external-id": "nsx-vlan-transportzone-226", "segmentation_id": 226, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf8cad445-9a", "ovs_interfaceid": "f8cad445-9a0b-4d25-84a3-df0521f45d9f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 746.945519] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1316318e-8dcf-4ac2-b40a-6a3ab6964997] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 746.946409] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c3a09395-4017-42a7-a909-5c9706ea7012 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 746.946660] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c3a09395-4017-42a7-a909-5c9706ea7012 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 746.946891] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c3a09395-4017-42a7-a909-5c9706ea7012 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 746.947152] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fd1870a8-b460-4d78-a3dd-11ab464547bb {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.951367] env[63028]: DEBUG oslo_vmware.api [None req-4b397209-e0b0-4739-afc2-198208fd9eb6 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] 
Waiting for the task: (returnval){ [ 746.951367] env[63028]: value = "task-2735352" [ 746.951367] env[63028]: _type = "Task" [ 746.951367] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 746.952118] env[63028]: DEBUG oslo_vmware.api [None req-c3a09395-4017-42a7-a909-5c9706ea7012 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Waiting for the task: (returnval){ [ 746.952118] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52cfff27-56d1-7398-e50c-0c64a9615d16" [ 746.952118] env[63028]: _type = "Task" [ 746.952118] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 746.967705] env[63028]: DEBUG oslo_vmware.api [None req-4b397209-e0b0-4739-afc2-198208fd9eb6 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] Task: {'id': task-2735352, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.972104] env[63028]: DEBUG oslo_vmware.api [None req-c3a09395-4017-42a7-a909-5c9706ea7012 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52cfff27-56d1-7398-e50c-0c64a9615d16, 'name': SearchDatastore_Task, 'duration_secs': 0.011394} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 746.972104] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c3a09395-4017-42a7-a909-5c9706ea7012 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 746.972104] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c3a09395-4017-42a7-a909-5c9706ea7012 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 1316318e-8dcf-4ac2-b40a-6a3ab6964997] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 746.972104] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c3a09395-4017-42a7-a909-5c9706ea7012 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 746.972104] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c3a09395-4017-42a7-a909-5c9706ea7012 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 746.972667] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-c3a09395-4017-42a7-a909-5c9706ea7012 tempest-ListImageFiltersTestJSON-852699088 
tempest-ListImageFiltersTestJSON-852699088-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 746.977777] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-91989a2a-31eb-40cc-a978-185967f81d74 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.980929] env[63028]: DEBUG nova.compute.manager [req-35938219-4137-43bf-bb87-acea500834ca req-a0399376-6b95-4fcf-81d0-20164e61be88 service nova] [instance: 0e07a6cd-8c99-408d-95ba-63f7839c327f] Received event network-changed-cc6af35d-7e46-40e6-bc97-40efda1ab807 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 746.981057] env[63028]: DEBUG nova.compute.manager [req-35938219-4137-43bf-bb87-acea500834ca req-a0399376-6b95-4fcf-81d0-20164e61be88 service nova] [instance: 0e07a6cd-8c99-408d-95ba-63f7839c327f] Refreshing instance network info cache due to event network-changed-cc6af35d-7e46-40e6-bc97-40efda1ab807. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 746.981250] env[63028]: DEBUG oslo_concurrency.lockutils [req-35938219-4137-43bf-bb87-acea500834ca req-a0399376-6b95-4fcf-81d0-20164e61be88 service nova] Acquiring lock "refresh_cache-0e07a6cd-8c99-408d-95ba-63f7839c327f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 746.990134] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-c3a09395-4017-42a7-a909-5c9706ea7012 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 746.990330] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c3a09395-4017-42a7-a909-5c9706ea7012 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 746.991280] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-75621db4-3cce-40a9-b6ba-d0c9f8d58731 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.997745] env[63028]: DEBUG oslo_vmware.api [None req-c3a09395-4017-42a7-a909-5c9706ea7012 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Waiting for the task: (returnval){ [ 746.997745] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52dfb46e-d731-e3fe-56d5-dfc20448a144" [ 746.997745] env[63028]: _type = "Task" [ 746.997745] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 747.006398] env[63028]: DEBUG oslo_vmware.api [None req-c3a09395-4017-42a7-a909-5c9706ea7012 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52dfb46e-d731-e3fe-56d5-dfc20448a144, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.014936] env[63028]: DEBUG nova.network.neutron [None req-0845dbb5-9e8a-45d4-8122-206916da704e tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 5982cd5d-abf1-42d4-bb44-8d79de599f11] Successfully created port: 968c9c64-3e8d-442c-8090-cc34f396bc28 {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 747.062249] env[63028]: DEBUG nova.compute.manager [None req-d9ad4531-4478-4b68-8e6a-ce52da2b3c63 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] [instance: 15326f55-2db8-47c3-b1fd-ce8ba1174c79] Starting instance... {{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 747.092720] env[63028]: DEBUG oslo_concurrency.lockutils [None req-be8438c3-6eec-42fd-9020-8898f3ea9278 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 747.166713] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f13d02a8-8d3e-4b78-b445-d8381137a85a tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Acquiring lock "e2d39c43-6666-4fda-b8e2-485399c59e46" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 747.167836] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f13d02a8-8d3e-4b78-b445-d8381137a85a tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Lock "e2d39c43-6666-4fda-b8e2-485399c59e46" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 747.167836] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f13d02a8-8d3e-4b78-b445-d8381137a85a tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Acquiring lock "e2d39c43-6666-4fda-b8e2-485399c59e46-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 747.167836] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f13d02a8-8d3e-4b78-b445-d8381137a85a tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Lock "e2d39c43-6666-4fda-b8e2-485399c59e46-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 747.167836] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f13d02a8-8d3e-4b78-b445-d8381137a85a tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Lock "e2d39c43-6666-4fda-b8e2-485399c59e46-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 747.173301] 
env[63028]: INFO nova.compute.manager [None req-f13d02a8-8d3e-4b78-b445-d8381137a85a tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] [instance: e2d39c43-6666-4fda-b8e2-485399c59e46] Terminating instance [ 747.195492] env[63028]: DEBUG oslo_vmware.api [None req-66952caa-4d08-4457-8103-f1724319cde7 tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] Task: {'id': task-2735350, 'name': ReconfigVM_Task, 'duration_secs': 0.424787} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 747.195607] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-66952caa-4d08-4457-8103-f1724319cde7 tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] Reconfigured VM instance instance-0000002c to attach disk [datastore2] volume-4246155e-0977-4f2a-b135-72a3849826ce/volume-4246155e-0977-4f2a-b135-72a3849826ce.vmdk or device None with type thin {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 747.203749] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-87b06669-1fe5-4753-8f1c-3c47fadda6fe {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.226860] env[63028]: DEBUG oslo_vmware.api [None req-66952caa-4d08-4457-8103-f1724319cde7 tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] Waiting for the task: (returnval){ [ 747.226860] env[63028]: value = "task-2735353" [ 747.226860] env[63028]: _type = "Task" [ 747.226860] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 747.238951] env[63028]: DEBUG oslo_vmware.api [None req-66952caa-4d08-4457-8103-f1724319cde7 tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] Task: {'id': task-2735353, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.283425] env[63028]: DEBUG oslo_concurrency.lockutils [req-fdf3333f-0a4f-437c-89e9-af53b9e1fac1 req-c9fed868-6019-4a77-9a7d-d298635d1a23 service nova] Releasing lock "refresh_cache-1316318e-8dcf-4ac2-b40a-6a3ab6964997" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 747.378036] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3932d179-5c51-43e9-838d-2c65262f61a4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.389703] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-868e045b-6174-4fd0-9bed-1419865f6afd {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.430251] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a58eb8e0-9f87-4ca6-abec-5c580cfe6234 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.439163] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f077b527-b4e9-4d86-8fdb-3f7fe3985e71 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.453869] env[63028]: DEBUG oslo_concurrency.lockutils [req-735b5e7d-553a-455d-90cd-ded12a1bb8de req-b585002f-f91a-4308-b30e-e94de65a50ce service nova] Releasing lock "refresh_cache-600195de-ceb4-41a6-9ade-dda8b898e4db" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 747.456157] env[63028]: DEBUG nova.compute.provider_tree [None req-c2083659-4645-4440-9683-4af8a1564415 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 747.465679] env[63028]: DEBUG oslo_vmware.api [None req-4b397209-e0b0-4739-afc2-198208fd9eb6 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] Task: {'id': task-2735352, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.289804} completed successfully. 
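Note: the "Acquiring lock … acquired … waited/held" bookkeeping in the records above is emitted by oslo.concurrency's lockutils. A minimal usage sketch, assuming oslo.concurrency is installed; the lock names are copied from the records above and the function body is a placeholder:

    from oslo_concurrency import lockutils

    # Context-manager form: produces the Acquiring/Acquired/Releasing lines.
    with lockutils.lock('refresh_cache-0e07a6cd-8c99-408d-95ba-63f7839c327f'):
        pass  # refresh the instance network info cache here

    # Decorator form: the "acquired by ... waited/held" bookkeeping comes from
    # the wrapper around the decorated function.
    @lockutils.synchronized('compute_resources')
    def instance_claim():
        pass  # resource-tracker work guarded by the compute_resources lock

    instance_claim()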
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 747.465956] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-4b397209-e0b0-4739-afc2-198208fd9eb6 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 747.466157] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-4b397209-e0b0-4739-afc2-198208fd9eb6 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] [instance: 352ac7c3-17a8-4d7e-a66f-47ea7614892c] Deleted contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 747.466335] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-4b397209-e0b0-4739-afc2-198208fd9eb6 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] [instance: 352ac7c3-17a8-4d7e-a66f-47ea7614892c] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 747.466528] env[63028]: INFO nova.compute.manager [None req-4b397209-e0b0-4739-afc2-198208fd9eb6 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] [instance: 352ac7c3-17a8-4d7e-a66f-47ea7614892c] Took 1.14 seconds to destroy the instance on the hypervisor. [ 747.466767] env[63028]: DEBUG oslo.service.loopingcall [None req-4b397209-e0b0-4739-afc2-198208fd9eb6 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 747.466956] env[63028]: DEBUG nova.compute.manager [-] [instance: 352ac7c3-17a8-4d7e-a66f-47ea7614892c] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 747.467061] env[63028]: DEBUG nova.network.neutron [-] [instance: 352ac7c3-17a8-4d7e-a66f-47ea7614892c] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 747.508144] env[63028]: DEBUG oslo_vmware.api [None req-c3a09395-4017-42a7-a909-5c9706ea7012 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52dfb46e-d731-e3fe-56d5-dfc20448a144, 'name': SearchDatastore_Task, 'duration_secs': 0.010704} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 747.511240] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eb295c5d-9887-4c7b-87c2-5f9a55f7c9af {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.518014] env[63028]: DEBUG oslo_vmware.api [None req-c3a09395-4017-42a7-a909-5c9706ea7012 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Waiting for the task: (returnval){ [ 747.518014] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]529be441-6a9c-df0b-da11-e0e5856db522" [ 747.518014] env[63028]: _type = "Task" [ 747.518014] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 747.527577] env[63028]: DEBUG oslo_vmware.api [None req-c3a09395-4017-42a7-a909-5c9706ea7012 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]529be441-6a9c-df0b-da11-e0e5856db522, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.584959] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d9ad4531-4478-4b68-8e6a-ce52da2b3c63 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 747.677051] env[63028]: DEBUG nova.compute.manager [None req-f13d02a8-8d3e-4b78-b445-d8381137a85a tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] [instance: e2d39c43-6666-4fda-b8e2-485399c59e46] Start destroying the instance on the hypervisor. {{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 747.677304] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-f13d02a8-8d3e-4b78-b445-d8381137a85a tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] [instance: e2d39c43-6666-4fda-b8e2-485399c59e46] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 747.678525] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22cb9618-add0-4bb8-9453-997fe84e475e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.683559] env[63028]: DEBUG nova.compute.manager [None req-0845dbb5-9e8a-45d4-8122-206916da704e tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 5982cd5d-abf1-42d4-bb44-8d79de599f11] Start spawning the instance on the hypervisor. 
{{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 747.685755] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e94b42e-cf5b-4db5-8cf9-7f06a8d06718 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 50e4934b-b9b1-4887-b5d1-95a37fbf4c41] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 747.686584] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-33b20b79-9af9-4a2c-af56-47c7ae4304f3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.692081] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-f13d02a8-8d3e-4b78-b445-d8381137a85a tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] [instance: e2d39c43-6666-4fda-b8e2-485399c59e46] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 747.692532] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1f22f24c-381d-40ce-a3de-1e0108d8bc19 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.698330] env[63028]: DEBUG oslo_vmware.api [None req-8e94b42e-cf5b-4db5-8cf9-7f06a8d06718 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Waiting for the task: (returnval){ [ 747.698330] env[63028]: value = "task-2735354" [ 747.698330] env[63028]: _type = "Task" [ 747.698330] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 747.703590] env[63028]: DEBUG oslo_vmware.api [None req-f13d02a8-8d3e-4b78-b445-d8381137a85a tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Waiting for the task: (returnval){ [ 747.703590] env[63028]: value = "task-2735355" [ 747.703590] env[63028]: _type = "Task" [ 747.703590] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 747.716045] env[63028]: DEBUG oslo_vmware.api [None req-8e94b42e-cf5b-4db5-8cf9-7f06a8d06718 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2735354, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.724823] env[63028]: DEBUG oslo_vmware.api [None req-f13d02a8-8d3e-4b78-b445-d8381137a85a tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Task: {'id': task-2735355, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.727856] env[63028]: DEBUG nova.virt.hardware [None req-0845dbb5-9e8a-45d4-8122-206916da704e tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 747.728034] env[63028]: DEBUG nova.virt.hardware [None req-0845dbb5-9e8a-45d4-8122-206916da704e tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 747.728394] env[63028]: DEBUG nova.virt.hardware [None req-0845dbb5-9e8a-45d4-8122-206916da704e tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 747.728712] env[63028]: DEBUG nova.virt.hardware [None req-0845dbb5-9e8a-45d4-8122-206916da704e tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 747.729018] env[63028]: DEBUG nova.virt.hardware [None req-0845dbb5-9e8a-45d4-8122-206916da704e tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 747.729080] env[63028]: DEBUG nova.virt.hardware [None req-0845dbb5-9e8a-45d4-8122-206916da704e tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 747.729682] env[63028]: DEBUG nova.virt.hardware [None req-0845dbb5-9e8a-45d4-8122-206916da704e tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 747.729682] env[63028]: DEBUG nova.virt.hardware [None req-0845dbb5-9e8a-45d4-8122-206916da704e tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 747.729916] env[63028]: 
DEBUG nova.virt.hardware [None req-0845dbb5-9e8a-45d4-8122-206916da704e tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 747.730119] env[63028]: DEBUG nova.virt.hardware [None req-0845dbb5-9e8a-45d4-8122-206916da704e tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 747.730302] env[63028]: DEBUG nova.virt.hardware [None req-0845dbb5-9e8a-45d4-8122-206916da704e tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 747.731498] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f946fd6-543d-45c0-9bb0-ea60bb2b558b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.746953] env[63028]: DEBUG oslo_vmware.api [None req-66952caa-4d08-4457-8103-f1724319cde7 tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] Task: {'id': task-2735353, 'name': ReconfigVM_Task, 'duration_secs': 0.205566} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 747.749450] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-66952caa-4d08-4457-8103-f1724319cde7 tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-550662', 'volume_id': '4246155e-0977-4f2a-b135-72a3849826ce', 'name': 'volume-4246155e-0977-4f2a-b135-72a3849826ce', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '600195de-ceb4-41a6-9ade-dda8b898e4db', 'attached_at': '', 'detached_at': '', 'volume_id': '4246155e-0977-4f2a-b135-72a3849826ce', 'serial': '4246155e-0977-4f2a-b135-72a3849826ce'} {{(pid=63028) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 747.750176] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-69f2c477-9c65-44dc-9cd3-fb46f0927fa7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.753307] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4edd7f1-fdbf-46a0-910e-e8b83e3dc247 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.777432] env[63028]: DEBUG oslo_vmware.api [None req-66952caa-4d08-4457-8103-f1724319cde7 tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] Waiting for the task: (returnval){ [ 747.777432] env[63028]: value = "task-2735356" [ 747.777432] env[63028]: _type = "Task" [ 747.777432] env[63028]: } to complete. 
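Note: the nova.virt.hardware records above enumerate which (sockets, cores, threads) splits can represent the flavor's single vCPU under the 65536/65536/65536 limits. A toy enumeration of the same idea (an assumption, not Nova's algorithm):

    # Toy enumeration (assumption, not nova.virt.hardware) of the
    # (sockets, cores, threads) combinations that exactly cover `vcpus`.
    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        topos = []
        for s in range(1, min(vcpus, max_sockets) + 1):
            for c in range(1, min(vcpus, max_cores) + 1):
                for t in range(1, min(vcpus, max_threads) + 1):
                    if s * c * t == vcpus:
                        topos.append((s, c, t))
        return topos

    # For the m1.nano flavor above (vcpus=1) the only split is 1:1:1, matching
    # "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]".
    print(possible_topologies(1))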
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 747.788801] env[63028]: DEBUG oslo_vmware.api [None req-66952caa-4d08-4457-8103-f1724319cde7 tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] Task: {'id': task-2735356, 'name': Rename_Task} progress is 6%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.789967] env[63028]: DEBUG nova.network.neutron [None req-43b6e421-50ad-4acf-9972-89c555d7d902 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] [instance: 0e07a6cd-8c99-408d-95ba-63f7839c327f] Updating instance_info_cache with network_info: [{"id": "cc6af35d-7e46-40e6-bc97-40efda1ab807", "address": "fa:16:3e:a3:ea:21", "network": {"id": "bbda1d1e-8f2d-4594-9711-57cd1f5c1d06", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-842445812-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}, {"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.185", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7ed58d3e63604c2fac29e5744fd7f0bc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0685bd0b-3dbf-4a06-951c-c6a4726dd4b0", "external-id": "nsx-vlan-transportzone-661", "segmentation_id": 661, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc6af35d-7e", "ovs_interfaceid": "cc6af35d-7e46-40e6-bc97-40efda1ab807", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 747.961663] env[63028]: DEBUG nova.scheduler.client.report [None req-c2083659-4645-4440-9683-4af8a1564415 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 748.031342] env[63028]: DEBUG oslo_vmware.api [None req-c3a09395-4017-42a7-a909-5c9706ea7012 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]529be441-6a9c-df0b-da11-e0e5856db522, 'name': SearchDatastore_Task, 'duration_secs': 0.015107} completed successfully. 
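Note: the scheduler report record above carries the provider inventory. A worked sketch with those numbers; treating usable capacity as (total - reserved) * allocation_ratio is stated as an assumption about the Placement convention, not quoted from the scheduler code:

    # Worked example with the inventory reported above; the capacity formula
    # is an assumption about the Placement convention.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)  # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0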
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 748.031342] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c3a09395-4017-42a7-a909-5c9706ea7012 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 748.031342] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3a09395-4017-42a7-a909-5c9706ea7012 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] 1316318e-8dcf-4ac2-b40a-6a3ab6964997/1316318e-8dcf-4ac2-b40a-6a3ab6964997.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 748.031342] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-848a7d2c-0a47-4132-9c7c-685643746f90 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.038734] env[63028]: DEBUG oslo_vmware.api [None req-c3a09395-4017-42a7-a909-5c9706ea7012 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Waiting for the task: (returnval){ [ 748.038734] env[63028]: value = "task-2735357" [ 748.038734] env[63028]: _type = "Task" [ 748.038734] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.048067] env[63028]: DEBUG oslo_vmware.api [None req-c3a09395-4017-42a7-a909-5c9706ea7012 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Task: {'id': task-2735357, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.133103] env[63028]: DEBUG nova.compute.manager [req-0f2e91b3-40a3-466e-931e-b07b20e2d934 req-946b4740-829c-4166-b65a-fc25225913bd service nova] [instance: 8c7c8713-d5d7-490e-aba5-25d98bfbfaa0] Received event network-vif-deleted-9197f89b-957f-4d27-a314-ca95bd44a77d {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 748.133103] env[63028]: INFO nova.compute.manager [req-0f2e91b3-40a3-466e-931e-b07b20e2d934 req-946b4740-829c-4166-b65a-fc25225913bd service nova] [instance: 8c7c8713-d5d7-490e-aba5-25d98bfbfaa0] Neutron deleted interface 9197f89b-957f-4d27-a314-ca95bd44a77d; detaching it from the instance and deleting it from the info cache [ 748.133103] env[63028]: DEBUG nova.network.neutron [req-0f2e91b3-40a3-466e-931e-b07b20e2d934 req-946b4740-829c-4166-b65a-fc25225913bd service nova] [instance: 8c7c8713-d5d7-490e-aba5-25d98bfbfaa0] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 748.206123] env[63028]: DEBUG oslo_vmware.api [None req-8e94b42e-cf5b-4db5-8cf9-7f06a8d06718 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2735354, 'name': PowerOffVM_Task, 'duration_secs': 0.24116} completed successfully. 
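Note: the CopyVirtualDisk_Task record above copies the cached image vmdk from devstack-image-cache_base into the per-instance directory. A sketch of how those "[datastore] path" strings are composed (ds_path is a hypothetical helper, not Nova's ds_util API):

    # Sketch (assumption; ds_path is a hypothetical helper) of the
    # "[datastore] path" strings seen in the CopyVirtualDisk_Task record above.
    def ds_path(datastore, *parts):
        return '[%s] %s' % (datastore, '/'.join(parts))

    image_id = 'f2ba2026-3f4b-431c-97c1-c4ba582a9907'
    instance_uuid = '1316318e-8dcf-4ac2-b40a-6a3ab6964997'

    cached = ds_path('datastore1', 'devstack-image-cache_base',
                     image_id, image_id + '.vmdk')
    target = ds_path('datastore1', instance_uuid, instance_uuid + '.vmdk')
    print(cached, '->', target)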
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 748.209202] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e94b42e-cf5b-4db5-8cf9-7f06a8d06718 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 50e4934b-b9b1-4887-b5d1-95a37fbf4c41] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 748.209443] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-8e94b42e-cf5b-4db5-8cf9-7f06a8d06718 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 50e4934b-b9b1-4887-b5d1-95a37fbf4c41] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 748.210849] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6612688-79fd-4eee-9a6f-710941b9427b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.219485] env[63028]: DEBUG oslo_vmware.api [None req-f13d02a8-8d3e-4b78-b445-d8381137a85a tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Task: {'id': task-2735355, 'name': PowerOffVM_Task, 'duration_secs': 0.219771} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 748.221443] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-f13d02a8-8d3e-4b78-b445-d8381137a85a tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] [instance: e2d39c43-6666-4fda-b8e2-485399c59e46] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 748.222858] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-f13d02a8-8d3e-4b78-b445-d8381137a85a tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] [instance: e2d39c43-6666-4fda-b8e2-485399c59e46] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 748.223524] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-8e94b42e-cf5b-4db5-8cf9-7f06a8d06718 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 50e4934b-b9b1-4887-b5d1-95a37fbf4c41] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 748.223776] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f3180230-ebb8-4116-be4f-fd3fa5b6b13c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.225396] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9865fcbf-78b1-4f98-a96b-b68a25ddc481 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.288682] env[63028]: DEBUG oslo_vmware.api [None req-66952caa-4d08-4457-8103-f1724319cde7 tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] Task: {'id': task-2735356, 'name': Rename_Task, 'duration_secs': 0.169797} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 748.288916] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-66952caa-4d08-4457-8103-f1724319cde7 tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 748.289304] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-93f41671-b4dd-4f96-a191-0be796573e8f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.293063] env[63028]: DEBUG oslo_concurrency.lockutils [None req-43b6e421-50ad-4acf-9972-89c555d7d902 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] Releasing lock "refresh_cache-0e07a6cd-8c99-408d-95ba-63f7839c327f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 748.293381] env[63028]: DEBUG nova.compute.manager [None req-43b6e421-50ad-4acf-9972-89c555d7d902 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] [instance: 0e07a6cd-8c99-408d-95ba-63f7839c327f] Inject network info {{(pid=63028) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7737}} [ 748.293637] env[63028]: DEBUG nova.compute.manager [None req-43b6e421-50ad-4acf-9972-89c555d7d902 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] [instance: 0e07a6cd-8c99-408d-95ba-63f7839c327f] network_info to inject: |[{"id": "cc6af35d-7e46-40e6-bc97-40efda1ab807", "address": "fa:16:3e:a3:ea:21", "network": {"id": "bbda1d1e-8f2d-4594-9711-57cd1f5c1d06", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-842445812-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}, {"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.185", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7ed58d3e63604c2fac29e5744fd7f0bc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0685bd0b-3dbf-4a06-951c-c6a4726dd4b0", "external-id": "nsx-vlan-transportzone-661", "segmentation_id": 661, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc6af35d-7e", "ovs_interfaceid": "cc6af35d-7e46-40e6-bc97-40efda1ab807", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7738}} [ 748.299063] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-43b6e421-50ad-4acf-9972-89c555d7d902 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] [instance: 0e07a6cd-8c99-408d-95ba-63f7839c327f] Reconfiguring VM instance to set the machine id {{(pid=63028) _set_machine_id 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 748.300756] env[63028]: DEBUG oslo_concurrency.lockutils [req-35938219-4137-43bf-bb87-acea500834ca req-a0399376-6b95-4fcf-81d0-20164e61be88 service nova] Acquired lock "refresh_cache-0e07a6cd-8c99-408d-95ba-63f7839c327f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 748.300968] env[63028]: DEBUG nova.network.neutron [req-35938219-4137-43bf-bb87-acea500834ca req-a0399376-6b95-4fcf-81d0-20164e61be88 service nova] [instance: 0e07a6cd-8c99-408d-95ba-63f7839c327f] Refreshing network info cache for port cc6af35d-7e46-40e6-bc97-40efda1ab807 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 748.302251] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c9ecb6c9-5b14-4c20-9c29-fd35a485ae09 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.313903] env[63028]: DEBUG oslo_vmware.api [None req-66952caa-4d08-4457-8103-f1724319cde7 tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] Waiting for the task: (returnval){ [ 748.313903] env[63028]: value = "task-2735360" [ 748.313903] env[63028]: _type = "Task" [ 748.313903] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.328837] env[63028]: DEBUG oslo_vmware.api [None req-66952caa-4d08-4457-8103-f1724319cde7 tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] Task: {'id': task-2735360, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.329482] env[63028]: DEBUG oslo_vmware.api [None req-43b6e421-50ad-4acf-9972-89c555d7d902 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] Waiting for the task: (returnval){ [ 748.329482] env[63028]: value = "task-2735361" [ 748.329482] env[63028]: _type = "Task" [ 748.329482] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.335224] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-8e94b42e-cf5b-4db5-8cf9-7f06a8d06718 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 50e4934b-b9b1-4887-b5d1-95a37fbf4c41] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 748.335389] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-8e94b42e-cf5b-4db5-8cf9-7f06a8d06718 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 50e4934b-b9b1-4887-b5d1-95a37fbf4c41] Deleting contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 748.335433] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-8e94b42e-cf5b-4db5-8cf9-7f06a8d06718 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Deleting the datastore file [datastore2] 50e4934b-b9b1-4887-b5d1-95a37fbf4c41 {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 748.336135] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dfb19285-12bb-48ae-9428-b67ae649402d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.350373] env[63028]: DEBUG oslo_vmware.api [None req-43b6e421-50ad-4acf-9972-89c555d7d902 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] Task: {'id': task-2735361, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.352175] env[63028]: DEBUG oslo_vmware.api [None req-8e94b42e-cf5b-4db5-8cf9-7f06a8d06718 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Waiting for the task: (returnval){ [ 748.352175] env[63028]: value = "task-2735362" [ 748.352175] env[63028]: _type = "Task" [ 748.352175] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.365558] env[63028]: DEBUG oslo_vmware.api [None req-8e94b42e-cf5b-4db5-8cf9-7f06a8d06718 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2735362, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.410215] env[63028]: DEBUG nova.network.neutron [-] [instance: 8c7c8713-d5d7-490e-aba5-25d98bfbfaa0] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 748.411027] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-f13d02a8-8d3e-4b78-b445-d8381137a85a tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] [instance: e2d39c43-6666-4fda-b8e2-485399c59e46] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 748.411323] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-f13d02a8-8d3e-4b78-b445-d8381137a85a tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] [instance: e2d39c43-6666-4fda-b8e2-485399c59e46] Deleting contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 748.411508] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-f13d02a8-8d3e-4b78-b445-d8381137a85a tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Deleting the datastore file [datastore1] e2d39c43-6666-4fda-b8e2-485399c59e46 {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 748.412239] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1a67d771-8284-41ef-9df1-c515faedcc05 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.422548] env[63028]: DEBUG oslo_vmware.api [None req-f13d02a8-8d3e-4b78-b445-d8381137a85a tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Waiting for the task: (returnval){ [ 748.422548] env[63028]: value = "task-2735363" [ 748.422548] env[63028]: _type = "Task" [ 748.422548] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.431196] env[63028]: DEBUG nova.network.neutron [-] [instance: 352ac7c3-17a8-4d7e-a66f-47ea7614892c] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 748.443305] env[63028]: DEBUG oslo_vmware.api [None req-f13d02a8-8d3e-4b78-b445-d8381137a85a tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Task: {'id': task-2735363, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.467301] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c2083659-4645-4440-9683-4af8a1564415 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.829s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 748.467699] env[63028]: DEBUG nova.compute.manager [None req-c2083659-4645-4440-9683-4af8a1564415 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: 3e45e7f3-a34f-4eab-9fff-1c874c832e2a] Start building networks asynchronously for instance. 
{{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 748.471394] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c5c5f271-ef55-4824-84ba-509298f25f57 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.738s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 748.471394] env[63028]: DEBUG nova.objects.instance [None req-c5c5f271-ef55-4824-84ba-509298f25f57 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Lazy-loading 'resources' on Instance uuid c3014718-1064-4ab9-9600-86490489ee4b {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 748.514243] env[63028]: DEBUG nova.objects.instance [None req-510611a2-5774-4ec1-8a0f-357586159619 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] Lazy-loading 'flavor' on Instance uuid 0e07a6cd-8c99-408d-95ba-63f7839c327f {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 748.555351] env[63028]: DEBUG oslo_vmware.api [None req-c3a09395-4017-42a7-a909-5c9706ea7012 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Task: {'id': task-2735357, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.635029] env[63028]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e69cb8f7-9f27-4d81-bc43-2222027bb42b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.647014] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cf4ca8b-95b6-41b6-bf01-e7610d8ce89d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.685247] env[63028]: DEBUG nova.compute.manager [req-0f2e91b3-40a3-466e-931e-b07b20e2d934 req-946b4740-829c-4166-b65a-fc25225913bd service nova] [instance: 8c7c8713-d5d7-490e-aba5-25d98bfbfaa0] Detach interface failed, port_id=9197f89b-957f-4d27-a314-ca95bd44a77d, reason: Instance 8c7c8713-d5d7-490e-aba5-25d98bfbfaa0 could not be found. {{(pid=63028) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 748.828760] env[63028]: DEBUG oslo_vmware.api [None req-66952caa-4d08-4457-8103-f1724319cde7 tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] Task: {'id': task-2735360, 'name': PowerOnVM_Task} progress is 87%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.839267] env[63028]: DEBUG oslo_vmware.api [None req-43b6e421-50ad-4acf-9972-89c555d7d902 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] Task: {'id': task-2735361, 'name': ReconfigVM_Task, 'duration_secs': 0.241968} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 748.839267] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-43b6e421-50ad-4acf-9972-89c555d7d902 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] [instance: 0e07a6cd-8c99-408d-95ba-63f7839c327f] Reconfigured VM instance to set the machine id {{(pid=63028) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 748.866407] env[63028]: DEBUG oslo_vmware.api [None req-8e94b42e-cf5b-4db5-8cf9-7f06a8d06718 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2735362, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.409368} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 748.870024] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-8e94b42e-cf5b-4db5-8cf9-7f06a8d06718 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 748.870024] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-8e94b42e-cf5b-4db5-8cf9-7f06a8d06718 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 50e4934b-b9b1-4887-b5d1-95a37fbf4c41] Deleted contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 748.870024] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-8e94b42e-cf5b-4db5-8cf9-7f06a8d06718 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 50e4934b-b9b1-4887-b5d1-95a37fbf4c41] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 748.914594] env[63028]: INFO nova.compute.manager [-] [instance: 8c7c8713-d5d7-490e-aba5-25d98bfbfaa0] Took 1.99 seconds to deallocate network for instance. [ 748.941163] env[63028]: DEBUG oslo_vmware.api [None req-f13d02a8-8d3e-4b78-b445-d8381137a85a tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Task: {'id': task-2735363, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.948351] env[63028]: INFO nova.compute.manager [-] [instance: 352ac7c3-17a8-4d7e-a66f-47ea7614892c] Took 1.48 seconds to deallocate network for instance. [ 748.977026] env[63028]: DEBUG nova.compute.utils [None req-c2083659-4645-4440-9683-4af8a1564415 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 748.977026] env[63028]: DEBUG nova.compute.manager [None req-c2083659-4645-4440-9683-4af8a1564415 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: 3e45e7f3-a34f-4eab-9fff-1c874c832e2a] Allocating IP information in the background. 
{{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 748.977026] env[63028]: DEBUG nova.network.neutron [None req-c2083659-4645-4440-9683-4af8a1564415 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: 3e45e7f3-a34f-4eab-9fff-1c874c832e2a] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 749.019901] env[63028]: DEBUG oslo_concurrency.lockutils [None req-510611a2-5774-4ec1-8a0f-357586159619 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] Acquiring lock "refresh_cache-0e07a6cd-8c99-408d-95ba-63f7839c327f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 749.024682] env[63028]: DEBUG nova.policy [None req-c2083659-4645-4440-9683-4af8a1564415 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '32a3a4c8b9ef46bb9f1d927769b9dad9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1d3152ab577947b28de82f4801285f8c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 749.051802] env[63028]: DEBUG oslo_vmware.api [None req-c3a09395-4017-42a7-a909-5c9706ea7012 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Task: {'id': task-2735357, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.679065} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.054354] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3a09395-4017-42a7-a909-5c9706ea7012 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] 1316318e-8dcf-4ac2-b40a-6a3ab6964997/1316318e-8dcf-4ac2-b40a-6a3ab6964997.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 749.054569] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c3a09395-4017-42a7-a909-5c9706ea7012 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 1316318e-8dcf-4ac2-b40a-6a3ab6964997] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 749.054998] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6870524a-8c5a-4e86-81c7-6c3430ab7db9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.063263] env[63028]: DEBUG oslo_vmware.api [None req-c3a09395-4017-42a7-a909-5c9706ea7012 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Waiting for the task: (returnval){ [ 749.063263] env[63028]: value = "task-2735364" [ 749.063263] env[63028]: _type = "Task" [ 749.063263] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.076716] env[63028]: DEBUG oslo_vmware.api [None req-c3a09395-4017-42a7-a909-5c9706ea7012 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Task: {'id': task-2735364, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.082106] env[63028]: DEBUG nova.network.neutron [None req-0845dbb5-9e8a-45d4-8122-206916da704e tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 5982cd5d-abf1-42d4-bb44-8d79de599f11] Successfully updated port: 968c9c64-3e8d-442c-8090-cc34f396bc28 {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 749.251793] env[63028]: DEBUG nova.network.neutron [req-35938219-4137-43bf-bb87-acea500834ca req-a0399376-6b95-4fcf-81d0-20164e61be88 service nova] [instance: 0e07a6cd-8c99-408d-95ba-63f7839c327f] Updated VIF entry in instance network info cache for port cc6af35d-7e46-40e6-bc97-40efda1ab807. {{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 749.252201] env[63028]: DEBUG nova.network.neutron [req-35938219-4137-43bf-bb87-acea500834ca req-a0399376-6b95-4fcf-81d0-20164e61be88 service nova] [instance: 0e07a6cd-8c99-408d-95ba-63f7839c327f] Updating instance_info_cache with network_info: [{"id": "cc6af35d-7e46-40e6-bc97-40efda1ab807", "address": "fa:16:3e:a3:ea:21", "network": {"id": "bbda1d1e-8f2d-4594-9711-57cd1f5c1d06", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-842445812-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}, {"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.185", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7ed58d3e63604c2fac29e5744fd7f0bc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0685bd0b-3dbf-4a06-951c-c6a4726dd4b0", "external-id": "nsx-vlan-transportzone-661", "segmentation_id": 661, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc6af35d-7e", "ovs_interfaceid": "cc6af35d-7e46-40e6-bc97-40efda1ab807", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 749.285814] env[63028]: DEBUG nova.compute.manager [req-f7efadff-6fe3-4ae0-9bc0-734e20f637ea req-c68b2bb2-f3d0-46d9-8d23-4cfcb276f8f7 service nova] [instance: 352ac7c3-17a8-4d7e-a66f-47ea7614892c] Received event network-vif-deleted-07a0490c-4de0-43cb-be3e-6a99cf23e4bb {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 749.285814] env[63028]: DEBUG nova.compute.manager [req-f7efadff-6fe3-4ae0-9bc0-734e20f637ea req-c68b2bb2-f3d0-46d9-8d23-4cfcb276f8f7 service nova] [instance: 5982cd5d-abf1-42d4-bb44-8d79de599f11] Received event 
network-vif-plugged-968c9c64-3e8d-442c-8090-cc34f396bc28 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 749.285996] env[63028]: DEBUG oslo_concurrency.lockutils [req-f7efadff-6fe3-4ae0-9bc0-734e20f637ea req-c68b2bb2-f3d0-46d9-8d23-4cfcb276f8f7 service nova] Acquiring lock "5982cd5d-abf1-42d4-bb44-8d79de599f11-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 749.286586] env[63028]: DEBUG oslo_concurrency.lockutils [req-f7efadff-6fe3-4ae0-9bc0-734e20f637ea req-c68b2bb2-f3d0-46d9-8d23-4cfcb276f8f7 service nova] Lock "5982cd5d-abf1-42d4-bb44-8d79de599f11-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 749.286586] env[63028]: DEBUG oslo_concurrency.lockutils [req-f7efadff-6fe3-4ae0-9bc0-734e20f637ea req-c68b2bb2-f3d0-46d9-8d23-4cfcb276f8f7 service nova] Lock "5982cd5d-abf1-42d4-bb44-8d79de599f11-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 749.286721] env[63028]: DEBUG nova.compute.manager [req-f7efadff-6fe3-4ae0-9bc0-734e20f637ea req-c68b2bb2-f3d0-46d9-8d23-4cfcb276f8f7 service nova] [instance: 5982cd5d-abf1-42d4-bb44-8d79de599f11] No waiting events found dispatching network-vif-plugged-968c9c64-3e8d-442c-8090-cc34f396bc28 {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 749.287026] env[63028]: WARNING nova.compute.manager [req-f7efadff-6fe3-4ae0-9bc0-734e20f637ea req-c68b2bb2-f3d0-46d9-8d23-4cfcb276f8f7 service nova] [instance: 5982cd5d-abf1-42d4-bb44-8d79de599f11] Received unexpected event network-vif-plugged-968c9c64-3e8d-442c-8090-cc34f396bc28 for instance with vm_state building and task_state spawning. [ 749.333713] env[63028]: DEBUG oslo_vmware.api [None req-66952caa-4d08-4457-8103-f1724319cde7 tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] Task: {'id': task-2735360, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.436321] env[63028]: DEBUG oslo_vmware.api [None req-f13d02a8-8d3e-4b78-b445-d8381137a85a tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Task: {'id': task-2735363, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.455303] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4b397209-e0b0-4739-afc2-198208fd9eb6 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 749.483766] env[63028]: DEBUG nova.compute.manager [None req-c2083659-4645-4440-9683-4af8a1564415 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: 3e45e7f3-a34f-4eab-9fff-1c874c832e2a] Start building block device mappings for instance. 
{{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 749.491290] env[63028]: INFO nova.compute.manager [None req-093af5c2-3354-4012-ab41-e1a65935f3a3 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] [instance: 8c7c8713-d5d7-490e-aba5-25d98bfbfaa0] Took 0.58 seconds to detach 1 volumes for instance. [ 749.496727] env[63028]: DEBUG nova.network.neutron [None req-c2083659-4645-4440-9683-4af8a1564415 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: 3e45e7f3-a34f-4eab-9fff-1c874c832e2a] Successfully created port: dfcc9f28-fdc3-4d22-a5a0-b2704f142312 {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 749.502257] env[63028]: DEBUG nova.compute.manager [None req-093af5c2-3354-4012-ab41-e1a65935f3a3 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] [instance: 8c7c8713-d5d7-490e-aba5-25d98bfbfaa0] Deleting volume: 39116d21-d007-4c27-9ce1-9f92bb99f75c {{(pid=63028) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3282}} [ 749.555418] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9305d2a8-fe8f-4019-aa59-025e97bbf13d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.570370] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91b70399-79c1-4b6d-9af4-4d1088fc3c82 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.582604] env[63028]: DEBUG oslo_vmware.api [None req-c3a09395-4017-42a7-a909-5c9706ea7012 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Task: {'id': task-2735364, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078979} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.606821] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c3a09395-4017-42a7-a909-5c9706ea7012 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 1316318e-8dcf-4ac2-b40a-6a3ab6964997] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 749.607630] env[63028]: DEBUG oslo_concurrency.lockutils [None req-0845dbb5-9e8a-45d4-8122-206916da704e tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Acquiring lock "refresh_cache-5982cd5d-abf1-42d4-bb44-8d79de599f11" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 749.607630] env[63028]: DEBUG oslo_concurrency.lockutils [None req-0845dbb5-9e8a-45d4-8122-206916da704e tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Acquired lock "refresh_cache-5982cd5d-abf1-42d4-bb44-8d79de599f11" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 749.607803] env[63028]: DEBUG nova.network.neutron [None req-0845dbb5-9e8a-45d4-8122-206916da704e tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 5982cd5d-abf1-42d4-bb44-8d79de599f11] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 749.612443] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e04bf896-ba6d-4c58-b599-b1f2940a62a0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.614597] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9f94b0e-2908-472e-beb5-a0a393d821ba {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.635089] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-487754cb-f4cc-4caf-86bf-f14a7cb2e457 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.650052] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-c3a09395-4017-42a7-a909-5c9706ea7012 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 1316318e-8dcf-4ac2-b40a-6a3ab6964997] Reconfiguring VM instance instance-0000002d to attach disk [datastore1] 1316318e-8dcf-4ac2-b40a-6a3ab6964997/1316318e-8dcf-4ac2-b40a-6a3ab6964997.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 749.650670] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e6f8b2bc-1147-4513-aa6b-b45666f002d6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.684026] env[63028]: DEBUG nova.compute.provider_tree [None req-c5c5f271-ef55-4824-84ba-509298f25f57 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 749.689039] env[63028]: DEBUG oslo_vmware.api [None req-c3a09395-4017-42a7-a909-5c9706ea7012 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Waiting for the task: (returnval){ [ 749.689039] env[63028]: value = "task-2735366" [ 749.689039] env[63028]: _type = "Task" [ 749.689039] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.699165] env[63028]: DEBUG oslo_vmware.api [None req-c3a09395-4017-42a7-a909-5c9706ea7012 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Task: {'id': task-2735366, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.754581] env[63028]: DEBUG oslo_concurrency.lockutils [req-35938219-4137-43bf-bb87-acea500834ca req-a0399376-6b95-4fcf-81d0-20164e61be88 service nova] Releasing lock "refresh_cache-0e07a6cd-8c99-408d-95ba-63f7839c327f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 749.755129] env[63028]: DEBUG oslo_concurrency.lockutils [None req-510611a2-5774-4ec1-8a0f-357586159619 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] Acquired lock "refresh_cache-0e07a6cd-8c99-408d-95ba-63f7839c327f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 749.831092] env[63028]: DEBUG oslo_vmware.api [None req-66952caa-4d08-4457-8103-f1724319cde7 tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] Task: {'id': task-2735360, 'name': PowerOnVM_Task, 'duration_secs': 1.366814} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.831416] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-66952caa-4d08-4457-8103-f1724319cde7 tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 749.831799] env[63028]: INFO nova.compute.manager [None req-66952caa-4d08-4457-8103-f1724319cde7 tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] Took 8.68 seconds to spawn the instance on the hypervisor. 
[ 749.831923] env[63028]: DEBUG nova.compute.manager [None req-66952caa-4d08-4457-8103-f1724319cde7 tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 749.832797] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4f11cb0-b2ef-4303-b17b-322c388596ab {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.912568] env[63028]: DEBUG nova.virt.hardware [None req-8e94b42e-cf5b-4db5-8cf9-7f06a8d06718 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 749.912847] env[63028]: DEBUG nova.virt.hardware [None req-8e94b42e-cf5b-4db5-8cf9-7f06a8d06718 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 749.912977] env[63028]: DEBUG nova.virt.hardware [None req-8e94b42e-cf5b-4db5-8cf9-7f06a8d06718 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 749.913239] env[63028]: DEBUG nova.virt.hardware [None req-8e94b42e-cf5b-4db5-8cf9-7f06a8d06718 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 749.913423] env[63028]: DEBUG nova.virt.hardware [None req-8e94b42e-cf5b-4db5-8cf9-7f06a8d06718 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 749.913604] env[63028]: DEBUG nova.virt.hardware [None req-8e94b42e-cf5b-4db5-8cf9-7f06a8d06718 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 749.913850] env[63028]: DEBUG nova.virt.hardware [None req-8e94b42e-cf5b-4db5-8cf9-7f06a8d06718 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Topology preferred 
VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 749.914067] env[63028]: DEBUG nova.virt.hardware [None req-8e94b42e-cf5b-4db5-8cf9-7f06a8d06718 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 749.914281] env[63028]: DEBUG nova.virt.hardware [None req-8e94b42e-cf5b-4db5-8cf9-7f06a8d06718 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 749.914501] env[63028]: DEBUG nova.virt.hardware [None req-8e94b42e-cf5b-4db5-8cf9-7f06a8d06718 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 749.914731] env[63028]: DEBUG nova.virt.hardware [None req-8e94b42e-cf5b-4db5-8cf9-7f06a8d06718 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 749.915795] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee32fd39-478d-467b-b44b-54cf2ea6c2ce {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.926376] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aee8d3c7-03e4-4822-b6f3-91ebd347d0ce {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.948360] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-8e94b42e-cf5b-4db5-8cf9-7f06a8d06718 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 50e4934b-b9b1-4887-b5d1-95a37fbf4c41] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1e:ca:d2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5446413d-c3b0-4cd2-a962-62240db178ac', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b6ea40f2-2217-44b1-bf3f-727a6649149a', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 749.963777] env[63028]: DEBUG oslo.service.loopingcall [None req-8e94b42e-cf5b-4db5-8cf9-7f06a8d06718 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 749.968993] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 50e4934b-b9b1-4887-b5d1-95a37fbf4c41] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 749.969361] env[63028]: DEBUG oslo_vmware.api [None req-f13d02a8-8d3e-4b78-b445-d8381137a85a tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Task: {'id': task-2735363, 'name': DeleteDatastoreFile_Task, 'duration_secs': 1.124506} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.969574] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b336ed15-a3ad-46bc-ae22-a8e3dda73800 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.989740] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-f13d02a8-8d3e-4b78-b445-d8381137a85a tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 749.989888] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-f13d02a8-8d3e-4b78-b445-d8381137a85a tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] [instance: e2d39c43-6666-4fda-b8e2-485399c59e46] Deleted contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 749.990080] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-f13d02a8-8d3e-4b78-b445-d8381137a85a tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] [instance: e2d39c43-6666-4fda-b8e2-485399c59e46] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 749.991654] env[63028]: INFO nova.compute.manager [None req-f13d02a8-8d3e-4b78-b445-d8381137a85a tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] [instance: e2d39c43-6666-4fda-b8e2-485399c59e46] Took 2.31 seconds to destroy the instance on the hypervisor. [ 749.991936] env[63028]: DEBUG oslo.service.loopingcall [None req-f13d02a8-8d3e-4b78-b445-d8381137a85a tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 749.995420] env[63028]: DEBUG nova.compute.manager [-] [instance: e2d39c43-6666-4fda-b8e2-485399c59e46] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 749.995621] env[63028]: DEBUG nova.network.neutron [-] [instance: e2d39c43-6666-4fda-b8e2-485399c59e46] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 750.009699] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 750.009699] env[63028]: value = "task-2735367" [ 750.009699] env[63028]: _type = "Task" [ 750.009699] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.023413] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735367, 'name': CreateVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.065851] env[63028]: DEBUG oslo_concurrency.lockutils [None req-093af5c2-3354-4012-ab41-e1a65935f3a3 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 750.188526] env[63028]: DEBUG nova.scheduler.client.report [None req-c5c5f271-ef55-4824-84ba-509298f25f57 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 750.216646] env[63028]: DEBUG oslo_vmware.api [None req-c3a09395-4017-42a7-a909-5c9706ea7012 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Task: {'id': task-2735366, 'name': ReconfigVM_Task, 'duration_secs': 0.517239} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.217944] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-c3a09395-4017-42a7-a909-5c9706ea7012 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 1316318e-8dcf-4ac2-b40a-6a3ab6964997] Reconfigured VM instance instance-0000002d to attach disk [datastore1] 1316318e-8dcf-4ac2-b40a-6a3ab6964997/1316318e-8dcf-4ac2-b40a-6a3ab6964997.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 750.219276] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-369858c3-1d04-4e05-9e4d-d149856bd436 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.229224] env[63028]: DEBUG oslo_vmware.api [None req-c3a09395-4017-42a7-a909-5c9706ea7012 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Waiting for the task: (returnval){ [ 750.229224] env[63028]: value = "task-2735368" [ 750.229224] env[63028]: _type = "Task" [ 750.229224] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.241647] env[63028]: DEBUG oslo_vmware.api [None req-c3a09395-4017-42a7-a909-5c9706ea7012 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Task: {'id': task-2735368, 'name': Rename_Task} progress is 5%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.353724] env[63028]: INFO nova.compute.manager [None req-66952caa-4d08-4457-8103-f1724319cde7 tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] Took 48.47 seconds to build instance. [ 750.438224] env[63028]: DEBUG nova.network.neutron [None req-0845dbb5-9e8a-45d4-8122-206916da704e tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 5982cd5d-abf1-42d4-bb44-8d79de599f11] Instance cache missing network info. {{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 750.502244] env[63028]: DEBUG nova.compute.manager [None req-c2083659-4645-4440-9683-4af8a1564415 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: 3e45e7f3-a34f-4eab-9fff-1c874c832e2a] Start spawning the instance on the hypervisor. {{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 750.520774] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735367, 'name': CreateVM_Task} progress is 99%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.533042] env[63028]: DEBUG nova.virt.hardware [None req-c2083659-4645-4440-9683-4af8a1564415 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 750.533042] env[63028]: DEBUG nova.virt.hardware [None req-c2083659-4645-4440-9683-4af8a1564415 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 750.533170] env[63028]: DEBUG nova.virt.hardware [None req-c2083659-4645-4440-9683-4af8a1564415 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 750.533289] env[63028]: DEBUG nova.virt.hardware [None req-c2083659-4645-4440-9683-4af8a1564415 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 750.536328] env[63028]: DEBUG nova.virt.hardware [None req-c2083659-4645-4440-9683-4af8a1564415 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Image pref 0:0:0 {{(pid=63028) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 750.536328] env[63028]: DEBUG nova.virt.hardware [None req-c2083659-4645-4440-9683-4af8a1564415 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 750.536328] env[63028]: DEBUG nova.virt.hardware [None req-c2083659-4645-4440-9683-4af8a1564415 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 750.536328] env[63028]: DEBUG nova.virt.hardware [None req-c2083659-4645-4440-9683-4af8a1564415 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 750.538186] env[63028]: DEBUG nova.virt.hardware [None req-c2083659-4645-4440-9683-4af8a1564415 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 750.538449] env[63028]: DEBUG nova.virt.hardware [None req-c2083659-4645-4440-9683-4af8a1564415 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 750.538671] env[63028]: DEBUG nova.virt.hardware [None req-c2083659-4645-4440-9683-4af8a1564415 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 750.539561] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d462c30d-7e8c-4228-af61-2555c8e6648f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.548965] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-febb1cbb-fbec-47e5-8f88-9c90f8935eba {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.701213] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c5c5f271-ef55-4824-84ba-509298f25f57 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.228s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 750.704294] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4f6edbf0-2dbd-479d-b1c2-51e25413dafe tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.033s {{(pid=63028) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 750.706741] env[63028]: INFO nova.compute.claims [None req-4f6edbf0-2dbd-479d-b1c2-51e25413dafe tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: a4b0d948-d950-414a-b23f-faefa5ab038c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 750.735012] env[63028]: INFO nova.scheduler.client.report [None req-c5c5f271-ef55-4824-84ba-509298f25f57 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Deleted allocations for instance c3014718-1064-4ab9-9600-86490489ee4b [ 750.744066] env[63028]: DEBUG oslo_vmware.api [None req-c3a09395-4017-42a7-a909-5c9706ea7012 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Task: {'id': task-2735368, 'name': Rename_Task, 'duration_secs': 0.182576} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.744586] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3a09395-4017-42a7-a909-5c9706ea7012 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 1316318e-8dcf-4ac2-b40a-6a3ab6964997] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 750.744967] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-65b172c4-55b2-413f-9901-8526705e3bd5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.755862] env[63028]: DEBUG oslo_vmware.api [None req-c3a09395-4017-42a7-a909-5c9706ea7012 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Waiting for the task: (returnval){ [ 750.755862] env[63028]: value = "task-2735369" [ 750.755862] env[63028]: _type = "Task" [ 750.755862] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.768168] env[63028]: DEBUG oslo_vmware.api [None req-c3a09395-4017-42a7-a909-5c9706ea7012 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Task: {'id': task-2735369, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.846534] env[63028]: DEBUG nova.network.neutron [None req-510611a2-5774-4ec1-8a0f-357586159619 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] [instance: 0e07a6cd-8c99-408d-95ba-63f7839c327f] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 750.857073] env[63028]: DEBUG oslo_concurrency.lockutils [None req-66952caa-4d08-4457-8103-f1724319cde7 tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] Lock "600195de-ceb4-41a6-9ade-dda8b898e4db" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 91.066s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 750.870321] env[63028]: DEBUG nova.network.neutron [None req-0845dbb5-9e8a-45d4-8122-206916da704e tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 5982cd5d-abf1-42d4-bb44-8d79de599f11] Updating instance_info_cache with network_info: [{"id": "968c9c64-3e8d-442c-8090-cc34f396bc28", "address": "fa:16:3e:f9:6a:80", "network": {"id": "47c482bc-2ff1-431d-8910-0bf36def79a2", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.160", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "96661ac8d4f04d6e97eea4809b444133", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56136ef6-99d7-4562-9a9f-d66fec951c5c", "external-id": "nsx-vlan-transportzone-32", "segmentation_id": 32, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap968c9c64-3e", "ovs_interfaceid": "968c9c64-3e8d-442c-8090-cc34f396bc28", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 751.025029] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735367, 'name': CreateVM_Task, 'duration_secs': 0.518348} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.025029] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 50e4934b-b9b1-4887-b5d1-95a37fbf4c41] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 751.025029] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8e94b42e-cf5b-4db5-8cf9-7f06a8d06718 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 751.025029] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8e94b42e-cf5b-4db5-8cf9-7f06a8d06718 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 751.025029] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8e94b42e-cf5b-4db5-8cf9-7f06a8d06718 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 751.025029] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ee5c03b8-8b87-42a8-8778-795c321dc8b2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.028792] env[63028]: DEBUG oslo_vmware.api [None req-8e94b42e-cf5b-4db5-8cf9-7f06a8d06718 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Waiting for the task: (returnval){ [ 751.028792] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]524a181d-72c3-3b3a-9333-471284f620aa" [ 751.028792] env[63028]: _type = "Task" [ 751.028792] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.040691] env[63028]: DEBUG oslo_vmware.api [None req-8e94b42e-cf5b-4db5-8cf9-7f06a8d06718 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]524a181d-72c3-3b3a-9333-471284f620aa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.249821] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c5c5f271-ef55-4824-84ba-509298f25f57 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Lock "c3014718-1064-4ab9-9600-86490489ee4b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 39.388s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 751.267359] env[63028]: DEBUG oslo_vmware.api [None req-c3a09395-4017-42a7-a909-5c9706ea7012 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Task: {'id': task-2735369, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.279214] env[63028]: DEBUG nova.compute.manager [req-313824d6-62b4-4fbd-bd9a-c4d63d3f0dae req-5a1caa11-6ed8-4214-a0b0-23a4063832c6 service nova] [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] Received event network-changed-f8cad445-9a0b-4d25-84a3-df0521f45d9f {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 751.279214] env[63028]: DEBUG nova.compute.manager [req-313824d6-62b4-4fbd-bd9a-c4d63d3f0dae req-5a1caa11-6ed8-4214-a0b0-23a4063832c6 service nova] [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] Refreshing instance network info cache due to event network-changed-f8cad445-9a0b-4d25-84a3-df0521f45d9f. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 751.279214] env[63028]: DEBUG oslo_concurrency.lockutils [req-313824d6-62b4-4fbd-bd9a-c4d63d3f0dae req-5a1caa11-6ed8-4214-a0b0-23a4063832c6 service nova] Acquiring lock "refresh_cache-600195de-ceb4-41a6-9ade-dda8b898e4db" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 751.279594] env[63028]: DEBUG oslo_concurrency.lockutils [req-313824d6-62b4-4fbd-bd9a-c4d63d3f0dae req-5a1caa11-6ed8-4214-a0b0-23a4063832c6 service nova] Acquired lock "refresh_cache-600195de-ceb4-41a6-9ade-dda8b898e4db" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 751.279664] env[63028]: DEBUG nova.network.neutron [req-313824d6-62b4-4fbd-bd9a-c4d63d3f0dae req-5a1caa11-6ed8-4214-a0b0-23a4063832c6 service nova] [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] Refreshing network info cache for port f8cad445-9a0b-4d25-84a3-df0521f45d9f {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 751.293259] env[63028]: DEBUG nova.network.neutron [None req-c2083659-4645-4440-9683-4af8a1564415 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: 3e45e7f3-a34f-4eab-9fff-1c874c832e2a] Successfully updated port: dfcc9f28-fdc3-4d22-a5a0-b2704f142312 {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 751.363306] env[63028]: DEBUG nova.compute.manager [None req-3a4e4332-06ce-40fd-b127-c2bc84c3f268 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Starting instance... 
{{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 751.374081] env[63028]: DEBUG oslo_concurrency.lockutils [None req-0845dbb5-9e8a-45d4-8122-206916da704e tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Releasing lock "refresh_cache-5982cd5d-abf1-42d4-bb44-8d79de599f11" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 751.374999] env[63028]: DEBUG nova.compute.manager [None req-0845dbb5-9e8a-45d4-8122-206916da704e tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 5982cd5d-abf1-42d4-bb44-8d79de599f11] Instance network_info: |[{"id": "968c9c64-3e8d-442c-8090-cc34f396bc28", "address": "fa:16:3e:f9:6a:80", "network": {"id": "47c482bc-2ff1-431d-8910-0bf36def79a2", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.160", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "96661ac8d4f04d6e97eea4809b444133", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56136ef6-99d7-4562-9a9f-d66fec951c5c", "external-id": "nsx-vlan-transportzone-32", "segmentation_id": 32, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap968c9c64-3e", "ovs_interfaceid": "968c9c64-3e8d-442c-8090-cc34f396bc28", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 751.376238] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-0845dbb5-9e8a-45d4-8122-206916da704e tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 5982cd5d-abf1-42d4-bb44-8d79de599f11] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f9:6a:80', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '56136ef6-99d7-4562-9a9f-d66fec951c5c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '968c9c64-3e8d-442c-8090-cc34f396bc28', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 751.389145] env[63028]: DEBUG oslo.service.loopingcall [None req-0845dbb5-9e8a-45d4-8122-206916da704e tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 751.393163] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5982cd5d-abf1-42d4-bb44-8d79de599f11] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 751.393580] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7c4ac639-19bf-4125-ab26-9287fec947c9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.421482] env[63028]: DEBUG nova.compute.manager [req-bbed12ed-790d-44bb-a04e-d591b68f52cf req-ebad6537-0b56-41f9-9d20-c42568dda7f0 service nova] [instance: 5982cd5d-abf1-42d4-bb44-8d79de599f11] Received event network-changed-968c9c64-3e8d-442c-8090-cc34f396bc28 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 751.421766] env[63028]: DEBUG nova.compute.manager [req-bbed12ed-790d-44bb-a04e-d591b68f52cf req-ebad6537-0b56-41f9-9d20-c42568dda7f0 service nova] [instance: 5982cd5d-abf1-42d4-bb44-8d79de599f11] Refreshing instance network info cache due to event network-changed-968c9c64-3e8d-442c-8090-cc34f396bc28. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 751.422035] env[63028]: DEBUG oslo_concurrency.lockutils [req-bbed12ed-790d-44bb-a04e-d591b68f52cf req-ebad6537-0b56-41f9-9d20-c42568dda7f0 service nova] Acquiring lock "refresh_cache-5982cd5d-abf1-42d4-bb44-8d79de599f11" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 751.422143] env[63028]: DEBUG oslo_concurrency.lockutils [req-bbed12ed-790d-44bb-a04e-d591b68f52cf req-ebad6537-0b56-41f9-9d20-c42568dda7f0 service nova] Acquired lock "refresh_cache-5982cd5d-abf1-42d4-bb44-8d79de599f11" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 751.422314] env[63028]: DEBUG nova.network.neutron [req-bbed12ed-790d-44bb-a04e-d591b68f52cf req-ebad6537-0b56-41f9-9d20-c42568dda7f0 service nova] [instance: 5982cd5d-abf1-42d4-bb44-8d79de599f11] Refreshing network info cache for port 968c9c64-3e8d-442c-8090-cc34f396bc28 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 751.425651] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 751.425651] env[63028]: value = "task-2735370" [ 751.425651] env[63028]: _type = "Task" [ 751.425651] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.438186] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735370, 'name': CreateVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.547648] env[63028]: DEBUG oslo_vmware.api [None req-8e94b42e-cf5b-4db5-8cf9-7f06a8d06718 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]524a181d-72c3-3b3a-9333-471284f620aa, 'name': SearchDatastore_Task, 'duration_secs': 0.014714} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.547994] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8e94b42e-cf5b-4db5-8cf9-7f06a8d06718 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 751.548358] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-8e94b42e-cf5b-4db5-8cf9-7f06a8d06718 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 50e4934b-b9b1-4887-b5d1-95a37fbf4c41] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 751.548525] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8e94b42e-cf5b-4db5-8cf9-7f06a8d06718 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 751.548704] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8e94b42e-cf5b-4db5-8cf9-7f06a8d06718 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 751.548910] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-8e94b42e-cf5b-4db5-8cf9-7f06a8d06718 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 751.549237] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-df4d1eb0-6ccc-4b44-9521-d1162446ea08 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.560296] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-8e94b42e-cf5b-4db5-8cf9-7f06a8d06718 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 751.560502] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-8e94b42e-cf5b-4db5-8cf9-7f06a8d06718 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 751.562036] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a9eee904-9abc-4d0e-98dc-dda61f641a21 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.567663] env[63028]: DEBUG oslo_vmware.api [None req-8e94b42e-cf5b-4db5-8cf9-7f06a8d06718 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Waiting for the task: (returnval){ [ 751.567663] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52f51829-026c-369f-b3a7-22969c4cf57e" [ 751.567663] env[63028]: _type = "Task" [ 751.567663] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.576882] env[63028]: DEBUG oslo_vmware.api [None req-8e94b42e-cf5b-4db5-8cf9-7f06a8d06718 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52f51829-026c-369f-b3a7-22969c4cf57e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.711508] env[63028]: DEBUG nova.network.neutron [None req-510611a2-5774-4ec1-8a0f-357586159619 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] [instance: 0e07a6cd-8c99-408d-95ba-63f7839c327f] Updating instance_info_cache with network_info: [{"id": "cc6af35d-7e46-40e6-bc97-40efda1ab807", "address": "fa:16:3e:a3:ea:21", "network": {"id": "bbda1d1e-8f2d-4594-9711-57cd1f5c1d06", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-842445812-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.185", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7ed58d3e63604c2fac29e5744fd7f0bc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0685bd0b-3dbf-4a06-951c-c6a4726dd4b0", "external-id": "nsx-vlan-transportzone-661", "segmentation_id": 661, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc6af35d-7e", "ovs_interfaceid": "cc6af35d-7e46-40e6-bc97-40efda1ab807", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 751.770250] env[63028]: DEBUG oslo_vmware.api [None req-c3a09395-4017-42a7-a909-5c9706ea7012 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Task: {'id': task-2735369, 'name': PowerOnVM_Task, 'duration_secs': 0.838946} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.773883] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3a09395-4017-42a7-a909-5c9706ea7012 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 1316318e-8dcf-4ac2-b40a-6a3ab6964997] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 751.774197] env[63028]: INFO nova.compute.manager [None req-c3a09395-4017-42a7-a909-5c9706ea7012 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 1316318e-8dcf-4ac2-b40a-6a3ab6964997] Took 9.60 seconds to spawn the instance on the hypervisor. [ 751.774359] env[63028]: DEBUG nova.compute.manager [None req-c3a09395-4017-42a7-a909-5c9706ea7012 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 1316318e-8dcf-4ac2-b40a-6a3ab6964997] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 751.775710] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f2b5da1-41a3-463c-b475-69e1ed652313 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.795202] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c2083659-4645-4440-9683-4af8a1564415 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Acquiring lock "refresh_cache-3e45e7f3-a34f-4eab-9fff-1c874c832e2a" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 751.795580] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c2083659-4645-4440-9683-4af8a1564415 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Acquired lock "refresh_cache-3e45e7f3-a34f-4eab-9fff-1c874c832e2a" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 751.795803] env[63028]: DEBUG nova.network.neutron [None req-c2083659-4645-4440-9683-4af8a1564415 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: 3e45e7f3-a34f-4eab-9fff-1c874c832e2a] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 751.873989] env[63028]: DEBUG nova.network.neutron [-] [instance: e2d39c43-6666-4fda-b8e2-485399c59e46] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 751.895352] env[63028]: DEBUG oslo_concurrency.lockutils [None req-3a4e4332-06ce-40fd-b127-c2bc84c3f268 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 751.941932] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735370, 'name': CreateVM_Task, 'duration_secs': 0.427994} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.942136] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5982cd5d-abf1-42d4-bb44-8d79de599f11] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 751.942913] env[63028]: DEBUG oslo_concurrency.lockutils [None req-0845dbb5-9e8a-45d4-8122-206916da704e tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 751.943122] env[63028]: DEBUG oslo_concurrency.lockutils [None req-0845dbb5-9e8a-45d4-8122-206916da704e tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 751.943435] env[63028]: DEBUG oslo_concurrency.lockutils [None req-0845dbb5-9e8a-45d4-8122-206916da704e tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 751.943694] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c2fb37bd-ab02-43d9-b88f-962a6c9b5e16 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.952120] env[63028]: DEBUG oslo_vmware.api [None req-0845dbb5-9e8a-45d4-8122-206916da704e tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Waiting for the task: (returnval){ [ 751.952120] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]525280da-4f42-07f9-55de-2eb723c83b22" [ 751.952120] env[63028]: _type = "Task" [ 751.952120] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.962155] env[63028]: DEBUG oslo_vmware.api [None req-0845dbb5-9e8a-45d4-8122-206916da704e tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]525280da-4f42-07f9-55de-2eb723c83b22, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.004808] env[63028]: DEBUG oslo_concurrency.lockutils [None req-42a4e41f-5fdd-4dd8-bb5e-bc5dbf266ec6 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Acquiring lock "5a330ed9-c106-49f2-b524-a424e717b5ce" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 752.005094] env[63028]: DEBUG oslo_concurrency.lockutils [None req-42a4e41f-5fdd-4dd8-bb5e-bc5dbf266ec6 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Lock "5a330ed9-c106-49f2-b524-a424e717b5ce" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 752.005306] env[63028]: DEBUG oslo_concurrency.lockutils [None req-42a4e41f-5fdd-4dd8-bb5e-bc5dbf266ec6 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Acquiring lock "5a330ed9-c106-49f2-b524-a424e717b5ce-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 752.005493] env[63028]: DEBUG oslo_concurrency.lockutils [None req-42a4e41f-5fdd-4dd8-bb5e-bc5dbf266ec6 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Lock "5a330ed9-c106-49f2-b524-a424e717b5ce-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 752.005665] env[63028]: DEBUG oslo_concurrency.lockutils [None req-42a4e41f-5fdd-4dd8-bb5e-bc5dbf266ec6 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Lock "5a330ed9-c106-49f2-b524-a424e717b5ce-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 752.007787] env[63028]: INFO nova.compute.manager [None req-42a4e41f-5fdd-4dd8-bb5e-bc5dbf266ec6 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: 5a330ed9-c106-49f2-b524-a424e717b5ce] Terminating instance [ 752.083920] env[63028]: DEBUG oslo_vmware.api [None req-8e94b42e-cf5b-4db5-8cf9-7f06a8d06718 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52f51829-026c-369f-b3a7-22969c4cf57e, 'name': SearchDatastore_Task, 'duration_secs': 0.014194} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.084769] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0a93438e-9c56-4c08-ad13-c12aa2e120e3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.091034] env[63028]: DEBUG oslo_vmware.api [None req-8e94b42e-cf5b-4db5-8cf9-7f06a8d06718 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Waiting for the task: (returnval){ [ 752.091034] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52e9557e-7633-be57-482e-c94bed6bf1f8" [ 752.091034] env[63028]: _type = "Task" [ 752.091034] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.108335] env[63028]: DEBUG oslo_vmware.api [None req-8e94b42e-cf5b-4db5-8cf9-7f06a8d06718 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52e9557e-7633-be57-482e-c94bed6bf1f8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.113430] env[63028]: DEBUG nova.network.neutron [req-313824d6-62b4-4fbd-bd9a-c4d63d3f0dae req-5a1caa11-6ed8-4214-a0b0-23a4063832c6 service nova] [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] Updated VIF entry in instance network info cache for port f8cad445-9a0b-4d25-84a3-df0521f45d9f. {{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 752.113755] env[63028]: DEBUG nova.network.neutron [req-313824d6-62b4-4fbd-bd9a-c4d63d3f0dae req-5a1caa11-6ed8-4214-a0b0-23a4063832c6 service nova] [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] Updating instance_info_cache with network_info: [{"id": "f8cad445-9a0b-4d25-84a3-df0521f45d9f", "address": "fa:16:3e:35:4f:e7", "network": {"id": "c71964ff-29fc-4ff0-af97-0c3a5c1b0f0f", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-584692412-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.254", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7f16a87f7b9b420eb1310ad086f4124c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "11032cc2-b275-48d2-9c40-9455ea7d49e3", "external-id": "nsx-vlan-transportzone-226", "segmentation_id": 226, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf8cad445-9a", "ovs_interfaceid": "f8cad445-9a0b-4d25-84a3-df0521f45d9f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 752.220854] env[63028]: DEBUG oslo_concurrency.lockutils [None req-510611a2-5774-4ec1-8a0f-357586159619 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] 
Releasing lock "refresh_cache-0e07a6cd-8c99-408d-95ba-63f7839c327f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 752.221156] env[63028]: DEBUG nova.compute.manager [None req-510611a2-5774-4ec1-8a0f-357586159619 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] [instance: 0e07a6cd-8c99-408d-95ba-63f7839c327f] Inject network info {{(pid=63028) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7737}} [ 752.221500] env[63028]: DEBUG nova.compute.manager [None req-510611a2-5774-4ec1-8a0f-357586159619 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] [instance: 0e07a6cd-8c99-408d-95ba-63f7839c327f] network_info to inject: |[{"id": "cc6af35d-7e46-40e6-bc97-40efda1ab807", "address": "fa:16:3e:a3:ea:21", "network": {"id": "bbda1d1e-8f2d-4594-9711-57cd1f5c1d06", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-842445812-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.185", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7ed58d3e63604c2fac29e5744fd7f0bc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0685bd0b-3dbf-4a06-951c-c6a4726dd4b0", "external-id": "nsx-vlan-transportzone-661", "segmentation_id": 661, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc6af35d-7e", "ovs_interfaceid": "cc6af35d-7e46-40e6-bc97-40efda1ab807", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7738}} [ 752.226713] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-510611a2-5774-4ec1-8a0f-357586159619 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] [instance: 0e07a6cd-8c99-408d-95ba-63f7839c327f] Reconfiguring VM instance to set the machine id {{(pid=63028) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 752.227965] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-35788fdc-64d1-4f49-8d7b-798a9c336316 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.239458] env[63028]: DEBUG nova.network.neutron [req-bbed12ed-790d-44bb-a04e-d591b68f52cf req-ebad6537-0b56-41f9-9d20-c42568dda7f0 service nova] [instance: 5982cd5d-abf1-42d4-bb44-8d79de599f11] Updated VIF entry in instance network info cache for port 968c9c64-3e8d-442c-8090-cc34f396bc28. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 752.239789] env[63028]: DEBUG nova.network.neutron [req-bbed12ed-790d-44bb-a04e-d591b68f52cf req-ebad6537-0b56-41f9-9d20-c42568dda7f0 service nova] [instance: 5982cd5d-abf1-42d4-bb44-8d79de599f11] Updating instance_info_cache with network_info: [{"id": "968c9c64-3e8d-442c-8090-cc34f396bc28", "address": "fa:16:3e:f9:6a:80", "network": {"id": "47c482bc-2ff1-431d-8910-0bf36def79a2", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.160", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "96661ac8d4f04d6e97eea4809b444133", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56136ef6-99d7-4562-9a9f-d66fec951c5c", "external-id": "nsx-vlan-transportzone-32", "segmentation_id": 32, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap968c9c64-3e", "ovs_interfaceid": "968c9c64-3e8d-442c-8090-cc34f396bc28", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 752.252306] env[63028]: DEBUG oslo_vmware.api [None req-510611a2-5774-4ec1-8a0f-357586159619 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] Waiting for the task: (returnval){ [ 752.252306] env[63028]: value = "task-2735371" [ 752.252306] env[63028]: _type = "Task" [ 752.252306] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.263729] env[63028]: DEBUG oslo_vmware.api [None req-510611a2-5774-4ec1-8a0f-357586159619 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] Task: {'id': task-2735371, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.300643] env[63028]: INFO nova.compute.manager [None req-c3a09395-4017-42a7-a909-5c9706ea7012 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 1316318e-8dcf-4ac2-b40a-6a3ab6964997] Took 44.97 seconds to build instance. 
[ 752.303166] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7375b29d-7ac6-4525-bc12-7fc987c16da6 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Acquiring lock "0d96ba8e-b46b-48ae-957c-cdc49762c395" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 752.303503] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7375b29d-7ac6-4525-bc12-7fc987c16da6 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Lock "0d96ba8e-b46b-48ae-957c-cdc49762c395" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 752.303600] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7375b29d-7ac6-4525-bc12-7fc987c16da6 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Acquiring lock "0d96ba8e-b46b-48ae-957c-cdc49762c395-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 752.303763] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7375b29d-7ac6-4525-bc12-7fc987c16da6 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Lock "0d96ba8e-b46b-48ae-957c-cdc49762c395-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 752.307103] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7375b29d-7ac6-4525-bc12-7fc987c16da6 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Lock "0d96ba8e-b46b-48ae-957c-cdc49762c395-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 752.312022] env[63028]: INFO nova.compute.manager [None req-7375b29d-7ac6-4525-bc12-7fc987c16da6 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 0d96ba8e-b46b-48ae-957c-cdc49762c395] Terminating instance [ 752.346159] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d5a1942-23b0-4f22-aa2c-e5781fa94c61 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.357135] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5f14458-93a1-4c9e-9e11-e905732de3ea {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.364223] env[63028]: DEBUG nova.network.neutron [None req-c2083659-4645-4440-9683-4af8a1564415 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: 3e45e7f3-a34f-4eab-9fff-1c874c832e2a] Instance cache missing network info. 
{{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 752.396948] env[63028]: INFO nova.compute.manager [-] [instance: e2d39c43-6666-4fda-b8e2-485399c59e46] Took 2.40 seconds to deallocate network for instance. [ 752.400982] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-377f01d2-79a5-4981-86e2-f9eb81bb6543 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.415166] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6dc2022-7fe6-4e1f-ba08-b1b9a44bd9c3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.434166] env[63028]: DEBUG nova.compute.provider_tree [None req-4f6edbf0-2dbd-479d-b1c2-51e25413dafe tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 752.466561] env[63028]: DEBUG oslo_vmware.api [None req-0845dbb5-9e8a-45d4-8122-206916da704e tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]525280da-4f42-07f9-55de-2eb723c83b22, 'name': SearchDatastore_Task, 'duration_secs': 0.018023} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.466869] env[63028]: DEBUG oslo_concurrency.lockutils [None req-0845dbb5-9e8a-45d4-8122-206916da704e tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 752.467346] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-0845dbb5-9e8a-45d4-8122-206916da704e tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 5982cd5d-abf1-42d4-bb44-8d79de599f11] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 752.467615] env[63028]: DEBUG oslo_concurrency.lockutils [None req-0845dbb5-9e8a-45d4-8122-206916da704e tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 752.467767] env[63028]: DEBUG oslo_concurrency.lockutils [None req-0845dbb5-9e8a-45d4-8122-206916da704e tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 752.467972] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-0845dbb5-9e8a-45d4-8122-206916da704e tempest-ListImageFiltersTestJSON-852699088 
tempest-ListImageFiltersTestJSON-852699088-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 752.470559] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fdf2cf7d-25df-43e9-82f7-2a6e5acb7803 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.481192] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-0845dbb5-9e8a-45d4-8122-206916da704e tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 752.481427] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-0845dbb5-9e8a-45d4-8122-206916da704e tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 752.482263] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-09104a2c-420a-4411-8d05-094c40d80394 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.490759] env[63028]: DEBUG oslo_vmware.api [None req-0845dbb5-9e8a-45d4-8122-206916da704e tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Waiting for the task: (returnval){ [ 752.490759] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]520d0321-2a29-1af2-1d25-4f56d4ea2882" [ 752.490759] env[63028]: _type = "Task" [ 752.490759] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.499539] env[63028]: DEBUG oslo_vmware.api [None req-0845dbb5-9e8a-45d4-8122-206916da704e tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]520d0321-2a29-1af2-1d25-4f56d4ea2882, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.512068] env[63028]: DEBUG nova.compute.manager [None req-42a4e41f-5fdd-4dd8-bb5e-bc5dbf266ec6 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: 5a330ed9-c106-49f2-b524-a424e717b5ce] Start destroying the instance on the hypervisor. 
{{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 752.512305] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-42a4e41f-5fdd-4dd8-bb5e-bc5dbf266ec6 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: 5a330ed9-c106-49f2-b524-a424e717b5ce] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 752.513150] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-022c23fe-a6ae-49d7-a876-bf20ff6e0e46 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.521885] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-42a4e41f-5fdd-4dd8-bb5e-bc5dbf266ec6 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: 5a330ed9-c106-49f2-b524-a424e717b5ce] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 752.522190] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f909530f-2035-47a6-bf85-f7a657ccd7d2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.530469] env[63028]: DEBUG oslo_vmware.api [None req-42a4e41f-5fdd-4dd8-bb5e-bc5dbf266ec6 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Waiting for the task: (returnval){ [ 752.530469] env[63028]: value = "task-2735372" [ 752.530469] env[63028]: _type = "Task" [ 752.530469] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.547532] env[63028]: DEBUG oslo_vmware.api [None req-42a4e41f-5fdd-4dd8-bb5e-bc5dbf266ec6 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Task: {'id': task-2735372, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.592566] env[63028]: DEBUG nova.network.neutron [None req-c2083659-4645-4440-9683-4af8a1564415 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: 3e45e7f3-a34f-4eab-9fff-1c874c832e2a] Updating instance_info_cache with network_info: [{"id": "dfcc9f28-fdc3-4d22-a5a0-b2704f142312", "address": "fa:16:3e:4e:7e:e2", "network": {"id": "8cd8a274-fe00-4495-9dbd-201d72164415", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-450064933-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1d3152ab577947b28de82f4801285f8c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4576b9d4-535c-40aa-b078-246f671f216e", "external-id": "nsx-vlan-transportzone-27", "segmentation_id": 27, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdfcc9f28-fd", "ovs_interfaceid": "dfcc9f28-fdc3-4d22-a5a0-b2704f142312", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 752.608640] env[63028]: DEBUG oslo_vmware.api [None req-8e94b42e-cf5b-4db5-8cf9-7f06a8d06718 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52e9557e-7633-be57-482e-c94bed6bf1f8, 'name': SearchDatastore_Task, 'duration_secs': 0.022118} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.609700] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8e94b42e-cf5b-4db5-8cf9-7f06a8d06718 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 752.610038] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e94b42e-cf5b-4db5-8cf9-7f06a8d06718 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] 50e4934b-b9b1-4887-b5d1-95a37fbf4c41/50e4934b-b9b1-4887-b5d1-95a37fbf4c41.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 752.610376] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c82e59a2-0d4f-42b1-b568-6a60ee9b190c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.619491] env[63028]: DEBUG oslo_concurrency.lockutils [req-313824d6-62b4-4fbd-bd9a-c4d63d3f0dae req-5a1caa11-6ed8-4214-a0b0-23a4063832c6 service nova] Releasing lock "refresh_cache-600195de-ceb4-41a6-9ade-dda8b898e4db" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 752.620404] env[63028]: DEBUG oslo_vmware.api [None req-8e94b42e-cf5b-4db5-8cf9-7f06a8d06718 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Waiting for the task: (returnval){ [ 752.620404] env[63028]: value = "task-2735373" [ 752.620404] env[63028]: _type = "Task" [ 752.620404] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.630268] env[63028]: DEBUG oslo_vmware.api [None req-8e94b42e-cf5b-4db5-8cf9-7f06a8d06718 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2735373, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.746632] env[63028]: DEBUG oslo_concurrency.lockutils [req-bbed12ed-790d-44bb-a04e-d591b68f52cf req-ebad6537-0b56-41f9-9d20-c42568dda7f0 service nova] Releasing lock "refresh_cache-5982cd5d-abf1-42d4-bb44-8d79de599f11" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 752.747186] env[63028]: DEBUG nova.compute.manager [req-bbed12ed-790d-44bb-a04e-d591b68f52cf req-ebad6537-0b56-41f9-9d20-c42568dda7f0 service nova] [instance: 0e07a6cd-8c99-408d-95ba-63f7839c327f] Received event network-changed-cc6af35d-7e46-40e6-bc97-40efda1ab807 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 752.747470] env[63028]: DEBUG nova.compute.manager [req-bbed12ed-790d-44bb-a04e-d591b68f52cf req-ebad6537-0b56-41f9-9d20-c42568dda7f0 service nova] [instance: 0e07a6cd-8c99-408d-95ba-63f7839c327f] Refreshing instance network info cache due to event network-changed-cc6af35d-7e46-40e6-bc97-40efda1ab807. 
{{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 752.747826] env[63028]: DEBUG oslo_concurrency.lockutils [req-bbed12ed-790d-44bb-a04e-d591b68f52cf req-ebad6537-0b56-41f9-9d20-c42568dda7f0 service nova] Acquiring lock "refresh_cache-0e07a6cd-8c99-408d-95ba-63f7839c327f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 752.748047] env[63028]: DEBUG oslo_concurrency.lockutils [req-bbed12ed-790d-44bb-a04e-d591b68f52cf req-ebad6537-0b56-41f9-9d20-c42568dda7f0 service nova] Acquired lock "refresh_cache-0e07a6cd-8c99-408d-95ba-63f7839c327f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 752.748481] env[63028]: DEBUG nova.network.neutron [req-bbed12ed-790d-44bb-a04e-d591b68f52cf req-ebad6537-0b56-41f9-9d20-c42568dda7f0 service nova] [instance: 0e07a6cd-8c99-408d-95ba-63f7839c327f] Refreshing network info cache for port cc6af35d-7e46-40e6-bc97-40efda1ab807 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 752.763771] env[63028]: DEBUG oslo_vmware.api [None req-510611a2-5774-4ec1-8a0f-357586159619 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] Task: {'id': task-2735371, 'name': ReconfigVM_Task, 'duration_secs': 0.172994} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.764078] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-510611a2-5774-4ec1-8a0f-357586159619 tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] [instance: 0e07a6cd-8c99-408d-95ba-63f7839c327f] Reconfigured VM instance to set the machine id {{(pid=63028) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 752.806620] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c3a09395-4017-42a7-a909-5c9706ea7012 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Lock "1316318e-8dcf-4ac2-b40a-6a3ab6964997" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 86.914s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 752.816402] env[63028]: DEBUG nova.compute.manager [None req-7375b29d-7ac6-4525-bc12-7fc987c16da6 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 0d96ba8e-b46b-48ae-957c-cdc49762c395] Start destroying the instance on the hypervisor. 
{{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 752.816643] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-7375b29d-7ac6-4525-bc12-7fc987c16da6 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 0d96ba8e-b46b-48ae-957c-cdc49762c395] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 752.818086] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ca3659d-8d2f-492e-85b3-1d4f32354db7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.826455] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-7375b29d-7ac6-4525-bc12-7fc987c16da6 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 0d96ba8e-b46b-48ae-957c-cdc49762c395] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 752.826839] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-961505c1-31c1-4881-9cbb-72c3f886bf90 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.835024] env[63028]: DEBUG oslo_vmware.api [None req-7375b29d-7ac6-4525-bc12-7fc987c16da6 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Waiting for the task: (returnval){ [ 752.835024] env[63028]: value = "task-2735374" [ 752.835024] env[63028]: _type = "Task" [ 752.835024] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.849804] env[63028]: DEBUG oslo_vmware.api [None req-7375b29d-7ac6-4525-bc12-7fc987c16da6 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Task: {'id': task-2735374, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.907283] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f13d02a8-8d3e-4b78-b445-d8381137a85a tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 752.938404] env[63028]: DEBUG nova.scheduler.client.report [None req-4f6edbf0-2dbd-479d-b1c2-51e25413dafe tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 753.003135] env[63028]: DEBUG oslo_vmware.api [None req-0845dbb5-9e8a-45d4-8122-206916da704e tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]520d0321-2a29-1af2-1d25-4f56d4ea2882, 'name': SearchDatastore_Task, 'duration_secs': 0.012864} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.004244] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-46d270f7-6906-4d9e-945f-d1606d1abfdb {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.017194] env[63028]: DEBUG oslo_vmware.api [None req-0845dbb5-9e8a-45d4-8122-206916da704e tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Waiting for the task: (returnval){ [ 753.017194] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5254c2dd-d0eb-75b9-e7ac-45423fec4cd3" [ 753.017194] env[63028]: _type = "Task" [ 753.017194] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.030737] env[63028]: DEBUG oslo_vmware.api [None req-0845dbb5-9e8a-45d4-8122-206916da704e tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5254c2dd-d0eb-75b9-e7ac-45423fec4cd3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.039681] env[63028]: DEBUG oslo_vmware.api [None req-42a4e41f-5fdd-4dd8-bb5e-bc5dbf266ec6 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Task: {'id': task-2735372, 'name': PowerOffVM_Task, 'duration_secs': 0.240271} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.039919] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-42a4e41f-5fdd-4dd8-bb5e-bc5dbf266ec6 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: 5a330ed9-c106-49f2-b524-a424e717b5ce] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 753.040108] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-42a4e41f-5fdd-4dd8-bb5e-bc5dbf266ec6 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: 5a330ed9-c106-49f2-b524-a424e717b5ce] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 753.040366] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e5b0cdb7-8394-4663-a78e-994d7a485bb7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.099412] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c2083659-4645-4440-9683-4af8a1564415 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Releasing lock "refresh_cache-3e45e7f3-a34f-4eab-9fff-1c874c832e2a" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 753.099771] env[63028]: DEBUG nova.compute.manager [None req-c2083659-4645-4440-9683-4af8a1564415 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: 3e45e7f3-a34f-4eab-9fff-1c874c832e2a] Instance network_info: |[{"id": "dfcc9f28-fdc3-4d22-a5a0-b2704f142312", "address": "fa:16:3e:4e:7e:e2", "network": {"id": "8cd8a274-fe00-4495-9dbd-201d72164415", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-450064933-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1d3152ab577947b28de82f4801285f8c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4576b9d4-535c-40aa-b078-246f671f216e", "external-id": "nsx-vlan-transportzone-27", "segmentation_id": 27, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdfcc9f28-fd", "ovs_interfaceid": "dfcc9f28-fdc3-4d22-a5a0-b2704f142312", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 753.100250] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c2083659-4645-4440-9683-4af8a1564415 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: 3e45e7f3-a34f-4eab-9fff-1c874c832e2a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4e:7e:e2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4576b9d4-535c-40aa-b078-246f671f216e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'dfcc9f28-fdc3-4d22-a5a0-b2704f142312', 'vif_model': 'vmxnet3'}] 
{{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 753.111048] env[63028]: DEBUG oslo.service.loopingcall [None req-c2083659-4645-4440-9683-4af8a1564415 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 753.111498] env[63028]: DEBUG oslo_concurrency.lockutils [None req-b094771b-ac22-43cb-a63f-5342863501ae tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] Acquiring lock "0e07a6cd-8c99-408d-95ba-63f7839c327f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 753.111738] env[63028]: DEBUG oslo_concurrency.lockutils [None req-b094771b-ac22-43cb-a63f-5342863501ae tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] Lock "0e07a6cd-8c99-408d-95ba-63f7839c327f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 753.111934] env[63028]: DEBUG oslo_concurrency.lockutils [None req-b094771b-ac22-43cb-a63f-5342863501ae tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] Acquiring lock "0e07a6cd-8c99-408d-95ba-63f7839c327f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 753.112135] env[63028]: DEBUG oslo_concurrency.lockutils [None req-b094771b-ac22-43cb-a63f-5342863501ae tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] Lock "0e07a6cd-8c99-408d-95ba-63f7839c327f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 753.112306] env[63028]: DEBUG oslo_concurrency.lockutils [None req-b094771b-ac22-43cb-a63f-5342863501ae tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] Lock "0e07a6cd-8c99-408d-95ba-63f7839c327f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 753.114165] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3e45e7f3-a34f-4eab-9fff-1c874c832e2a] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 753.115935] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-39fa3f5a-aae1-4ce8-879e-4ca3122c4840 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.131635] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-42a4e41f-5fdd-4dd8-bb5e-bc5dbf266ec6 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: 5a330ed9-c106-49f2-b524-a424e717b5ce] 
Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 753.131635] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-42a4e41f-5fdd-4dd8-bb5e-bc5dbf266ec6 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: 5a330ed9-c106-49f2-b524-a424e717b5ce] Deleting contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 753.131635] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-42a4e41f-5fdd-4dd8-bb5e-bc5dbf266ec6 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Deleting the datastore file [datastore2] 5a330ed9-c106-49f2-b524-a424e717b5ce {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 753.132176] env[63028]: INFO nova.compute.manager [None req-b094771b-ac22-43cb-a63f-5342863501ae tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] [instance: 0e07a6cd-8c99-408d-95ba-63f7839c327f] Terminating instance [ 753.135119] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f2fddee0-d4e1-4f8c-a2ed-fd3a37fd26ee {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.148991] env[63028]: DEBUG oslo_vmware.api [None req-8e94b42e-cf5b-4db5-8cf9-7f06a8d06718 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2735373, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.152162] env[63028]: DEBUG oslo_vmware.api [None req-42a4e41f-5fdd-4dd8-bb5e-bc5dbf266ec6 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Waiting for the task: (returnval){ [ 753.152162] env[63028]: value = "task-2735376" [ 753.152162] env[63028]: _type = "Task" [ 753.152162] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.152516] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 753.152516] env[63028]: value = "task-2735377" [ 753.152516] env[63028]: _type = "Task" [ 753.152516] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.167628] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735377, 'name': CreateVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.171311] env[63028]: DEBUG oslo_vmware.api [None req-42a4e41f-5fdd-4dd8-bb5e-bc5dbf266ec6 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Task: {'id': task-2735376, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.309224] env[63028]: DEBUG nova.compute.manager [None req-efa009e1-31b4-4810-870c-cc093d5cbf71 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: 52b19182-a7e2-4461-b4eb-e6cd8a30024e] Starting instance... 
{{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 753.347895] env[63028]: DEBUG oslo_vmware.api [None req-7375b29d-7ac6-4525-bc12-7fc987c16da6 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Task: {'id': task-2735374, 'name': PowerOffVM_Task, 'duration_secs': 0.24969} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.348117] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-7375b29d-7ac6-4525-bc12-7fc987c16da6 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 0d96ba8e-b46b-48ae-957c-cdc49762c395] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 753.348295] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-7375b29d-7ac6-4525-bc12-7fc987c16da6 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 0d96ba8e-b46b-48ae-957c-cdc49762c395] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 753.350848] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-90177260-366d-45dc-9958-62433e5f50c6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.392117] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ded586eb-1e36-4d00-b6ab-5bb55325d953 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] Acquiring lock "c0db2b2a-9c06-409c-b48b-a0d5c127f2dc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 753.392117] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ded586eb-1e36-4d00-b6ab-5bb55325d953 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] Lock "c0db2b2a-9c06-409c-b48b-a0d5c127f2dc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 753.392117] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ded586eb-1e36-4d00-b6ab-5bb55325d953 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] Acquiring lock "c0db2b2a-9c06-409c-b48b-a0d5c127f2dc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 753.392117] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ded586eb-1e36-4d00-b6ab-5bb55325d953 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] Lock "c0db2b2a-9c06-409c-b48b-a0d5c127f2dc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 753.392117] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ded586eb-1e36-4d00-b6ab-5bb55325d953 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] Lock 
"c0db2b2a-9c06-409c-b48b-a0d5c127f2dc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 753.397171] env[63028]: INFO nova.compute.manager [None req-ded586eb-1e36-4d00-b6ab-5bb55325d953 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] [instance: c0db2b2a-9c06-409c-b48b-a0d5c127f2dc] Terminating instance [ 753.416396] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-7375b29d-7ac6-4525-bc12-7fc987c16da6 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 0d96ba8e-b46b-48ae-957c-cdc49762c395] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 753.416573] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-7375b29d-7ac6-4525-bc12-7fc987c16da6 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 0d96ba8e-b46b-48ae-957c-cdc49762c395] Deleting contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 753.416793] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-7375b29d-7ac6-4525-bc12-7fc987c16da6 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Deleting the datastore file [datastore1] 0d96ba8e-b46b-48ae-957c-cdc49762c395 {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 753.417072] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-347841e4-b542-4ad0-aed8-111757ef0da7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.424911] env[63028]: DEBUG oslo_vmware.api [None req-7375b29d-7ac6-4525-bc12-7fc987c16da6 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Waiting for the task: (returnval){ [ 753.424911] env[63028]: value = "task-2735379" [ 753.424911] env[63028]: _type = "Task" [ 753.424911] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.434326] env[63028]: DEBUG oslo_vmware.api [None req-7375b29d-7ac6-4525-bc12-7fc987c16da6 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Task: {'id': task-2735379, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.443791] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4f6edbf0-2dbd-479d-b1c2-51e25413dafe tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.742s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 753.444292] env[63028]: DEBUG nova.compute.manager [None req-4f6edbf0-2dbd-479d-b1c2-51e25413dafe tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: a4b0d948-d950-414a-b23f-faefa5ab038c] Start building networks asynchronously for instance. 
{{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 753.447058] env[63028]: DEBUG oslo_concurrency.lockutils [None req-70b97bb8-6f63-4727-9ea4-16bca122cb7f tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.050s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 753.447210] env[63028]: DEBUG nova.objects.instance [None req-70b97bb8-6f63-4727-9ea4-16bca122cb7f tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] Lazy-loading 'resources' on Instance uuid 4a782483-c24e-44db-b697-856c69cc4a13 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 753.513215] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d9433184-003c-41fe-b52d-bd01162a28cf tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] Acquiring lock "8f6beda6-0fc6-4d85-9f27-f4248adda8f3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 753.513579] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d9433184-003c-41fe-b52d-bd01162a28cf tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] Lock "8f6beda6-0fc6-4d85-9f27-f4248adda8f3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 753.514182] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d9433184-003c-41fe-b52d-bd01162a28cf tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] Acquiring lock "8f6beda6-0fc6-4d85-9f27-f4248adda8f3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 753.514182] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d9433184-003c-41fe-b52d-bd01162a28cf tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] Lock "8f6beda6-0fc6-4d85-9f27-f4248adda8f3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 753.514452] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d9433184-003c-41fe-b52d-bd01162a28cf tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] Lock "8f6beda6-0fc6-4d85-9f27-f4248adda8f3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 753.517226] env[63028]: INFO nova.compute.manager [None req-d9433184-003c-41fe-b52d-bd01162a28cf tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] [instance: 8f6beda6-0fc6-4d85-9f27-f4248adda8f3] Terminating instance [ 753.531724] env[63028]: DEBUG nova.compute.manager [req-43159bc1-42df-479b-ae1c-ea21742a0359 
req-78af0b2a-ad30-49d5-8e23-f7de0bfe7d18 service nova] [instance: 3e45e7f3-a34f-4eab-9fff-1c874c832e2a] Received event network-vif-plugged-dfcc9f28-fdc3-4d22-a5a0-b2704f142312 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 753.531724] env[63028]: DEBUG oslo_concurrency.lockutils [req-43159bc1-42df-479b-ae1c-ea21742a0359 req-78af0b2a-ad30-49d5-8e23-f7de0bfe7d18 service nova] Acquiring lock "3e45e7f3-a34f-4eab-9fff-1c874c832e2a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 753.531940] env[63028]: DEBUG oslo_concurrency.lockutils [req-43159bc1-42df-479b-ae1c-ea21742a0359 req-78af0b2a-ad30-49d5-8e23-f7de0bfe7d18 service nova] Lock "3e45e7f3-a34f-4eab-9fff-1c874c832e2a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 753.532132] env[63028]: DEBUG oslo_concurrency.lockutils [req-43159bc1-42df-479b-ae1c-ea21742a0359 req-78af0b2a-ad30-49d5-8e23-f7de0bfe7d18 service nova] Lock "3e45e7f3-a34f-4eab-9fff-1c874c832e2a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 753.532293] env[63028]: DEBUG nova.compute.manager [req-43159bc1-42df-479b-ae1c-ea21742a0359 req-78af0b2a-ad30-49d5-8e23-f7de0bfe7d18 service nova] [instance: 3e45e7f3-a34f-4eab-9fff-1c874c832e2a] No waiting events found dispatching network-vif-plugged-dfcc9f28-fdc3-4d22-a5a0-b2704f142312 {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 753.532452] env[63028]: WARNING nova.compute.manager [req-43159bc1-42df-479b-ae1c-ea21742a0359 req-78af0b2a-ad30-49d5-8e23-f7de0bfe7d18 service nova] [instance: 3e45e7f3-a34f-4eab-9fff-1c874c832e2a] Received unexpected event network-vif-plugged-dfcc9f28-fdc3-4d22-a5a0-b2704f142312 for instance with vm_state building and task_state spawning. [ 753.532607] env[63028]: DEBUG nova.compute.manager [req-43159bc1-42df-479b-ae1c-ea21742a0359 req-78af0b2a-ad30-49d5-8e23-f7de0bfe7d18 service nova] [instance: 3e45e7f3-a34f-4eab-9fff-1c874c832e2a] Received event network-changed-dfcc9f28-fdc3-4d22-a5a0-b2704f142312 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 753.532760] env[63028]: DEBUG nova.compute.manager [req-43159bc1-42df-479b-ae1c-ea21742a0359 req-78af0b2a-ad30-49d5-8e23-f7de0bfe7d18 service nova] [instance: 3e45e7f3-a34f-4eab-9fff-1c874c832e2a] Refreshing instance network info cache due to event network-changed-dfcc9f28-fdc3-4d22-a5a0-b2704f142312. 
{{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 753.533017] env[63028]: DEBUG oslo_concurrency.lockutils [req-43159bc1-42df-479b-ae1c-ea21742a0359 req-78af0b2a-ad30-49d5-8e23-f7de0bfe7d18 service nova] Acquiring lock "refresh_cache-3e45e7f3-a34f-4eab-9fff-1c874c832e2a" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 753.533098] env[63028]: DEBUG oslo_concurrency.lockutils [req-43159bc1-42df-479b-ae1c-ea21742a0359 req-78af0b2a-ad30-49d5-8e23-f7de0bfe7d18 service nova] Acquired lock "refresh_cache-3e45e7f3-a34f-4eab-9fff-1c874c832e2a" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 753.533228] env[63028]: DEBUG nova.network.neutron [req-43159bc1-42df-479b-ae1c-ea21742a0359 req-78af0b2a-ad30-49d5-8e23-f7de0bfe7d18 service nova] [instance: 3e45e7f3-a34f-4eab-9fff-1c874c832e2a] Refreshing network info cache for port dfcc9f28-fdc3-4d22-a5a0-b2704f142312 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 753.534666] env[63028]: DEBUG oslo_vmware.api [None req-0845dbb5-9e8a-45d4-8122-206916da704e tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5254c2dd-d0eb-75b9-e7ac-45423fec4cd3, 'name': SearchDatastore_Task, 'duration_secs': 0.032216} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.535540] env[63028]: DEBUG oslo_concurrency.lockutils [None req-0845dbb5-9e8a-45d4-8122-206916da704e tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 753.535802] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-0845dbb5-9e8a-45d4-8122-206916da704e tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] 5982cd5d-abf1-42d4-bb44-8d79de599f11/5982cd5d-abf1-42d4-bb44-8d79de599f11.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 753.536079] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-69c4c60b-6a09-423d-bf01-461b83ff0a63 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.549335] env[63028]: DEBUG oslo_vmware.api [None req-0845dbb5-9e8a-45d4-8122-206916da704e tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Waiting for the task: (returnval){ [ 753.549335] env[63028]: value = "task-2735380" [ 753.549335] env[63028]: _type = "Task" [ 753.549335] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.562220] env[63028]: DEBUG oslo_vmware.api [None req-0845dbb5-9e8a-45d4-8122-206916da704e tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Task: {'id': task-2735380, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.588347] env[63028]: DEBUG nova.network.neutron [req-bbed12ed-790d-44bb-a04e-d591b68f52cf req-ebad6537-0b56-41f9-9d20-c42568dda7f0 service nova] [instance: 0e07a6cd-8c99-408d-95ba-63f7839c327f] Updated VIF entry in instance network info cache for port cc6af35d-7e46-40e6-bc97-40efda1ab807. {{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 753.588496] env[63028]: DEBUG nova.network.neutron [req-bbed12ed-790d-44bb-a04e-d591b68f52cf req-ebad6537-0b56-41f9-9d20-c42568dda7f0 service nova] [instance: 0e07a6cd-8c99-408d-95ba-63f7839c327f] Updating instance_info_cache with network_info: [{"id": "cc6af35d-7e46-40e6-bc97-40efda1ab807", "address": "fa:16:3e:a3:ea:21", "network": {"id": "bbda1d1e-8f2d-4594-9711-57cd1f5c1d06", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-842445812-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.185", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7ed58d3e63604c2fac29e5744fd7f0bc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0685bd0b-3dbf-4a06-951c-c6a4726dd4b0", "external-id": "nsx-vlan-transportzone-661", "segmentation_id": 661, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc6af35d-7e", "ovs_interfaceid": "cc6af35d-7e46-40e6-bc97-40efda1ab807", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 753.605833] env[63028]: DEBUG nova.compute.manager [req-edbbdb5d-529d-44df-bc53-395cec533eaf req-2b83d064-9200-4254-b77e-66bcf2c412da service nova] [instance: e2d39c43-6666-4fda-b8e2-485399c59e46] Received event network-vif-deleted-e0e5a360-86c4-40e6-9e4a-06cd5da44d96 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 753.646666] env[63028]: DEBUG nova.compute.manager [None req-b094771b-ac22-43cb-a63f-5342863501ae tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] [instance: 0e07a6cd-8c99-408d-95ba-63f7839c327f] Start destroying the instance on the hypervisor. 
{{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 753.646666] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-b094771b-ac22-43cb-a63f-5342863501ae tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] [instance: 0e07a6cd-8c99-408d-95ba-63f7839c327f] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 753.646895] env[63028]: DEBUG oslo_vmware.api [None req-8e94b42e-cf5b-4db5-8cf9-7f06a8d06718 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2735373, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.697456} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.648032] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6432090e-a8f0-4e33-a78a-66e8828dd345 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.651151] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e94b42e-cf5b-4db5-8cf9-7f06a8d06718 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] 50e4934b-b9b1-4887-b5d1-95a37fbf4c41/50e4934b-b9b1-4887-b5d1-95a37fbf4c41.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 753.651421] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-8e94b42e-cf5b-4db5-8cf9-7f06a8d06718 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 50e4934b-b9b1-4887-b5d1-95a37fbf4c41] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 753.651700] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-099dcef2-420b-4bbe-9e8d-b3bf9dc83f94 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.663690] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-b094771b-ac22-43cb-a63f-5342863501ae tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] [instance: 0e07a6cd-8c99-408d-95ba-63f7839c327f] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 753.663884] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-27ccd678-ac11-42e6-8703-b475e988f23c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.667178] env[63028]: DEBUG oslo_vmware.api [None req-8e94b42e-cf5b-4db5-8cf9-7f06a8d06718 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Waiting for the task: (returnval){ [ 753.667178] env[63028]: value = "task-2735381" [ 753.667178] env[63028]: _type = "Task" [ 753.667178] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.675167] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735377, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.676516] env[63028]: DEBUG oslo_vmware.api [None req-42a4e41f-5fdd-4dd8-bb5e-bc5dbf266ec6 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Task: {'id': task-2735376, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.359941} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.681008] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-42a4e41f-5fdd-4dd8-bb5e-bc5dbf266ec6 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 753.681344] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-42a4e41f-5fdd-4dd8-bb5e-bc5dbf266ec6 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: 5a330ed9-c106-49f2-b524-a424e717b5ce] Deleted contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 753.681599] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-42a4e41f-5fdd-4dd8-bb5e-bc5dbf266ec6 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: 5a330ed9-c106-49f2-b524-a424e717b5ce] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 753.681877] env[63028]: INFO nova.compute.manager [None req-42a4e41f-5fdd-4dd8-bb5e-bc5dbf266ec6 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] [instance: 5a330ed9-c106-49f2-b524-a424e717b5ce] Took 1.17 seconds to destroy the instance on the hypervisor. [ 753.682215] env[63028]: DEBUG oslo.service.loopingcall [None req-42a4e41f-5fdd-4dd8-bb5e-bc5dbf266ec6 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 753.683803] env[63028]: DEBUG nova.compute.manager [-] [instance: 5a330ed9-c106-49f2-b524-a424e717b5ce] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 753.683974] env[63028]: DEBUG nova.network.neutron [-] [instance: 5a330ed9-c106-49f2-b524-a424e717b5ce] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 753.686459] env[63028]: DEBUG oslo_vmware.api [None req-b094771b-ac22-43cb-a63f-5342863501ae tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] Waiting for the task: (returnval){ [ 753.686459] env[63028]: value = "task-2735382" [ 753.686459] env[63028]: _type = "Task" [ 753.686459] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.696883] env[63028]: DEBUG oslo_vmware.api [None req-8e94b42e-cf5b-4db5-8cf9-7f06a8d06718 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2735381, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.704351] env[63028]: DEBUG oslo_vmware.api [None req-b094771b-ac22-43cb-a63f-5342863501ae tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] Task: {'id': task-2735382, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.833981] env[63028]: DEBUG oslo_concurrency.lockutils [None req-efa009e1-31b4-4810-870c-cc093d5cbf71 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 753.901328] env[63028]: DEBUG nova.compute.manager [None req-ded586eb-1e36-4d00-b6ab-5bb55325d953 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] [instance: c0db2b2a-9c06-409c-b48b-a0d5c127f2dc] Start destroying the instance on the hypervisor. {{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 753.901576] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-ded586eb-1e36-4d00-b6ab-5bb55325d953 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] [instance: c0db2b2a-9c06-409c-b48b-a0d5c127f2dc] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 753.902577] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf2d20a1-a3b0-4c73-90ac-de5abdfafbfd {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.913830] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-ded586eb-1e36-4d00-b6ab-5bb55325d953 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] [instance: c0db2b2a-9c06-409c-b48b-a0d5c127f2dc] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 753.914284] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0ebc817f-2ffc-48b8-9be4-fad0452b33cc {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.924397] env[63028]: DEBUG oslo_vmware.api [None req-ded586eb-1e36-4d00-b6ab-5bb55325d953 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] Waiting for the task: (returnval){ [ 753.924397] env[63028]: value = "task-2735383" [ 753.924397] env[63028]: _type = "Task" [ 753.924397] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.942873] env[63028]: DEBUG oslo_vmware.api [None req-ded586eb-1e36-4d00-b6ab-5bb55325d953 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] Task: {'id': task-2735383, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.947941] env[63028]: DEBUG oslo_vmware.api [None req-7375b29d-7ac6-4525-bc12-7fc987c16da6 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Task: {'id': task-2735379, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.287233} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.948296] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-7375b29d-7ac6-4525-bc12-7fc987c16da6 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 753.948563] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-7375b29d-7ac6-4525-bc12-7fc987c16da6 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 0d96ba8e-b46b-48ae-957c-cdc49762c395] Deleted contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 753.948848] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-7375b29d-7ac6-4525-bc12-7fc987c16da6 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 0d96ba8e-b46b-48ae-957c-cdc49762c395] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 753.949075] env[63028]: INFO nova.compute.manager [None req-7375b29d-7ac6-4525-bc12-7fc987c16da6 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 0d96ba8e-b46b-48ae-957c-cdc49762c395] Took 1.13 seconds to destroy the instance on the hypervisor. [ 753.949373] env[63028]: DEBUG oslo.service.loopingcall [None req-7375b29d-7ac6-4525-bc12-7fc987c16da6 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 753.950621] env[63028]: DEBUG nova.compute.utils [None req-4f6edbf0-2dbd-479d-b1c2-51e25413dafe tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 753.954795] env[63028]: DEBUG nova.compute.manager [-] [instance: 0d96ba8e-b46b-48ae-957c-cdc49762c395] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 753.954996] env[63028]: DEBUG nova.network.neutron [-] [instance: 0d96ba8e-b46b-48ae-957c-cdc49762c395] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 753.957430] env[63028]: DEBUG nova.compute.manager [None req-4f6edbf0-2dbd-479d-b1c2-51e25413dafe tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: a4b0d948-d950-414a-b23f-faefa5ab038c] Allocating IP information in the background. 
{{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 753.957631] env[63028]: DEBUG nova.network.neutron [None req-4f6edbf0-2dbd-479d-b1c2-51e25413dafe tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: a4b0d948-d950-414a-b23f-faefa5ab038c] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 754.029832] env[63028]: DEBUG nova.policy [None req-4f6edbf0-2dbd-479d-b1c2-51e25413dafe tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b4dfc1e9e9e74072949517c7b930c147', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '43b7c1be3c4343a4b4f288a355170873', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 754.033423] env[63028]: DEBUG nova.compute.manager [None req-d9433184-003c-41fe-b52d-bd01162a28cf tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] [instance: 8f6beda6-0fc6-4d85-9f27-f4248adda8f3] Start destroying the instance on the hypervisor. {{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 754.033423] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-d9433184-003c-41fe-b52d-bd01162a28cf tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] [instance: 8f6beda6-0fc6-4d85-9f27-f4248adda8f3] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 754.034495] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5d2a2df-ca73-4b7d-98c3-a128cf727c89 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.051423] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9433184-003c-41fe-b52d-bd01162a28cf tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] [instance: 8f6beda6-0fc6-4d85-9f27-f4248adda8f3] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 754.052100] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ecb63bfa-e74f-48ce-934f-fd0489dc4900 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.070160] env[63028]: DEBUG oslo_vmware.api [None req-0845dbb5-9e8a-45d4-8122-206916da704e tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Task: {'id': task-2735380, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.071521] env[63028]: DEBUG oslo_vmware.api [None req-d9433184-003c-41fe-b52d-bd01162a28cf tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] Waiting for the task: (returnval){ [ 754.071521] env[63028]: value = "task-2735384" [ 754.071521] env[63028]: _type = "Task" [ 754.071521] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.085846] env[63028]: DEBUG oslo_vmware.api [None req-d9433184-003c-41fe-b52d-bd01162a28cf tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] Task: {'id': task-2735384, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.091160] env[63028]: DEBUG oslo_concurrency.lockutils [req-bbed12ed-790d-44bb-a04e-d591b68f52cf req-ebad6537-0b56-41f9-9d20-c42568dda7f0 service nova] Releasing lock "refresh_cache-0e07a6cd-8c99-408d-95ba-63f7839c327f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 754.091546] env[63028]: DEBUG nova.compute.manager [req-bbed12ed-790d-44bb-a04e-d591b68f52cf req-ebad6537-0b56-41f9-9d20-c42568dda7f0 service nova] [instance: e2d39c43-6666-4fda-b8e2-485399c59e46] Received event network-vif-deleted-d4e7f627-9b53-4218-b7e5-a5c06ee4d53e {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 754.091641] env[63028]: INFO nova.compute.manager [req-bbed12ed-790d-44bb-a04e-d591b68f52cf req-ebad6537-0b56-41f9-9d20-c42568dda7f0 service nova] [instance: e2d39c43-6666-4fda-b8e2-485399c59e46] Neutron deleted interface d4e7f627-9b53-4218-b7e5-a5c06ee4d53e; detaching it from the instance and deleting it from the info cache [ 754.091933] env[63028]: DEBUG nova.network.neutron [req-bbed12ed-790d-44bb-a04e-d591b68f52cf req-ebad6537-0b56-41f9-9d20-c42568dda7f0 service nova] [instance: e2d39c43-6666-4fda-b8e2-485399c59e46] Updating instance_info_cache with network_info: [{"id": "fbe61f8e-ac8c-487d-95a7-fa4740f61aa0", "address": "fa:16:3e:b6:4e:03", "network": {"id": "4f362bbc-35c9-4673-b6a6-8a7ea5638da5", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-92092290", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "97060d5fb7e8454eadaf5dc9b426a248", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e547d234-640c-449b-8279-0b16f75d6627", "external-id": "nsx-vlan-transportzone-539", "segmentation_id": 539, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfbe61f8e-ac", "ovs_interfaceid": "fbe61f8e-ac8c-487d-95a7-fa4740f61aa0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "e0e5a360-86c4-40e6-9e4a-06cd5da44d96", "address": "fa:16:3e:b7:6d:92", "network": {"id": "f31308ae-79dd-4944-81e2-153058a3ea34", "bridge": "br-int", "label": 
"tempest-ServersTestMultiNic-1582446931", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.139", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "97060d5fb7e8454eadaf5dc9b426a248", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2624812a-9f9c-461d-8b5f-79bea90c7ad3", "external-id": "nsx-vlan-transportzone-123", "segmentation_id": 123, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape0e5a360-86", "ovs_interfaceid": "e0e5a360-86c4-40e6-9e4a-06cd5da44d96", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 754.166627] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735377, 'name': CreateVM_Task, 'duration_secs': 0.522878} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 754.166866] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3e45e7f3-a34f-4eab-9fff-1c874c832e2a] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 754.171025] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c2083659-4645-4440-9683-4af8a1564415 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 754.171025] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c2083659-4645-4440-9683-4af8a1564415 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 754.171025] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c2083659-4645-4440-9683-4af8a1564415 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 754.171025] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1db8fec9-eeae-45a4-9d4a-d531502397b6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.177230] env[63028]: DEBUG oslo_vmware.api [None req-c2083659-4645-4440-9683-4af8a1564415 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Waiting for the task: (returnval){ [ 754.177230] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]521ad363-5a8c-2e39-98fa-4e6ab2ef79e5" [ 754.177230] env[63028]: _type = "Task" [ 754.177230] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.195893] env[63028]: DEBUG oslo_vmware.api [None req-8e94b42e-cf5b-4db5-8cf9-7f06a8d06718 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2735381, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.107551} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 754.199962] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-8e94b42e-cf5b-4db5-8cf9-7f06a8d06718 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 50e4934b-b9b1-4887-b5d1-95a37fbf4c41] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 754.200428] env[63028]: DEBUG oslo_vmware.api [None req-c2083659-4645-4440-9683-4af8a1564415 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]521ad363-5a8c-2e39-98fa-4e6ab2ef79e5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.205168] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-556ab622-a430-40fa-92b4-3ab7b2719870 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.211879] env[63028]: DEBUG oslo_vmware.api [None req-b094771b-ac22-43cb-a63f-5342863501ae tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] Task: {'id': task-2735382, 'name': PowerOffVM_Task, 'duration_secs': 0.294941} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 754.212654] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-b094771b-ac22-43cb-a63f-5342863501ae tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] [instance: 0e07a6cd-8c99-408d-95ba-63f7839c327f] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 754.212859] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-b094771b-ac22-43cb-a63f-5342863501ae tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] [instance: 0e07a6cd-8c99-408d-95ba-63f7839c327f] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 754.213156] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-46313c30-5914-4b40-9301-47d5828a6e27 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.238463] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-8e94b42e-cf5b-4db5-8cf9-7f06a8d06718 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 50e4934b-b9b1-4887-b5d1-95a37fbf4c41] Reconfiguring VM instance instance-0000002b to attach disk [datastore1] 50e4934b-b9b1-4887-b5d1-95a37fbf4c41/50e4934b-b9b1-4887-b5d1-95a37fbf4c41.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 754.244238] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2c2c12a1-8f58-444d-8b19-4097aaa98b9c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.271154] env[63028]: DEBUG oslo_vmware.api [None req-8e94b42e-cf5b-4db5-8cf9-7f06a8d06718 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Waiting for the task: (returnval){ [ 754.271154] env[63028]: value = "task-2735386" [ 754.271154] env[63028]: _type = "Task" [ 754.271154] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.282583] env[63028]: DEBUG oslo_vmware.api [None req-8e94b42e-cf5b-4db5-8cf9-7f06a8d06718 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2735386, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.317664] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-b094771b-ac22-43cb-a63f-5342863501ae tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] [instance: 0e07a6cd-8c99-408d-95ba-63f7839c327f] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 754.318077] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-b094771b-ac22-43cb-a63f-5342863501ae tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] [instance: 0e07a6cd-8c99-408d-95ba-63f7839c327f] Deleting contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 754.318434] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-b094771b-ac22-43cb-a63f-5342863501ae tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] Deleting the datastore file [datastore1] 0e07a6cd-8c99-408d-95ba-63f7839c327f {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 754.322010] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5bb30fbb-897f-48df-86a8-96e52ba81680 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.334814] env[63028]: DEBUG oslo_vmware.api [None req-b094771b-ac22-43cb-a63f-5342863501ae tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] Waiting for the task: (returnval){ [ 754.334814] env[63028]: value = "task-2735387" [ 754.334814] env[63028]: _type = "Task" [ 754.334814] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.353790] env[63028]: DEBUG oslo_vmware.api [None req-b094771b-ac22-43cb-a63f-5342863501ae tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] Task: {'id': task-2735387, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.439903] env[63028]: DEBUG oslo_vmware.api [None req-ded586eb-1e36-4d00-b6ab-5bb55325d953 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] Task: {'id': task-2735383, 'name': PowerOffVM_Task, 'duration_secs': 0.341224} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 754.440416] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-ded586eb-1e36-4d00-b6ab-5bb55325d953 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] [instance: c0db2b2a-9c06-409c-b48b-a0d5c127f2dc] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 754.440416] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-ded586eb-1e36-4d00-b6ab-5bb55325d953 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] [instance: c0db2b2a-9c06-409c-b48b-a0d5c127f2dc] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 754.440740] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-40207dfb-91dc-4cd1-abfe-0aa39544cc9d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.461846] env[63028]: DEBUG nova.compute.manager [None req-4f6edbf0-2dbd-479d-b1c2-51e25413dafe tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: a4b0d948-d950-414a-b23f-faefa5ab038c] Start building block device mappings for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 754.531160] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-ded586eb-1e36-4d00-b6ab-5bb55325d953 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] [instance: c0db2b2a-9c06-409c-b48b-a0d5c127f2dc] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 754.531578] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-ded586eb-1e36-4d00-b6ab-5bb55325d953 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] [instance: c0db2b2a-9c06-409c-b48b-a0d5c127f2dc] Deleting contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 754.531771] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-ded586eb-1e36-4d00-b6ab-5bb55325d953 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] Deleting the datastore file [datastore2] c0db2b2a-9c06-409c-b48b-a0d5c127f2dc {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 754.532075] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6b763657-da94-477b-adcd-b4507840c05b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.542257] env[63028]: DEBUG oslo_vmware.api [None req-ded586eb-1e36-4d00-b6ab-5bb55325d953 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] Waiting for the task: (returnval){ [ 754.542257] env[63028]: value = "task-2735389" [ 754.542257] env[63028]: _type = "Task" [ 754.542257] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.550827] env[63028]: DEBUG oslo_vmware.api [None req-ded586eb-1e36-4d00-b6ab-5bb55325d953 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] Task: {'id': task-2735389, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.562479] env[63028]: DEBUG oslo_vmware.api [None req-0845dbb5-9e8a-45d4-8122-206916da704e tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Task: {'id': task-2735380, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.838971} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 754.562770] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-0845dbb5-9e8a-45d4-8122-206916da704e tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] 5982cd5d-abf1-42d4-bb44-8d79de599f11/5982cd5d-abf1-42d4-bb44-8d79de599f11.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 754.563252] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-0845dbb5-9e8a-45d4-8122-206916da704e tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 5982cd5d-abf1-42d4-bb44-8d79de599f11] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 754.568919] env[63028]: DEBUG nova.network.neutron [req-43159bc1-42df-479b-ae1c-ea21742a0359 req-78af0b2a-ad30-49d5-8e23-f7de0bfe7d18 service nova] [instance: 3e45e7f3-a34f-4eab-9fff-1c874c832e2a] Updated VIF entry in instance network info cache for port dfcc9f28-fdc3-4d22-a5a0-b2704f142312. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 754.568919] env[63028]: DEBUG nova.network.neutron [req-43159bc1-42df-479b-ae1c-ea21742a0359 req-78af0b2a-ad30-49d5-8e23-f7de0bfe7d18 service nova] [instance: 3e45e7f3-a34f-4eab-9fff-1c874c832e2a] Updating instance_info_cache with network_info: [{"id": "dfcc9f28-fdc3-4d22-a5a0-b2704f142312", "address": "fa:16:3e:4e:7e:e2", "network": {"id": "8cd8a274-fe00-4495-9dbd-201d72164415", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-450064933-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1d3152ab577947b28de82f4801285f8c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4576b9d4-535c-40aa-b078-246f671f216e", "external-id": "nsx-vlan-transportzone-27", "segmentation_id": 27, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdfcc9f28-fd", "ovs_interfaceid": "dfcc9f28-fdc3-4d22-a5a0-b2704f142312", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 754.568919] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-02b67c95-4ac7-409b-a92f-b23c179ef298 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.581558] env[63028]: DEBUG oslo_vmware.api [None req-0845dbb5-9e8a-45d4-8122-206916da704e tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Waiting for the task: (returnval){ [ 754.581558] env[63028]: value = "task-2735390" [ 754.581558] env[63028]: _type = "Task" [ 754.581558] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.593778] env[63028]: DEBUG nova.network.neutron [-] [instance: 5a330ed9-c106-49f2-b524-a424e717b5ce] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 754.595349] env[63028]: DEBUG oslo_vmware.api [None req-d9433184-003c-41fe-b52d-bd01162a28cf tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] Task: {'id': task-2735384, 'name': PowerOffVM_Task, 'duration_secs': 0.279678} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 754.600663] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9433184-003c-41fe-b52d-bd01162a28cf tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] [instance: 8f6beda6-0fc6-4d85-9f27-f4248adda8f3] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 754.600949] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-d9433184-003c-41fe-b52d-bd01162a28cf tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] [instance: 8f6beda6-0fc6-4d85-9f27-f4248adda8f3] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 754.602310] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ca3496c5-adb7-4a89-94ab-5833dce9a4cb {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.604272] env[63028]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c71145ae-1a85-44e8-96f7-c780fedde10f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.612506] env[63028]: DEBUG oslo_vmware.api [None req-0845dbb5-9e8a-45d4-8122-206916da704e tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Task: {'id': task-2735390, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.625795] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f68d3427-258e-478c-a2f5-1444c138cd66 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.672984] env[63028]: DEBUG nova.compute.manager [req-bbed12ed-790d-44bb-a04e-d591b68f52cf req-ebad6537-0b56-41f9-9d20-c42568dda7f0 service nova] [instance: e2d39c43-6666-4fda-b8e2-485399c59e46] Detach interface failed, port_id=d4e7f627-9b53-4218-b7e5-a5c06ee4d53e, reason: Instance e2d39c43-6666-4fda-b8e2-485399c59e46 could not be found. 
{{(pid=63028) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 754.673216] env[63028]: DEBUG nova.compute.manager [req-bbed12ed-790d-44bb-a04e-d591b68f52cf req-ebad6537-0b56-41f9-9d20-c42568dda7f0 service nova] [instance: e2d39c43-6666-4fda-b8e2-485399c59e46] Received event network-vif-deleted-fbe61f8e-ac8c-487d-95a7-fa4740f61aa0 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 754.673401] env[63028]: INFO nova.compute.manager [req-bbed12ed-790d-44bb-a04e-d591b68f52cf req-ebad6537-0b56-41f9-9d20-c42568dda7f0 service nova] [instance: e2d39c43-6666-4fda-b8e2-485399c59e46] Neutron deleted interface fbe61f8e-ac8c-487d-95a7-fa4740f61aa0; detaching it from the instance and deleting it from the info cache [ 754.673659] env[63028]: DEBUG nova.network.neutron [req-bbed12ed-790d-44bb-a04e-d591b68f52cf req-ebad6537-0b56-41f9-9d20-c42568dda7f0 service nova] [instance: e2d39c43-6666-4fda-b8e2-485399c59e46] Updating instance_info_cache with network_info: [{"id": "e0e5a360-86c4-40e6-9e4a-06cd5da44d96", "address": "fa:16:3e:b7:6d:92", "network": {"id": "f31308ae-79dd-4944-81e2-153058a3ea34", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1582446931", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.139", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "97060d5fb7e8454eadaf5dc9b426a248", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2624812a-9f9c-461d-8b5f-79bea90c7ad3", "external-id": "nsx-vlan-transportzone-123", "segmentation_id": 123, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape0e5a360-86", "ovs_interfaceid": "e0e5a360-86c4-40e6-9e4a-06cd5da44d96", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 754.685752] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-d9433184-003c-41fe-b52d-bd01162a28cf tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] [instance: 8f6beda6-0fc6-4d85-9f27-f4248adda8f3] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 754.685752] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-d9433184-003c-41fe-b52d-bd01162a28cf tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] [instance: 8f6beda6-0fc6-4d85-9f27-f4248adda8f3] Deleting contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 754.685752] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-d9433184-003c-41fe-b52d-bd01162a28cf tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] Deleting the datastore file [datastore1] 8f6beda6-0fc6-4d85-9f27-f4248adda8f3 {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 754.689056] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with 
opID=oslo.vmware-14d8e452-b47b-4639-873e-c90224497c86 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.698047] env[63028]: DEBUG oslo_vmware.api [None req-c2083659-4645-4440-9683-4af8a1564415 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]521ad363-5a8c-2e39-98fa-4e6ab2ef79e5, 'name': SearchDatastore_Task, 'duration_secs': 0.047197} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 754.699356] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c2083659-4645-4440-9683-4af8a1564415 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 754.699594] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c2083659-4645-4440-9683-4af8a1564415 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: 3e45e7f3-a34f-4eab-9fff-1c874c832e2a] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 754.699886] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c2083659-4645-4440-9683-4af8a1564415 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 754.700077] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c2083659-4645-4440-9683-4af8a1564415 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 754.700266] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-c2083659-4645-4440-9683-4af8a1564415 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 754.700584] env[63028]: DEBUG oslo_vmware.api [None req-d9433184-003c-41fe-b52d-bd01162a28cf tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] Waiting for the task: (returnval){ [ 754.700584] env[63028]: value = "task-2735392" [ 754.700584] env[63028]: _type = "Task" [ 754.700584] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.704761] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-29ab463e-1442-43d5-97ed-1944c7d699dc {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.716218] env[63028]: DEBUG oslo_vmware.api [None req-d9433184-003c-41fe-b52d-bd01162a28cf tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] Task: {'id': task-2735392, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.729762] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-c2083659-4645-4440-9683-4af8a1564415 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 754.729762] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c2083659-4645-4440-9683-4af8a1564415 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 754.729762] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-df0276c2-7c9a-40d8-b9a8-a407e94b3baf {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.733636] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da3ba555-5944-4175-9218-02a1da68e0a4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.740897] env[63028]: DEBUG oslo_vmware.api [None req-c2083659-4645-4440-9683-4af8a1564415 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Waiting for the task: (returnval){ [ 754.740897] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5242f487-694d-b416-5c15-09d256c06f9a" [ 754.740897] env[63028]: _type = "Task" [ 754.740897] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.746869] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c69eccda-71a3-46f4-b083-6d158c747a98 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.758397] env[63028]: DEBUG oslo_vmware.api [None req-c2083659-4645-4440-9683-4af8a1564415 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5242f487-694d-b416-5c15-09d256c06f9a, 'name': SearchDatastore_Task, 'duration_secs': 0.014694} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 754.787076] env[63028]: DEBUG nova.network.neutron [None req-4f6edbf0-2dbd-479d-b1c2-51e25413dafe tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: a4b0d948-d950-414a-b23f-faefa5ab038c] Successfully created port: 5dce79e8-b6a0-4077-a879-e3221a0a358c {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 754.789010] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7fbe43fc-c243-4c84-92f2-a4528b4470c2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.796121] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-beda1ee8-8945-4de4-b6a5-a8ae7f2a70ad {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.804528] env[63028]: DEBUG oslo_vmware.api [None req-c2083659-4645-4440-9683-4af8a1564415 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Waiting for the task: (returnval){ [ 754.804528] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]526fd4d4-cf9b-0c6c-0bb3-b111de319738" [ 754.804528] env[63028]: _type = "Task" [ 754.804528] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.804797] env[63028]: DEBUG oslo_vmware.api [None req-8e94b42e-cf5b-4db5-8cf9-7f06a8d06718 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2735386, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.810689] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3819f8b5-cc49-4613-931e-039d8e8fe333 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.826222] env[63028]: DEBUG nova.compute.provider_tree [None req-70b97bb8-6f63-4727-9ea4-16bca122cb7f tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 754.830617] env[63028]: DEBUG oslo_vmware.api [None req-c2083659-4645-4440-9683-4af8a1564415 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]526fd4d4-cf9b-0c6c-0bb3-b111de319738, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.844709] env[63028]: DEBUG oslo_vmware.api [None req-b094771b-ac22-43cb-a63f-5342863501ae tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] Task: {'id': task-2735387, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.386931} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 754.844799] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-b094771b-ac22-43cb-a63f-5342863501ae tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 754.844987] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-b094771b-ac22-43cb-a63f-5342863501ae tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] [instance: 0e07a6cd-8c99-408d-95ba-63f7839c327f] Deleted contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 754.845186] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-b094771b-ac22-43cb-a63f-5342863501ae tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] [instance: 0e07a6cd-8c99-408d-95ba-63f7839c327f] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 754.845356] env[63028]: INFO nova.compute.manager [None req-b094771b-ac22-43cb-a63f-5342863501ae tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] [instance: 0e07a6cd-8c99-408d-95ba-63f7839c327f] Took 1.20 seconds to destroy the instance on the hypervisor. [ 754.845593] env[63028]: DEBUG oslo.service.loopingcall [None req-b094771b-ac22-43cb-a63f-5342863501ae tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 754.845773] env[63028]: DEBUG nova.compute.manager [-] [instance: 0e07a6cd-8c99-408d-95ba-63f7839c327f] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 754.845864] env[63028]: DEBUG nova.network.neutron [-] [instance: 0e07a6cd-8c99-408d-95ba-63f7839c327f] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 754.934189] env[63028]: DEBUG nova.network.neutron [-] [instance: 0d96ba8e-b46b-48ae-957c-cdc49762c395] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 755.053259] env[63028]: DEBUG oslo_vmware.api [None req-ded586eb-1e36-4d00-b6ab-5bb55325d953 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] Task: {'id': task-2735389, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.500496} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.057040] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-ded586eb-1e36-4d00-b6ab-5bb55325d953 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 755.057040] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-ded586eb-1e36-4d00-b6ab-5bb55325d953 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] [instance: c0db2b2a-9c06-409c-b48b-a0d5c127f2dc] Deleted contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 755.057040] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-ded586eb-1e36-4d00-b6ab-5bb55325d953 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] [instance: c0db2b2a-9c06-409c-b48b-a0d5c127f2dc] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 755.057040] env[63028]: INFO nova.compute.manager [None req-ded586eb-1e36-4d00-b6ab-5bb55325d953 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] [instance: c0db2b2a-9c06-409c-b48b-a0d5c127f2dc] Took 1.15 seconds to destroy the instance on the hypervisor. [ 755.057040] env[63028]: DEBUG oslo.service.loopingcall [None req-ded586eb-1e36-4d00-b6ab-5bb55325d953 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 755.057040] env[63028]: DEBUG nova.compute.manager [-] [instance: c0db2b2a-9c06-409c-b48b-a0d5c127f2dc] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 755.057040] env[63028]: DEBUG nova.network.neutron [-] [instance: c0db2b2a-9c06-409c-b48b-a0d5c127f2dc] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 755.075853] env[63028]: DEBUG oslo_concurrency.lockutils [req-43159bc1-42df-479b-ae1c-ea21742a0359 req-78af0b2a-ad30-49d5-8e23-f7de0bfe7d18 service nova] Releasing lock "refresh_cache-3e45e7f3-a34f-4eab-9fff-1c874c832e2a" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 755.093578] env[63028]: DEBUG oslo_vmware.api [None req-0845dbb5-9e8a-45d4-8122-206916da704e tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Task: {'id': task-2735390, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.089091} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.093909] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-0845dbb5-9e8a-45d4-8122-206916da704e tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 5982cd5d-abf1-42d4-bb44-8d79de599f11] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 755.094724] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf6dff67-9e46-4b0d-8660-14bd4d11f2be {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.103311] env[63028]: INFO nova.compute.manager [-] [instance: 5a330ed9-c106-49f2-b524-a424e717b5ce] Took 1.42 seconds to deallocate network for instance. [ 755.120420] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-0845dbb5-9e8a-45d4-8122-206916da704e tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 5982cd5d-abf1-42d4-bb44-8d79de599f11] Reconfiguring VM instance instance-0000002e to attach disk [datastore2] 5982cd5d-abf1-42d4-bb44-8d79de599f11/5982cd5d-abf1-42d4-bb44-8d79de599f11.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 755.123526] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-eeb934a1-7dca-4350-a49a-264ccdd07cd9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.145495] env[63028]: DEBUG oslo_vmware.api [None req-0845dbb5-9e8a-45d4-8122-206916da704e tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Waiting for the task: (returnval){ [ 755.145495] env[63028]: value = "task-2735393" [ 755.145495] env[63028]: _type = "Task" [ 755.145495] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.154463] env[63028]: DEBUG oslo_vmware.api [None req-0845dbb5-9e8a-45d4-8122-206916da704e tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Task: {'id': task-2735393, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.176452] env[63028]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-aa6a5148-3b5c-4c10-abd6-f5f1fb3d7942 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.189276] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdfdaacc-08d5-40fd-b267-afb3e55eb5bb {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.214885] env[63028]: DEBUG oslo_vmware.api [None req-d9433184-003c-41fe-b52d-bd01162a28cf tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] Task: {'id': task-2735392, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.231587] env[63028]: DEBUG nova.compute.manager [req-bbed12ed-790d-44bb-a04e-d591b68f52cf req-ebad6537-0b56-41f9-9d20-c42568dda7f0 service nova] [instance: e2d39c43-6666-4fda-b8e2-485399c59e46] Detach interface failed, port_id=fbe61f8e-ac8c-487d-95a7-fa4740f61aa0, reason: Instance e2d39c43-6666-4fda-b8e2-485399c59e46 could not be found. {{(pid=63028) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 755.296343] env[63028]: DEBUG oslo_vmware.api [None req-8e94b42e-cf5b-4db5-8cf9-7f06a8d06718 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2735386, 'name': ReconfigVM_Task, 'duration_secs': 0.833348} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.296665] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-8e94b42e-cf5b-4db5-8cf9-7f06a8d06718 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 50e4934b-b9b1-4887-b5d1-95a37fbf4c41] Reconfigured VM instance instance-0000002b to attach disk [datastore1] 50e4934b-b9b1-4887-b5d1-95a37fbf4c41/50e4934b-b9b1-4887-b5d1-95a37fbf4c41.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 755.297364] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bdf54f45-22d9-4558-bf1a-d2c7e24fed3f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.305577] env[63028]: DEBUG oslo_vmware.api [None req-8e94b42e-cf5b-4db5-8cf9-7f06a8d06718 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Waiting for the task: (returnval){ [ 755.305577] env[63028]: value = "task-2735394" [ 755.305577] env[63028]: _type = "Task" [ 755.305577] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.322025] env[63028]: DEBUG oslo_vmware.api [None req-c2083659-4645-4440-9683-4af8a1564415 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]526fd4d4-cf9b-0c6c-0bb3-b111de319738, 'name': SearchDatastore_Task, 'duration_secs': 0.035757} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.322652] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c2083659-4645-4440-9683-4af8a1564415 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 755.322652] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2083659-4645-4440-9683-4af8a1564415 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] 3e45e7f3-a34f-4eab-9fff-1c874c832e2a/3e45e7f3-a34f-4eab-9fff-1c874c832e2a.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 755.322953] env[63028]: DEBUG oslo_vmware.api [None req-8e94b42e-cf5b-4db5-8cf9-7f06a8d06718 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2735394, 'name': Rename_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.322953] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-35b9f21d-5a1d-44e0-b4aa-5321120f904a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.332051] env[63028]: DEBUG nova.scheduler.client.report [None req-70b97bb8-6f63-4727-9ea4-16bca122cb7f tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 755.336086] env[63028]: DEBUG oslo_vmware.api [None req-c2083659-4645-4440-9683-4af8a1564415 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Waiting for the task: (returnval){ [ 755.336086] env[63028]: value = "task-2735395" [ 755.336086] env[63028]: _type = "Task" [ 755.336086] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.347915] env[63028]: DEBUG oslo_vmware.api [None req-c2083659-4645-4440-9683-4af8a1564415 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': task-2735395, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.437362] env[63028]: INFO nova.compute.manager [-] [instance: 0d96ba8e-b46b-48ae-957c-cdc49762c395] Took 1.48 seconds to deallocate network for instance. 
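
The "Inventory has not changed" entry above reports the provider's resources (VCPU: total 48, reserved 0, allocation_ratio 4.0, max_unit 16; MEMORY_MB: total 196590, reserved 512, allocation_ratio 1.0; DISK_GB: total 400, reserved 0, allocation_ratio 1.0). A minimal sketch of the arithmetic behind those fields, using only the figures from that entry -- the helper name is invented for illustration and is not Nova or Placement code:

# Sketch only: schedulable capacity per resource class is commonly derived as
# (total - reserved) * allocation_ratio; max_unit then caps any single allocation.
# Figures copied from the "Inventory has not changed" entry above.
def usable_capacity(total: int, reserved: int, allocation_ratio: float) -> float:
    return (total - reserved) * allocation_ratio

inventory = {
    "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0},        # max_unit 16
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB": {"total": 400, "reserved": 0, "allocation_ratio": 1.0},
}

for rc, inv in inventory.items():
    print(rc, usable_capacity(inv["total"], inv["reserved"], inv["allocation_ratio"]))
# VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0

Read this way, the 48 host cores with a 4.0 allocation ratio advertise as 192 schedulable VCPUs, while max_unit=16 limits what any one instance may claim.
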
[ 755.469495] env[63028]: DEBUG nova.compute.manager [None req-4f6edbf0-2dbd-479d-b1c2-51e25413dafe tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: a4b0d948-d950-414a-b23f-faefa5ab038c] Start spawning the instance on the hypervisor. {{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 755.504214] env[63028]: DEBUG nova.virt.hardware [None req-4f6edbf0-2dbd-479d-b1c2-51e25413dafe tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 755.504214] env[63028]: DEBUG nova.virt.hardware [None req-4f6edbf0-2dbd-479d-b1c2-51e25413dafe tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 755.504214] env[63028]: DEBUG nova.virt.hardware [None req-4f6edbf0-2dbd-479d-b1c2-51e25413dafe tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 755.504214] env[63028]: DEBUG nova.virt.hardware [None req-4f6edbf0-2dbd-479d-b1c2-51e25413dafe tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 755.504419] env[63028]: DEBUG nova.virt.hardware [None req-4f6edbf0-2dbd-479d-b1c2-51e25413dafe tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 755.504419] env[63028]: DEBUG nova.virt.hardware [None req-4f6edbf0-2dbd-479d-b1c2-51e25413dafe tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 755.504594] env[63028]: DEBUG nova.virt.hardware [None req-4f6edbf0-2dbd-479d-b1c2-51e25413dafe tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 755.504755] env[63028]: DEBUG nova.virt.hardware [None req-4f6edbf0-2dbd-479d-b1c2-51e25413dafe tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 755.504922] env[63028]: DEBUG nova.virt.hardware [None req-4f6edbf0-2dbd-479d-b1c2-51e25413dafe tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 755.505871] env[63028]: DEBUG nova.virt.hardware [None req-4f6edbf0-2dbd-479d-b1c2-51e25413dafe tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 755.506169] env[63028]: DEBUG nova.virt.hardware [None req-4f6edbf0-2dbd-479d-b1c2-51e25413dafe tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 755.508313] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cafd008a-591f-41ca-be08-00f3fa67dfdc {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.517659] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8c5f482-2afc-40c9-a323-561ddaaf2819 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.556412] env[63028]: DEBUG nova.compute.manager [req-b3aeb6ff-8609-4052-ba1a-14480aaaf620 req-56562e1a-0bc3-4949-82c1-c25d6a63a710 service nova] [instance: 0d96ba8e-b46b-48ae-957c-cdc49762c395] Received event network-vif-deleted-54686a8a-3d85-4fee-89ee-c097c3ec620f {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 755.643920] env[63028]: DEBUG oslo_concurrency.lockutils [None req-42a4e41f-5fdd-4dd8-bb5e-bc5dbf266ec6 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 755.656804] env[63028]: DEBUG oslo_vmware.api [None req-0845dbb5-9e8a-45d4-8122-206916da704e tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Task: {'id': task-2735393, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.660933] env[63028]: DEBUG nova.compute.manager [req-2cfa26ef-abff-482a-83e5-cd839bd44bab req-7f93c8c0-beff-4347-b59c-06ffbd2b4d39 service nova] [instance: 5a330ed9-c106-49f2-b524-a424e717b5ce] Received event network-vif-deleted-6dc62708-050a-40f3-b99a-f51b25937806 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 755.660933] env[63028]: DEBUG nova.compute.manager [req-2cfa26ef-abff-482a-83e5-cd839bd44bab req-7f93c8c0-beff-4347-b59c-06ffbd2b4d39 service nova] [instance: 0e07a6cd-8c99-408d-95ba-63f7839c327f] Received event network-vif-deleted-cc6af35d-7e46-40e6-bc97-40efda1ab807 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 755.660933] env[63028]: INFO nova.compute.manager [req-2cfa26ef-abff-482a-83e5-cd839bd44bab req-7f93c8c0-beff-4347-b59c-06ffbd2b4d39 service nova] [instance: 0e07a6cd-8c99-408d-95ba-63f7839c327f] Neutron deleted interface cc6af35d-7e46-40e6-bc97-40efda1ab807; detaching it from the instance and deleting it from the info cache [ 755.660933] env[63028]: DEBUG nova.network.neutron [req-2cfa26ef-abff-482a-83e5-cd839bd44bab req-7f93c8c0-beff-4347-b59c-06ffbd2b4d39 service nova] [instance: 0e07a6cd-8c99-408d-95ba-63f7839c327f] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 755.719896] env[63028]: DEBUG oslo_vmware.api [None req-d9433184-003c-41fe-b52d-bd01162a28cf tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] Task: {'id': task-2735392, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.667977} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.721141] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-d9433184-003c-41fe-b52d-bd01162a28cf tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 755.721764] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-d9433184-003c-41fe-b52d-bd01162a28cf tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] [instance: 8f6beda6-0fc6-4d85-9f27-f4248adda8f3] Deleted contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 755.723648] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-d9433184-003c-41fe-b52d-bd01162a28cf tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] [instance: 8f6beda6-0fc6-4d85-9f27-f4248adda8f3] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 755.723648] env[63028]: INFO nova.compute.manager [None req-d9433184-003c-41fe-b52d-bd01162a28cf tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] [instance: 8f6beda6-0fc6-4d85-9f27-f4248adda8f3] Took 1.69 seconds to destroy the instance on the hypervisor. 
[ 755.723648] env[63028]: DEBUG oslo.service.loopingcall [None req-d9433184-003c-41fe-b52d-bd01162a28cf tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 755.724274] env[63028]: DEBUG nova.compute.manager [-] [instance: 8f6beda6-0fc6-4d85-9f27-f4248adda8f3] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 755.724897] env[63028]: DEBUG nova.network.neutron [-] [instance: 8f6beda6-0fc6-4d85-9f27-f4248adda8f3] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 755.828274] env[63028]: DEBUG oslo_vmware.api [None req-8e94b42e-cf5b-4db5-8cf9-7f06a8d06718 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2735394, 'name': Rename_Task, 'duration_secs': 0.389384} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.829074] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e94b42e-cf5b-4db5-8cf9-7f06a8d06718 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 50e4934b-b9b1-4887-b5d1-95a37fbf4c41] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 755.829409] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5963d5fd-c9b8-4ed5-aa52-b5ab4c709d1b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.837907] env[63028]: DEBUG oslo_concurrency.lockutils [None req-70b97bb8-6f63-4727-9ea4-16bca122cb7f tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.391s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 755.842404] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5e5cf0a0-0a56-464b-9790-52f94aa8fe18 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.311s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 755.846519] env[63028]: INFO nova.compute.claims [None req-5e5cf0a0-0a56-464b-9790-52f94aa8fe18 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: af87f1a5-b413-4b26-be91-474ad1f73df8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 755.853544] env[63028]: DEBUG oslo_vmware.api [None req-8e94b42e-cf5b-4db5-8cf9-7f06a8d06718 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Waiting for the task: (returnval){ [ 755.853544] env[63028]: value = "task-2735396" [ 755.853544] env[63028]: _type = "Task" [ 755.853544] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.865063] env[63028]: DEBUG oslo_vmware.api [None req-c2083659-4645-4440-9683-4af8a1564415 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': task-2735395, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.869308] env[63028]: DEBUG oslo_vmware.api [None req-8e94b42e-cf5b-4db5-8cf9-7f06a8d06718 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2735396, 'name': PowerOnVM_Task} progress is 33%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.883756] env[63028]: INFO nova.scheduler.client.report [None req-70b97bb8-6f63-4727-9ea4-16bca122cb7f tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] Deleted allocations for instance 4a782483-c24e-44db-b697-856c69cc4a13 [ 755.944720] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7375b29d-7ac6-4525-bc12-7fc987c16da6 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 756.092469] env[63028]: DEBUG nova.network.neutron [-] [instance: 0e07a6cd-8c99-408d-95ba-63f7839c327f] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 756.147676] env[63028]: DEBUG nova.network.neutron [-] [instance: c0db2b2a-9c06-409c-b48b-a0d5c127f2dc] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 756.159449] env[63028]: DEBUG oslo_vmware.api [None req-0845dbb5-9e8a-45d4-8122-206916da704e tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Task: {'id': task-2735393, 'name': ReconfigVM_Task, 'duration_secs': 0.888804} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.159787] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-0845dbb5-9e8a-45d4-8122-206916da704e tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 5982cd5d-abf1-42d4-bb44-8d79de599f11] Reconfigured VM instance instance-0000002e to attach disk [datastore2] 5982cd5d-abf1-42d4-bb44-8d79de599f11/5982cd5d-abf1-42d4-bb44-8d79de599f11.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 756.160484] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d86cc988-132b-4360-96ca-15cbfe6f8235 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.168445] env[63028]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-97b6ff05-d4bb-4ccf-84bb-9f16becd1727 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.170842] env[63028]: DEBUG oslo_vmware.api [None req-0845dbb5-9e8a-45d4-8122-206916da704e tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Waiting for the task: (returnval){ [ 756.170842] env[63028]: value = "task-2735397" [ 756.170842] env[63028]: _type = "Task" [ 756.170842] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.179822] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43f0ed75-5fce-4273-8b75-371fbfe9c123 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.196658] env[63028]: DEBUG oslo_vmware.api [None req-0845dbb5-9e8a-45d4-8122-206916da704e tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Task: {'id': task-2735397, 'name': Rename_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.227678] env[63028]: DEBUG nova.compute.manager [req-2cfa26ef-abff-482a-83e5-cd839bd44bab req-7f93c8c0-beff-4347-b59c-06ffbd2b4d39 service nova] [instance: 0e07a6cd-8c99-408d-95ba-63f7839c327f] Detach interface failed, port_id=cc6af35d-7e46-40e6-bc97-40efda1ab807, reason: Instance 0e07a6cd-8c99-408d-95ba-63f7839c327f could not be found. {{(pid=63028) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 756.359544] env[63028]: DEBUG oslo_vmware.api [None req-c2083659-4645-4440-9683-4af8a1564415 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': task-2735395, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.750519} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.364056] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2083659-4645-4440-9683-4af8a1564415 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] 3e45e7f3-a34f-4eab-9fff-1c874c832e2a/3e45e7f3-a34f-4eab-9fff-1c874c832e2a.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 756.364565] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c2083659-4645-4440-9683-4af8a1564415 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: 3e45e7f3-a34f-4eab-9fff-1c874c832e2a] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 756.366022] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-30a848bc-d18e-42c1-b784-70510b8d1f8a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.377066] env[63028]: DEBUG oslo_vmware.api [None req-8e94b42e-cf5b-4db5-8cf9-7f06a8d06718 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2735396, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.379445] env[63028]: DEBUG oslo_vmware.api [None req-c2083659-4645-4440-9683-4af8a1564415 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Waiting for the task: (returnval){ [ 756.379445] env[63028]: value = "task-2735398" [ 756.379445] env[63028]: _type = "Task" [ 756.379445] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.394982] env[63028]: DEBUG oslo_vmware.api [None req-c2083659-4645-4440-9683-4af8a1564415 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': task-2735398, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.394982] env[63028]: DEBUG oslo_concurrency.lockutils [None req-70b97bb8-6f63-4727-9ea4-16bca122cb7f tempest-VolumesAssistedSnapshotsTest-346075985 tempest-VolumesAssistedSnapshotsTest-346075985-project-member] Lock "4a782483-c24e-44db-b697-856c69cc4a13" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.979s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 756.597225] env[63028]: INFO nova.compute.manager [-] [instance: 0e07a6cd-8c99-408d-95ba-63f7839c327f] Took 1.75 seconds to deallocate network for instance. [ 756.653276] env[63028]: INFO nova.compute.manager [-] [instance: c0db2b2a-9c06-409c-b48b-a0d5c127f2dc] Took 1.60 seconds to deallocate network for instance. [ 756.681941] env[63028]: DEBUG oslo_vmware.api [None req-0845dbb5-9e8a-45d4-8122-206916da704e tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Task: {'id': task-2735397, 'name': Rename_Task} progress is 99%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.695254] env[63028]: DEBUG nova.network.neutron [None req-4f6edbf0-2dbd-479d-b1c2-51e25413dafe tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: a4b0d948-d950-414a-b23f-faefa5ab038c] Successfully updated port: 5dce79e8-b6a0-4077-a879-e3221a0a358c {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 756.876414] env[63028]: DEBUG oslo_vmware.api [None req-8e94b42e-cf5b-4db5-8cf9-7f06a8d06718 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2735396, 'name': PowerOnVM_Task, 'duration_secs': 0.806641} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.877022] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e94b42e-cf5b-4db5-8cf9-7f06a8d06718 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 50e4934b-b9b1-4887-b5d1-95a37fbf4c41] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 756.877022] env[63028]: DEBUG nova.compute.manager [None req-8e94b42e-cf5b-4db5-8cf9-7f06a8d06718 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 50e4934b-b9b1-4887-b5d1-95a37fbf4c41] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 756.880224] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-211e86de-c51d-49c3-9e94-c81ee9e1bead {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.892012] env[63028]: DEBUG oslo_vmware.api [None req-c2083659-4645-4440-9683-4af8a1564415 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': task-2735398, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074683} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.893832] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c2083659-4645-4440-9683-4af8a1564415 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: 3e45e7f3-a34f-4eab-9fff-1c874c832e2a] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 756.904952] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46282cf5-f5ce-4026-8fd6-aeeac18b8148 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.933255] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-c2083659-4645-4440-9683-4af8a1564415 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: 3e45e7f3-a34f-4eab-9fff-1c874c832e2a] Reconfiguring VM instance instance-0000002f to attach disk [datastore1] 3e45e7f3-a34f-4eab-9fff-1c874c832e2a/3e45e7f3-a34f-4eab-9fff-1c874c832e2a.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 756.934972] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4c31fd1f-fde4-4855-a622-01145fc2e78b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.970787] env[63028]: DEBUG oslo_vmware.api [None req-c2083659-4645-4440-9683-4af8a1564415 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Waiting for the task: (returnval){ [ 756.970787] env[63028]: value = "task-2735399" [ 756.970787] env[63028]: _type = "Task" [ 756.970787] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.982092] env[63028]: DEBUG oslo_vmware.api [None req-c2083659-4645-4440-9683-4af8a1564415 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': task-2735399, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.109370] env[63028]: DEBUG oslo_concurrency.lockutils [None req-b094771b-ac22-43cb-a63f-5342863501ae tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 757.136808] env[63028]: DEBUG nova.network.neutron [-] [instance: 8f6beda6-0fc6-4d85-9f27-f4248adda8f3] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 757.163162] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ded586eb-1e36-4d00-b6ab-5bb55325d953 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 757.188183] env[63028]: DEBUG oslo_vmware.api [None req-0845dbb5-9e8a-45d4-8122-206916da704e tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Task: {'id': task-2735397, 'name': Rename_Task} progress is 99%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.200914] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4f6edbf0-2dbd-479d-b1c2-51e25413dafe tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Acquiring lock "refresh_cache-a4b0d948-d950-414a-b23f-faefa5ab038c" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 757.201113] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4f6edbf0-2dbd-479d-b1c2-51e25413dafe tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Acquired lock "refresh_cache-a4b0d948-d950-414a-b23f-faefa5ab038c" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 757.201314] env[63028]: DEBUG nova.network.neutron [None req-4f6edbf0-2dbd-479d-b1c2-51e25413dafe tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: a4b0d948-d950-414a-b23f-faefa5ab038c] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 757.421422] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8e94b42e-cf5b-4db5-8cf9-7f06a8d06718 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 757.436024] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9f9f608a-dd59-4deb-8ecf-94d85c2fa869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Acquiring lock "70888889-4965-47ab-ad47-59f1c1286bd8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 757.436024] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9f9f608a-dd59-4deb-8ecf-94d85c2fa869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Lock "70888889-4965-47ab-ad47-59f1c1286bd8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 757.477117] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9907ff2-1856-45a0-9b2c-433cc5567479 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.483996] env[63028]: DEBUG oslo_vmware.api [None req-c2083659-4645-4440-9683-4af8a1564415 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': task-2735399, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.489217] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d6472fc-79c6-4d15-8728-eba974f5f9e8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.524693] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddba1a31-d5c5-4f1c-a472-8ceed7113e44 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.533297] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd6b5900-63f0-4c75-97c8-4a297d9a73d7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.547811] env[63028]: DEBUG nova.compute.provider_tree [None req-5e5cf0a0-0a56-464b-9790-52f94aa8fe18 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 757.594395] env[63028]: DEBUG nova.compute.manager [req-3db5979e-a54c-4fb8-8910-a7aab55c8d0d req-ae1d3a80-c156-43a6-b40c-7aec8177316d service nova] [instance: c0db2b2a-9c06-409c-b48b-a0d5c127f2dc] Received event network-vif-deleted-a5a7398c-2688-4f83-abb5-933faad7d16c {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 757.594651] env[63028]: DEBUG nova.compute.manager [req-3db5979e-a54c-4fb8-8910-a7aab55c8d0d req-ae1d3a80-c156-43a6-b40c-7aec8177316d service nova] [instance: 8f6beda6-0fc6-4d85-9f27-f4248adda8f3] Received event network-vif-deleted-4ab9f841-7392-47cd-afac-be9ddd19b6bf {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 757.642198] env[63028]: INFO nova.compute.manager [-] [instance: 8f6beda6-0fc6-4d85-9f27-f4248adda8f3] Took 1.92 seconds to deallocate network for instance. 
[ 757.688837] env[63028]: DEBUG oslo_vmware.api [None req-0845dbb5-9e8a-45d4-8122-206916da704e tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Task: {'id': task-2735397, 'name': Rename_Task, 'duration_secs': 1.220658} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 757.690442] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-0845dbb5-9e8a-45d4-8122-206916da704e tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 5982cd5d-abf1-42d4-bb44-8d79de599f11] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 757.690442] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-22d96db2-420e-4e4a-babf-1e7b1be000e7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.701677] env[63028]: DEBUG oslo_vmware.api [None req-0845dbb5-9e8a-45d4-8122-206916da704e tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Waiting for the task: (returnval){ [ 757.701677] env[63028]: value = "task-2735400" [ 757.701677] env[63028]: _type = "Task" [ 757.701677] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 757.713858] env[63028]: DEBUG oslo_vmware.api [None req-0845dbb5-9e8a-45d4-8122-206916da704e tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Task: {'id': task-2735400, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.741981] env[63028]: DEBUG nova.compute.manager [req-31414147-4f29-4b53-bb5d-8258fd1ffeb2 req-f67c5c61-e98a-4648-98a6-1f6eb1fe7f15 service nova] [instance: a4b0d948-d950-414a-b23f-faefa5ab038c] Received event network-vif-plugged-5dce79e8-b6a0-4077-a879-e3221a0a358c {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 757.742284] env[63028]: DEBUG oslo_concurrency.lockutils [req-31414147-4f29-4b53-bb5d-8258fd1ffeb2 req-f67c5c61-e98a-4648-98a6-1f6eb1fe7f15 service nova] Acquiring lock "a4b0d948-d950-414a-b23f-faefa5ab038c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 757.742544] env[63028]: DEBUG oslo_concurrency.lockutils [req-31414147-4f29-4b53-bb5d-8258fd1ffeb2 req-f67c5c61-e98a-4648-98a6-1f6eb1fe7f15 service nova] Lock "a4b0d948-d950-414a-b23f-faefa5ab038c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 757.742758] env[63028]: DEBUG oslo_concurrency.lockutils [req-31414147-4f29-4b53-bb5d-8258fd1ffeb2 req-f67c5c61-e98a-4648-98a6-1f6eb1fe7f15 service nova] Lock "a4b0d948-d950-414a-b23f-faefa5ab038c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 757.743038] env[63028]: DEBUG nova.compute.manager [req-31414147-4f29-4b53-bb5d-8258fd1ffeb2 
req-f67c5c61-e98a-4648-98a6-1f6eb1fe7f15 service nova] [instance: a4b0d948-d950-414a-b23f-faefa5ab038c] No waiting events found dispatching network-vif-plugged-5dce79e8-b6a0-4077-a879-e3221a0a358c {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 757.743281] env[63028]: WARNING nova.compute.manager [req-31414147-4f29-4b53-bb5d-8258fd1ffeb2 req-f67c5c61-e98a-4648-98a6-1f6eb1fe7f15 service nova] [instance: a4b0d948-d950-414a-b23f-faefa5ab038c] Received unexpected event network-vif-plugged-5dce79e8-b6a0-4077-a879-e3221a0a358c for instance with vm_state building and task_state spawning. [ 757.743520] env[63028]: DEBUG nova.compute.manager [req-31414147-4f29-4b53-bb5d-8258fd1ffeb2 req-f67c5c61-e98a-4648-98a6-1f6eb1fe7f15 service nova] [instance: a4b0d948-d950-414a-b23f-faefa5ab038c] Received event network-changed-5dce79e8-b6a0-4077-a879-e3221a0a358c {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 757.743659] env[63028]: DEBUG nova.compute.manager [req-31414147-4f29-4b53-bb5d-8258fd1ffeb2 req-f67c5c61-e98a-4648-98a6-1f6eb1fe7f15 service nova] [instance: a4b0d948-d950-414a-b23f-faefa5ab038c] Refreshing instance network info cache due to event network-changed-5dce79e8-b6a0-4077-a879-e3221a0a358c. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 757.743872] env[63028]: DEBUG oslo_concurrency.lockutils [req-31414147-4f29-4b53-bb5d-8258fd1ffeb2 req-f67c5c61-e98a-4648-98a6-1f6eb1fe7f15 service nova] Acquiring lock "refresh_cache-a4b0d948-d950-414a-b23f-faefa5ab038c" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 757.786188] env[63028]: DEBUG nova.network.neutron [None req-4f6edbf0-2dbd-479d-b1c2-51e25413dafe tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: a4b0d948-d950-414a-b23f-faefa5ab038c] Instance cache missing network info. {{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 757.982415] env[63028]: DEBUG oslo_vmware.api [None req-c2083659-4645-4440-9683-4af8a1564415 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': task-2735399, 'name': ReconfigVM_Task, 'duration_secs': 0.873739} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 757.982691] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-c2083659-4645-4440-9683-4af8a1564415 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: 3e45e7f3-a34f-4eab-9fff-1c874c832e2a] Reconfigured VM instance instance-0000002f to attach disk [datastore1] 3e45e7f3-a34f-4eab-9fff-1c874c832e2a/3e45e7f3-a34f-4eab-9fff-1c874c832e2a.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 757.983455] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-dd0bb218-54d8-41a1-8894-8376bb9de8f9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.992106] env[63028]: DEBUG oslo_vmware.api [None req-c2083659-4645-4440-9683-4af8a1564415 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Waiting for the task: (returnval){ [ 757.992106] env[63028]: value = "task-2735401" [ 757.992106] env[63028]: _type = "Task" [ 757.992106] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 758.002315] env[63028]: DEBUG oslo_vmware.api [None req-c2083659-4645-4440-9683-4af8a1564415 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': task-2735401, 'name': Rename_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.051013] env[63028]: DEBUG nova.scheduler.client.report [None req-5e5cf0a0-0a56-464b-9790-52f94aa8fe18 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 758.149123] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d9433184-003c-41fe-b52d-bd01162a28cf tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 758.212911] env[63028]: DEBUG oslo_vmware.api [None req-0845dbb5-9e8a-45d4-8122-206916da704e tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Task: {'id': task-2735400, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.322354] env[63028]: DEBUG nova.network.neutron [None req-4f6edbf0-2dbd-479d-b1c2-51e25413dafe tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: a4b0d948-d950-414a-b23f-faefa5ab038c] Updating instance_info_cache with network_info: [{"id": "5dce79e8-b6a0-4077-a879-e3221a0a358c", "address": "fa:16:3e:30:40:81", "network": {"id": "103ef17f-0425-48ae-98df-250d0d0dd66a", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-13124710-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43b7c1be3c4343a4b4f288a355170873", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3261e15f-7e45-4516-acfd-341bab16e3cf", "external-id": "nsx-vlan-transportzone-783", "segmentation_id": 783, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5dce79e8-b6", "ovs_interfaceid": "5dce79e8-b6a0-4077-a879-e3221a0a358c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 758.504904] env[63028]: DEBUG oslo_vmware.api [None req-c2083659-4645-4440-9683-4af8a1564415 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': task-2735401, 'name': Rename_Task, 'duration_secs': 0.171335} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 758.505378] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2083659-4645-4440-9683-4af8a1564415 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: 3e45e7f3-a34f-4eab-9fff-1c874c832e2a] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 758.505726] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-53fb213e-8ac2-4ea4-9d12-c51a298e79ec {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.517876] env[63028]: DEBUG oslo_vmware.api [None req-c2083659-4645-4440-9683-4af8a1564415 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Waiting for the task: (returnval){ [ 758.517876] env[63028]: value = "task-2735402" [ 758.517876] env[63028]: _type = "Task" [ 758.517876] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 758.528520] env[63028]: DEBUG oslo_vmware.api [None req-c2083659-4645-4440-9683-4af8a1564415 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': task-2735402, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.556684] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5e5cf0a0-0a56-464b-9790-52f94aa8fe18 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.714s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 758.557280] env[63028]: DEBUG nova.compute.manager [None req-5e5cf0a0-0a56-464b-9790-52f94aa8fe18 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: af87f1a5-b413-4b26-be91-474ad1f73df8] Start building networks asynchronously for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 758.560089] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ed7e176c-ecb7-4708-9379-1888a2940075 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.982s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 758.560349] env[63028]: DEBUG nova.objects.instance [None req-ed7e176c-ecb7-4708-9379-1888a2940075 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Lazy-loading 'resources' on Instance uuid 9b1cd3c1-5e9a-43b6-9efb-1baf879ae0c0 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 758.716098] env[63028]: DEBUG oslo_vmware.api [None req-0845dbb5-9e8a-45d4-8122-206916da704e tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Task: {'id': task-2735400, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.825656] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4f6edbf0-2dbd-479d-b1c2-51e25413dafe tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Releasing lock "refresh_cache-a4b0d948-d950-414a-b23f-faefa5ab038c" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 758.826046] env[63028]: DEBUG nova.compute.manager [None req-4f6edbf0-2dbd-479d-b1c2-51e25413dafe tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: a4b0d948-d950-414a-b23f-faefa5ab038c] Instance network_info: |[{"id": "5dce79e8-b6a0-4077-a879-e3221a0a358c", "address": "fa:16:3e:30:40:81", "network": {"id": "103ef17f-0425-48ae-98df-250d0d0dd66a", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-13124710-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43b7c1be3c4343a4b4f288a355170873", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3261e15f-7e45-4516-acfd-341bab16e3cf", "external-id": "nsx-vlan-transportzone-783", "segmentation_id": 783, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5dce79e8-b6", "ovs_interfaceid": "5dce79e8-b6a0-4077-a879-e3221a0a358c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 758.826422] env[63028]: DEBUG oslo_concurrency.lockutils [req-31414147-4f29-4b53-bb5d-8258fd1ffeb2 req-f67c5c61-e98a-4648-98a6-1f6eb1fe7f15 service nova] Acquired lock "refresh_cache-a4b0d948-d950-414a-b23f-faefa5ab038c" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 758.826576] env[63028]: DEBUG nova.network.neutron [req-31414147-4f29-4b53-bb5d-8258fd1ffeb2 req-f67c5c61-e98a-4648-98a6-1f6eb1fe7f15 service nova] [instance: a4b0d948-d950-414a-b23f-faefa5ab038c] Refreshing network info cache for port 5dce79e8-b6a0-4077-a879-e3221a0a358c {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 758.828958] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-4f6edbf0-2dbd-479d-b1c2-51e25413dafe tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: a4b0d948-d950-414a-b23f-faefa5ab038c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:30:40:81', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3261e15f-7e45-4516-acfd-341bab16e3cf', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5dce79e8-b6a0-4077-a879-e3221a0a358c', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 758.836891] env[63028]: DEBUG oslo.service.loopingcall [None req-4f6edbf0-2dbd-479d-b1c2-51e25413dafe 
tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 758.837393] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a4b0d948-d950-414a-b23f-faefa5ab038c] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 758.838292] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f8cd0c81-a437-4ed5-b8fb-38b59d8b3de2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.861617] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 758.861617] env[63028]: value = "task-2735403" [ 758.861617] env[63028]: _type = "Task" [ 758.861617] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 758.871453] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735403, 'name': CreateVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.028925] env[63028]: DEBUG oslo_vmware.api [None req-c2083659-4645-4440-9683-4af8a1564415 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': task-2735402, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.067425] env[63028]: DEBUG nova.compute.utils [None req-5e5cf0a0-0a56-464b-9790-52f94aa8fe18 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 759.072334] env[63028]: DEBUG nova.compute.manager [None req-5e5cf0a0-0a56-464b-9790-52f94aa8fe18 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: af87f1a5-b413-4b26-be91-474ad1f73df8] Allocating IP information in the background. 
{{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 759.072334] env[63028]: DEBUG nova.network.neutron [None req-5e5cf0a0-0a56-464b-9790-52f94aa8fe18 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: af87f1a5-b413-4b26-be91-474ad1f73df8] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 759.134566] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2a4fe11b-9e59-4c76-ac53-0f5c85045de9 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Acquiring lock "50e4934b-b9b1-4887-b5d1-95a37fbf4c41" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 759.134816] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2a4fe11b-9e59-4c76-ac53-0f5c85045de9 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Lock "50e4934b-b9b1-4887-b5d1-95a37fbf4c41" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 759.135037] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2a4fe11b-9e59-4c76-ac53-0f5c85045de9 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Acquiring lock "50e4934b-b9b1-4887-b5d1-95a37fbf4c41-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 759.135252] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2a4fe11b-9e59-4c76-ac53-0f5c85045de9 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Lock "50e4934b-b9b1-4887-b5d1-95a37fbf4c41-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 759.135470] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2a4fe11b-9e59-4c76-ac53-0f5c85045de9 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Lock "50e4934b-b9b1-4887-b5d1-95a37fbf4c41-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 759.137804] env[63028]: INFO nova.compute.manager [None req-2a4fe11b-9e59-4c76-ac53-0f5c85045de9 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 50e4934b-b9b1-4887-b5d1-95a37fbf4c41] Terminating instance [ 759.155924] env[63028]: DEBUG nova.policy [None req-5e5cf0a0-0a56-464b-9790-52f94aa8fe18 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '43ed2fb3f1a944fdac8ee7778f171cd0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8efc6d89903c454eb39136a76e0adef5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 
'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 759.222199] env[63028]: DEBUG oslo_vmware.api [None req-0845dbb5-9e8a-45d4-8122-206916da704e tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Task: {'id': task-2735400, 'name': PowerOnVM_Task, 'duration_secs': 1.043246} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 759.222199] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-0845dbb5-9e8a-45d4-8122-206916da704e tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 5982cd5d-abf1-42d4-bb44-8d79de599f11] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 759.222326] env[63028]: INFO nova.compute.manager [None req-0845dbb5-9e8a-45d4-8122-206916da704e tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 5982cd5d-abf1-42d4-bb44-8d79de599f11] Took 11.54 seconds to spawn the instance on the hypervisor. [ 759.222539] env[63028]: DEBUG nova.compute.manager [None req-0845dbb5-9e8a-45d4-8122-206916da704e tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 5982cd5d-abf1-42d4-bb44-8d79de599f11] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 759.223493] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-245bff7d-cae3-4626-bc1c-a26feeb8acb2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.373761] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735403, 'name': CreateVM_Task, 'duration_secs': 0.387766} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 759.374188] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a4b0d948-d950-414a-b23f-faefa5ab038c] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 759.375422] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4f6edbf0-2dbd-479d-b1c2-51e25413dafe tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 759.375795] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4f6edbf0-2dbd-479d-b1c2-51e25413dafe tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 759.379021] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4f6edbf0-2dbd-479d-b1c2-51e25413dafe tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 759.379021] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6c4b131a-f77d-41cb-aeac-a162df3a0572 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.383645] env[63028]: DEBUG oslo_vmware.api [None req-4f6edbf0-2dbd-479d-b1c2-51e25413dafe tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Waiting for the task: (returnval){ [ 759.383645] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52021b62-18b0-5a19-dd10-a3499a439e7f" [ 759.383645] env[63028]: _type = "Task" [ 759.383645] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 759.397520] env[63028]: DEBUG oslo_vmware.api [None req-4f6edbf0-2dbd-479d-b1c2-51e25413dafe tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52021b62-18b0-5a19-dd10-a3499a439e7f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.534883] env[63028]: DEBUG oslo_vmware.api [None req-c2083659-4645-4440-9683-4af8a1564415 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': task-2735402, 'name': PowerOnVM_Task, 'duration_secs': 0.943395} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 759.541088] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2083659-4645-4440-9683-4af8a1564415 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: 3e45e7f3-a34f-4eab-9fff-1c874c832e2a] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 759.541441] env[63028]: INFO nova.compute.manager [None req-c2083659-4645-4440-9683-4af8a1564415 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: 3e45e7f3-a34f-4eab-9fff-1c874c832e2a] Took 9.04 seconds to spawn the instance on the hypervisor. [ 759.541684] env[63028]: DEBUG nova.compute.manager [None req-c2083659-4645-4440-9683-4af8a1564415 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: 3e45e7f3-a34f-4eab-9fff-1c874c832e2a] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 759.542840] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6a24759-6af3-46f2-a1da-009e092e0161 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.573062] env[63028]: DEBUG nova.compute.manager [None req-5e5cf0a0-0a56-464b-9790-52f94aa8fe18 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: af87f1a5-b413-4b26-be91-474ad1f73df8] Start building block device mappings for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 759.645115] env[63028]: DEBUG nova.compute.manager [None req-2a4fe11b-9e59-4c76-ac53-0f5c85045de9 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 50e4934b-b9b1-4887-b5d1-95a37fbf4c41] Start destroying the instance on the hypervisor. {{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 759.645369] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-2a4fe11b-9e59-4c76-ac53-0f5c85045de9 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 50e4934b-b9b1-4887-b5d1-95a37fbf4c41] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 759.646314] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a291bee8-f7cf-4a8c-a0ec-72e4f16250a5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.653396] env[63028]: DEBUG nova.network.neutron [req-31414147-4f29-4b53-bb5d-8258fd1ffeb2 req-f67c5c61-e98a-4648-98a6-1f6eb1fe7f15 service nova] [instance: a4b0d948-d950-414a-b23f-faefa5ab038c] Updated VIF entry in instance network info cache for port 5dce79e8-b6a0-4077-a879-e3221a0a358c. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 759.653799] env[63028]: DEBUG nova.network.neutron [req-31414147-4f29-4b53-bb5d-8258fd1ffeb2 req-f67c5c61-e98a-4648-98a6-1f6eb1fe7f15 service nova] [instance: a4b0d948-d950-414a-b23f-faefa5ab038c] Updating instance_info_cache with network_info: [{"id": "5dce79e8-b6a0-4077-a879-e3221a0a358c", "address": "fa:16:3e:30:40:81", "network": {"id": "103ef17f-0425-48ae-98df-250d0d0dd66a", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-13124710-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43b7c1be3c4343a4b4f288a355170873", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3261e15f-7e45-4516-acfd-341bab16e3cf", "external-id": "nsx-vlan-transportzone-783", "segmentation_id": 783, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5dce79e8-b6", "ovs_interfaceid": "5dce79e8-b6a0-4077-a879-e3221a0a358c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 759.658401] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a4fe11b-9e59-4c76-ac53-0f5c85045de9 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 50e4934b-b9b1-4887-b5d1-95a37fbf4c41] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 759.659044] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-21b935a3-dcad-4022-84fa-fe7f3a1031d9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.667437] env[63028]: DEBUG oslo_vmware.api [None req-2a4fe11b-9e59-4c76-ac53-0f5c85045de9 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Waiting for the task: (returnval){ [ 759.667437] env[63028]: value = "task-2735404" [ 759.667437] env[63028]: _type = "Task" [ 759.667437] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 759.682053] env[63028]: DEBUG oslo_vmware.api [None req-2a4fe11b-9e59-4c76-ac53-0f5c85045de9 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2735404, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.714152] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22cadae7-59e8-4f12-a51a-83672271e51b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.724425] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0be93799-44e4-4176-82f3-100910dae440 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.770843] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e076ff26-4c19-4366-945d-f70ad59e258f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.775791] env[63028]: INFO nova.compute.manager [None req-0845dbb5-9e8a-45d4-8122-206916da704e tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 5982cd5d-abf1-42d4-bb44-8d79de599f11] Took 45.30 seconds to build instance. [ 759.782953] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2152ae68-0da5-4f27-88e1-abcbd142b036 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.801759] env[63028]: DEBUG nova.compute.provider_tree [None req-ed7e176c-ecb7-4708-9379-1888a2940075 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 759.827423] env[63028]: DEBUG nova.network.neutron [None req-5e5cf0a0-0a56-464b-9790-52f94aa8fe18 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: af87f1a5-b413-4b26-be91-474ad1f73df8] Successfully created port: 431bf6ff-554c-484a-8431-a1f18a9b937e {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 759.838959] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c668e099-ea76-49a7-8eba-290ebc50fa5f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Acquiring lock "514c83d1-4fb1-435c-8c25-aa112c744131" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 759.838959] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c668e099-ea76-49a7-8eba-290ebc50fa5f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Lock "514c83d1-4fb1-435c-8c25-aa112c744131" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 759.896497] env[63028]: DEBUG oslo_vmware.api [None req-4f6edbf0-2dbd-479d-b1c2-51e25413dafe tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52021b62-18b0-5a19-dd10-a3499a439e7f, 'name': 
SearchDatastore_Task, 'duration_secs': 0.02135} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 759.897439] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4f6edbf0-2dbd-479d-b1c2-51e25413dafe tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 759.897940] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-4f6edbf0-2dbd-479d-b1c2-51e25413dafe tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: a4b0d948-d950-414a-b23f-faefa5ab038c] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 759.898276] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4f6edbf0-2dbd-479d-b1c2-51e25413dafe tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 759.898396] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4f6edbf0-2dbd-479d-b1c2-51e25413dafe tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 759.898579] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-4f6edbf0-2dbd-479d-b1c2-51e25413dafe tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 759.899217] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b995c57d-70d6-4841-b353-2aa0bc4f6aaa {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.925253] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-4f6edbf0-2dbd-479d-b1c2-51e25413dafe tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 759.925475] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-4f6edbf0-2dbd-479d-b1c2-51e25413dafe tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 759.926315] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e56f8468-b864-45c0-a5c0-3249d8f7f520 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.933696] env[63028]: DEBUG oslo_vmware.api [None req-4f6edbf0-2dbd-479d-b1c2-51e25413dafe tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Waiting for the task: (returnval){ [ 759.933696] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]528d9ca0-8a66-1e71-6338-53ea4faddd00" [ 759.933696] env[63028]: _type = "Task" [ 759.933696] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 759.944958] env[63028]: DEBUG oslo_vmware.api [None req-4f6edbf0-2dbd-479d-b1c2-51e25413dafe tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]528d9ca0-8a66-1e71-6338-53ea4faddd00, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.073807] env[63028]: INFO nova.compute.manager [None req-c2083659-4645-4440-9683-4af8a1564415 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: 3e45e7f3-a34f-4eab-9fff-1c874c832e2a] Took 44.82 seconds to build instance. [ 760.157212] env[63028]: DEBUG oslo_concurrency.lockutils [req-31414147-4f29-4b53-bb5d-8258fd1ffeb2 req-f67c5c61-e98a-4648-98a6-1f6eb1fe7f15 service nova] Releasing lock "refresh_cache-a4b0d948-d950-414a-b23f-faefa5ab038c" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 760.184446] env[63028]: DEBUG oslo_vmware.api [None req-2a4fe11b-9e59-4c76-ac53-0f5c85045de9 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2735404, 'name': PowerOffVM_Task, 'duration_secs': 0.287114} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 760.184446] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a4fe11b-9e59-4c76-ac53-0f5c85045de9 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 50e4934b-b9b1-4887-b5d1-95a37fbf4c41] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 760.184446] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-2a4fe11b-9e59-4c76-ac53-0f5c85045de9 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 50e4934b-b9b1-4887-b5d1-95a37fbf4c41] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 760.184446] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e93a32b7-d89b-4be8-ad63-5c0956ecead2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.254026] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-2a4fe11b-9e59-4c76-ac53-0f5c85045de9 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 50e4934b-b9b1-4887-b5d1-95a37fbf4c41] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 760.254026] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-2a4fe11b-9e59-4c76-ac53-0f5c85045de9 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 50e4934b-b9b1-4887-b5d1-95a37fbf4c41] Deleting contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 760.254026] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-2a4fe11b-9e59-4c76-ac53-0f5c85045de9 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Deleting the datastore file [datastore1] 50e4934b-b9b1-4887-b5d1-95a37fbf4c41 {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 760.254026] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3eaba6c8-3e5b-4951-9722-87664f3c6a31 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.266428] env[63028]: DEBUG oslo_vmware.api [None req-2a4fe11b-9e59-4c76-ac53-0f5c85045de9 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Waiting for the task: (returnval){ [ 760.266428] env[63028]: value = "task-2735406" [ 760.266428] env[63028]: _type = "Task" [ 760.266428] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 760.276341] env[63028]: DEBUG oslo_vmware.api [None req-2a4fe11b-9e59-4c76-ac53-0f5c85045de9 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2735406, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.278578] env[63028]: DEBUG oslo_concurrency.lockutils [None req-0845dbb5-9e8a-45d4-8122-206916da704e tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Lock "5982cd5d-abf1-42d4-bb44-8d79de599f11" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 93.968s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 760.304782] env[63028]: DEBUG nova.scheduler.client.report [None req-ed7e176c-ecb7-4708-9379-1888a2940075 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 760.445834] env[63028]: DEBUG oslo_vmware.api [None req-4f6edbf0-2dbd-479d-b1c2-51e25413dafe tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]528d9ca0-8a66-1e71-6338-53ea4faddd00, 'name': SearchDatastore_Task, 'duration_secs': 0.011915} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 760.446658] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c63ffdd0-baa1-40ca-bea5-09f782ba3b41 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.453186] env[63028]: DEBUG oslo_vmware.api [None req-4f6edbf0-2dbd-479d-b1c2-51e25413dafe tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Waiting for the task: (returnval){ [ 760.453186] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52bddec1-f1f0-8d50-dcbd-03989d774569" [ 760.453186] env[63028]: _type = "Task" [ 760.453186] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 760.465950] env[63028]: DEBUG oslo_vmware.api [None req-4f6edbf0-2dbd-479d-b1c2-51e25413dafe tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52bddec1-f1f0-8d50-dcbd-03989d774569, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.576426] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c2083659-4645-4440-9683-4af8a1564415 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Lock "3e45e7f3-a34f-4eab-9fff-1c874c832e2a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 87.406s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 760.587719] env[63028]: DEBUG nova.compute.manager [None req-5e5cf0a0-0a56-464b-9790-52f94aa8fe18 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: af87f1a5-b413-4b26-be91-474ad1f73df8] Start spawning the instance on the hypervisor. {{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 760.622398] env[63028]: DEBUG nova.virt.hardware [None req-5e5cf0a0-0a56-464b-9790-52f94aa8fe18 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 760.622398] env[63028]: DEBUG nova.virt.hardware [None req-5e5cf0a0-0a56-464b-9790-52f94aa8fe18 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 760.622398] env[63028]: DEBUG nova.virt.hardware [None req-5e5cf0a0-0a56-464b-9790-52f94aa8fe18 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 760.622398] env[63028]: DEBUG nova.virt.hardware [None req-5e5cf0a0-0a56-464b-9790-52f94aa8fe18 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 760.622398] env[63028]: DEBUG nova.virt.hardware [None req-5e5cf0a0-0a56-464b-9790-52f94aa8fe18 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 760.622398] env[63028]: DEBUG nova.virt.hardware [None req-5e5cf0a0-0a56-464b-9790-52f94aa8fe18 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 760.622398] env[63028]: DEBUG nova.virt.hardware [None req-5e5cf0a0-0a56-464b-9790-52f94aa8fe18 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 760.622398] env[63028]: DEBUG nova.virt.hardware [None req-5e5cf0a0-0a56-464b-9790-52f94aa8fe18 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 760.622398] env[63028]: DEBUG nova.virt.hardware [None req-5e5cf0a0-0a56-464b-9790-52f94aa8fe18 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 760.625476] env[63028]: DEBUG nova.virt.hardware [None req-5e5cf0a0-0a56-464b-9790-52f94aa8fe18 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 760.625476] env[63028]: DEBUG nova.virt.hardware [None req-5e5cf0a0-0a56-464b-9790-52f94aa8fe18 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 760.626452] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbf8332b-ff5f-4860-8075-0aa99d466972 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.640021] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2793f714-0cb3-4b8e-9460-a48c4e5cb4c6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.782022] env[63028]: DEBUG oslo_vmware.api [None req-2a4fe11b-9e59-4c76-ac53-0f5c85045de9 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2735406, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.462336} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 760.782022] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-2a4fe11b-9e59-4c76-ac53-0f5c85045de9 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 760.782022] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-2a4fe11b-9e59-4c76-ac53-0f5c85045de9 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 50e4934b-b9b1-4887-b5d1-95a37fbf4c41] Deleted contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 760.782022] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-2a4fe11b-9e59-4c76-ac53-0f5c85045de9 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 50e4934b-b9b1-4887-b5d1-95a37fbf4c41] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 760.782022] env[63028]: INFO nova.compute.manager [None req-2a4fe11b-9e59-4c76-ac53-0f5c85045de9 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 50e4934b-b9b1-4887-b5d1-95a37fbf4c41] Took 1.14 seconds to destroy the instance on the hypervisor. [ 760.782022] env[63028]: DEBUG oslo.service.loopingcall [None req-2a4fe11b-9e59-4c76-ac53-0f5c85045de9 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 760.782022] env[63028]: DEBUG nova.compute.manager [-] [instance: 50e4934b-b9b1-4887-b5d1-95a37fbf4c41] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 760.782022] env[63028]: DEBUG nova.network.neutron [-] [instance: 50e4934b-b9b1-4887-b5d1-95a37fbf4c41] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 760.783627] env[63028]: DEBUG nova.compute.manager [None req-a647d5f8-656b-4f8b-a625-07443c814584 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] [instance: a1d00736-1a8d-46e0-9358-46e848b94797] Starting instance... 
{{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 760.809949] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ed7e176c-ecb7-4708-9379-1888a2940075 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.250s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 760.812974] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ce407e48-ce4a-4a11-b901-4c905a15470b tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.959s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 760.813265] env[63028]: DEBUG nova.objects.instance [None req-ce407e48-ce4a-4a11-b901-4c905a15470b tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] Lazy-loading 'resources' on Instance uuid 22713da1-ae53-4bbe-ae55-2490440cbd87 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 760.844998] env[63028]: INFO nova.scheduler.client.report [None req-ed7e176c-ecb7-4708-9379-1888a2940075 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Deleted allocations for instance 9b1cd3c1-5e9a-43b6-9efb-1baf879ae0c0 [ 760.974035] env[63028]: DEBUG oslo_vmware.api [None req-4f6edbf0-2dbd-479d-b1c2-51e25413dafe tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52bddec1-f1f0-8d50-dcbd-03989d774569, 'name': SearchDatastore_Task, 'duration_secs': 0.015637} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 760.974653] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4f6edbf0-2dbd-479d-b1c2-51e25413dafe tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 760.974878] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f6edbf0-2dbd-479d-b1c2-51e25413dafe tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] a4b0d948-d950-414a-b23f-faefa5ab038c/a4b0d948-d950-414a-b23f-faefa5ab038c.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 760.975201] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9396a9e9-e702-4f5f-9da2-ffd8dc57c821 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.990485] env[63028]: DEBUG oslo_vmware.api [None req-4f6edbf0-2dbd-479d-b1c2-51e25413dafe tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Waiting for the task: (returnval){ [ 760.990485] env[63028]: value = "task-2735407" [ 760.990485] env[63028]: _type = "Task" [ 760.990485] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 761.006614] env[63028]: DEBUG oslo_vmware.api [None req-4f6edbf0-2dbd-479d-b1c2-51e25413dafe tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Task: {'id': task-2735407, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.079680] env[63028]: DEBUG nova.compute.manager [None req-72a36364-d2e4-49f2-bc10-dbe3424d39a6 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Starting instance... 
{{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 761.264321] env[63028]: DEBUG nova.compute.manager [req-7f5460a3-71d9-46eb-9f15-2367363efdd8 req-7beb167f-3a04-4345-9952-6fe1d4cebaca service nova] [instance: 50e4934b-b9b1-4887-b5d1-95a37fbf4c41] Received event network-vif-deleted-b6ea40f2-2217-44b1-bf3f-727a6649149a {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 761.264483] env[63028]: INFO nova.compute.manager [req-7f5460a3-71d9-46eb-9f15-2367363efdd8 req-7beb167f-3a04-4345-9952-6fe1d4cebaca service nova] [instance: 50e4934b-b9b1-4887-b5d1-95a37fbf4c41] Neutron deleted interface b6ea40f2-2217-44b1-bf3f-727a6649149a; detaching it from the instance and deleting it from the info cache [ 761.264677] env[63028]: DEBUG nova.network.neutron [req-7f5460a3-71d9-46eb-9f15-2367363efdd8 req-7beb167f-3a04-4345-9952-6fe1d4cebaca service nova] [instance: 50e4934b-b9b1-4887-b5d1-95a37fbf4c41] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 761.296738] env[63028]: DEBUG nova.compute.manager [None req-f78b3882-29ca-452b-a11e-a6f9e213d5cd tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 1316318e-8dcf-4ac2-b40a-6a3ab6964997] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 761.297888] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c42310b-a260-44eb-8de8-5b890f9d0d6b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.315176] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a647d5f8-656b-4f8b-a625-07443c814584 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 761.352829] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ed7e176c-ecb7-4708-9379-1888a2940075 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Lock "9b1cd3c1-5e9a-43b6-9efb-1baf879ae0c0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.639s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 761.509745] env[63028]: DEBUG oslo_vmware.api [None req-4f6edbf0-2dbd-479d-b1c2-51e25413dafe tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Task: {'id': task-2735407, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.520784] env[63028]: DEBUG oslo_concurrency.lockutils [None req-164a375f-5a7c-48ea-8de2-0424503d2164 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Acquiring lock "f4718363-73b2-4016-8849-f75e98259023" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 761.521148] env[63028]: DEBUG oslo_concurrency.lockutils [None req-164a375f-5a7c-48ea-8de2-0424503d2164 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Lock "f4718363-73b2-4016-8849-f75e98259023" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 761.616224] env[63028]: DEBUG oslo_concurrency.lockutils [None req-72a36364-d2e4-49f2-bc10-dbe3424d39a6 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 761.628157] env[63028]: DEBUG nova.network.neutron [-] [instance: 50e4934b-b9b1-4887-b5d1-95a37fbf4c41] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 761.721207] env[63028]: DEBUG nova.compute.manager [req-e2bfb217-938f-40f2-bd52-31dc5d8f706f req-8a977e9a-f67d-4f51-ac86-d58d69e0e7bc service nova] [instance: af87f1a5-b413-4b26-be91-474ad1f73df8] Received event network-vif-plugged-431bf6ff-554c-484a-8431-a1f18a9b937e {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 761.721207] env[63028]: DEBUG oslo_concurrency.lockutils [req-e2bfb217-938f-40f2-bd52-31dc5d8f706f req-8a977e9a-f67d-4f51-ac86-d58d69e0e7bc service nova] Acquiring lock "af87f1a5-b413-4b26-be91-474ad1f73df8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 761.723397] env[63028]: DEBUG oslo_concurrency.lockutils [req-e2bfb217-938f-40f2-bd52-31dc5d8f706f req-8a977e9a-f67d-4f51-ac86-d58d69e0e7bc service nova] Lock "af87f1a5-b413-4b26-be91-474ad1f73df8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 761.723397] env[63028]: DEBUG oslo_concurrency.lockutils [req-e2bfb217-938f-40f2-bd52-31dc5d8f706f req-8a977e9a-f67d-4f51-ac86-d58d69e0e7bc service nova] Lock "af87f1a5-b413-4b26-be91-474ad1f73df8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 761.723397] env[63028]: DEBUG nova.compute.manager [req-e2bfb217-938f-40f2-bd52-31dc5d8f706f req-8a977e9a-f67d-4f51-ac86-d58d69e0e7bc service nova] [instance: af87f1a5-b413-4b26-be91-474ad1f73df8] No waiting events found dispatching 
network-vif-plugged-431bf6ff-554c-484a-8431-a1f18a9b937e {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 761.723397] env[63028]: WARNING nova.compute.manager [req-e2bfb217-938f-40f2-bd52-31dc5d8f706f req-8a977e9a-f67d-4f51-ac86-d58d69e0e7bc service nova] [instance: af87f1a5-b413-4b26-be91-474ad1f73df8] Received unexpected event network-vif-plugged-431bf6ff-554c-484a-8431-a1f18a9b937e for instance with vm_state building and task_state spawning. [ 761.769306] env[63028]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-45c9cfff-d38f-4458-9fe2-430dea99a3a6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.780296] env[63028]: DEBUG nova.network.neutron [None req-5e5cf0a0-0a56-464b-9790-52f94aa8fe18 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: af87f1a5-b413-4b26-be91-474ad1f73df8] Successfully updated port: 431bf6ff-554c-484a-8431-a1f18a9b937e {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 761.787023] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-269d0555-24eb-46ec-a371-d597a390beb9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.810847] env[63028]: INFO nova.compute.manager [None req-f78b3882-29ca-452b-a11e-a6f9e213d5cd tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 1316318e-8dcf-4ac2-b40a-6a3ab6964997] instance snapshotting [ 761.813761] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d26d8f2-cda2-4d89-92b9-1a4579aa5cd4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.836888] env[63028]: DEBUG nova.compute.manager [req-7f5460a3-71d9-46eb-9f15-2367363efdd8 req-7beb167f-3a04-4345-9952-6fe1d4cebaca service nova] [instance: 50e4934b-b9b1-4887-b5d1-95a37fbf4c41] Detach interface failed, port_id=b6ea40f2-2217-44b1-bf3f-727a6649149a, reason: Instance 50e4934b-b9b1-4887-b5d1-95a37fbf4c41 could not be found. {{(pid=63028) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 761.861920] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10b84655-91e4-41ff-b5b3-07682f204757 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.005191] env[63028]: DEBUG oslo_vmware.api [None req-4f6edbf0-2dbd-479d-b1c2-51e25413dafe tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Task: {'id': task-2735407, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.788087} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 762.007199] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f6edbf0-2dbd-479d-b1c2-51e25413dafe tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] a4b0d948-d950-414a-b23f-faefa5ab038c/a4b0d948-d950-414a-b23f-faefa5ab038c.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 762.007413] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-4f6edbf0-2dbd-479d-b1c2-51e25413dafe tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: a4b0d948-d950-414a-b23f-faefa5ab038c] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 762.008534] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1577b339-6d57-4843-a0b3-ed62bdd4140d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.015541] env[63028]: DEBUG oslo_vmware.api [None req-4f6edbf0-2dbd-479d-b1c2-51e25413dafe tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Waiting for the task: (returnval){ [ 762.015541] env[63028]: value = "task-2735408" [ 762.015541] env[63028]: _type = "Task" [ 762.015541] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.026576] env[63028]: DEBUG oslo_vmware.api [None req-4f6edbf0-2dbd-479d-b1c2-51e25413dafe tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Task: {'id': task-2735408, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.032572] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59370607-fc3b-46ba-a397-85a245a99edb {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.040853] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5f44acf-8c56-462d-a474-b019e17f43cc {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.074642] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61f0a7ff-ff5f-4466-b6e3-4d5b8f54e585 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.083749] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33e612d5-525c-4cd1-a1d0-51a35e92ade7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.100474] env[63028]: DEBUG nova.compute.provider_tree [None req-ce407e48-ce4a-4a11-b901-4c905a15470b tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 762.133825] env[63028]: INFO nova.compute.manager [-] [instance: 50e4934b-b9b1-4887-b5d1-95a37fbf4c41] Took 1.35 seconds to deallocate network for instance. [ 762.284918] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5e5cf0a0-0a56-464b-9790-52f94aa8fe18 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Acquiring lock "refresh_cache-af87f1a5-b413-4b26-be91-474ad1f73df8" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 762.285124] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5e5cf0a0-0a56-464b-9790-52f94aa8fe18 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Acquired lock "refresh_cache-af87f1a5-b413-4b26-be91-474ad1f73df8" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 762.285613] env[63028]: DEBUG nova.network.neutron [None req-5e5cf0a0-0a56-464b-9790-52f94aa8fe18 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: af87f1a5-b413-4b26-be91-474ad1f73df8] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 762.374237] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-f78b3882-29ca-452b-a11e-a6f9e213d5cd tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 1316318e-8dcf-4ac2-b40a-6a3ab6964997] Creating Snapshot of the VM instance {{(pid=63028) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 762.374683] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-de30baa1-5505-4678-a851-3ad96d52995a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.387048] env[63028]: DEBUG 
oslo_vmware.api [None req-f78b3882-29ca-452b-a11e-a6f9e213d5cd tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Waiting for the task: (returnval){ [ 762.387048] env[63028]: value = "task-2735409" [ 762.387048] env[63028]: _type = "Task" [ 762.387048] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.394519] env[63028]: DEBUG oslo_vmware.api [None req-f78b3882-29ca-452b-a11e-a6f9e213d5cd tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Task: {'id': task-2735409, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.525859] env[63028]: DEBUG oslo_vmware.api [None req-4f6edbf0-2dbd-479d-b1c2-51e25413dafe tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Task: {'id': task-2735408, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.083022} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 762.526142] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-4f6edbf0-2dbd-479d-b1c2-51e25413dafe tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: a4b0d948-d950-414a-b23f-faefa5ab038c] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 762.526928] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66cd9cc4-a76b-4f02-b212-725a61e93685 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.550290] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-4f6edbf0-2dbd-479d-b1c2-51e25413dafe tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: a4b0d948-d950-414a-b23f-faefa5ab038c] Reconfiguring VM instance instance-00000030 to attach disk [datastore2] a4b0d948-d950-414a-b23f-faefa5ab038c/a4b0d948-d950-414a-b23f-faefa5ab038c.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 762.551645] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-154a1894-ec4c-4b75-b0fb-ff1d22d7e4bb {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.574742] env[63028]: DEBUG oslo_vmware.api [None req-4f6edbf0-2dbd-479d-b1c2-51e25413dafe tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Waiting for the task: (returnval){ [ 762.574742] env[63028]: value = "task-2735410" [ 762.574742] env[63028]: _type = "Task" [ 762.574742] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.587159] env[63028]: DEBUG oslo_vmware.api [None req-4f6edbf0-2dbd-479d-b1c2-51e25413dafe tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Task: {'id': task-2735410, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.603520] env[63028]: DEBUG nova.scheduler.client.report [None req-ce407e48-ce4a-4a11-b901-4c905a15470b tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 762.639889] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2a4fe11b-9e59-4c76-ac53-0f5c85045de9 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 762.828933] env[63028]: DEBUG nova.network.neutron [None req-5e5cf0a0-0a56-464b-9790-52f94aa8fe18 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: af87f1a5-b413-4b26-be91-474ad1f73df8] Instance cache missing network info. {{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 762.896496] env[63028]: DEBUG oslo_vmware.api [None req-f78b3882-29ca-452b-a11e-a6f9e213d5cd tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Task: {'id': task-2735409, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.029172] env[63028]: DEBUG nova.network.neutron [None req-5e5cf0a0-0a56-464b-9790-52f94aa8fe18 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: af87f1a5-b413-4b26-be91-474ad1f73df8] Updating instance_info_cache with network_info: [{"id": "431bf6ff-554c-484a-8431-a1f18a9b937e", "address": "fa:16:3e:a1:a0:f4", "network": {"id": "9c32515c-66bf-4a5f-9011-975fe2f6b264", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1101331936-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8efc6d89903c454eb39136a76e0adef5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92fe29b3-0907-453d-aabb-5559c4bd7c0f", "external-id": "nsx-vlan-transportzone-482", "segmentation_id": 482, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap431bf6ff-55", "ovs_interfaceid": "431bf6ff-554c-484a-8431-a1f18a9b937e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 763.086190] env[63028]: DEBUG oslo_vmware.api [None req-4f6edbf0-2dbd-479d-b1c2-51e25413dafe tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Task: {'id': task-2735410, 'name': ReconfigVM_Task, 'duration_secs': 0.35738} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 763.086471] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-4f6edbf0-2dbd-479d-b1c2-51e25413dafe tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: a4b0d948-d950-414a-b23f-faefa5ab038c] Reconfigured VM instance instance-00000030 to attach disk [datastore2] a4b0d948-d950-414a-b23f-faefa5ab038c/a4b0d948-d950-414a-b23f-faefa5ab038c.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 763.087187] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2c1ab4fe-57b0-4723-99c6-54454c31cb16 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.101338] env[63028]: DEBUG oslo_vmware.api [None req-4f6edbf0-2dbd-479d-b1c2-51e25413dafe tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Waiting for the task: (returnval){ [ 763.101338] env[63028]: value = "task-2735411" [ 763.101338] env[63028]: _type = "Task" [ 763.101338] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 763.115144] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ce407e48-ce4a-4a11-b901-4c905a15470b tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.300s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 763.115144] env[63028]: DEBUG oslo_vmware.api [None req-4f6edbf0-2dbd-479d-b1c2-51e25413dafe tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Task: {'id': task-2735411, 'name': Rename_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.116726] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f33d3f0b-c388-40bb-bf9b-780b1fd7657c tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.616s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 763.119329] env[63028]: INFO nova.compute.claims [None req-f33d3f0b-c388-40bb-bf9b-780b1fd7657c tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] [instance: da88308f-ce62-40af-adae-e38aa506bdd9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 763.170539] env[63028]: INFO nova.scheduler.client.report [None req-ce407e48-ce4a-4a11-b901-4c905a15470b tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] Deleted allocations for instance 22713da1-ae53-4bbe-ae55-2490440cbd87 [ 763.399167] env[63028]: DEBUG oslo_vmware.api [None req-f78b3882-29ca-452b-a11e-a6f9e213d5cd tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Task: {'id': task-2735409, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.531525] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5e5cf0a0-0a56-464b-9790-52f94aa8fe18 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Releasing lock "refresh_cache-af87f1a5-b413-4b26-be91-474ad1f73df8" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 763.531878] env[63028]: DEBUG nova.compute.manager [None req-5e5cf0a0-0a56-464b-9790-52f94aa8fe18 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: af87f1a5-b413-4b26-be91-474ad1f73df8] Instance network_info: |[{"id": "431bf6ff-554c-484a-8431-a1f18a9b937e", "address": "fa:16:3e:a1:a0:f4", "network": {"id": "9c32515c-66bf-4a5f-9011-975fe2f6b264", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1101331936-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8efc6d89903c454eb39136a76e0adef5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92fe29b3-0907-453d-aabb-5559c4bd7c0f", "external-id": "nsx-vlan-transportzone-482", "segmentation_id": 482, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap431bf6ff-55", "ovs_interfaceid": "431bf6ff-554c-484a-8431-a1f18a9b937e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 763.532332] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-5e5cf0a0-0a56-464b-9790-52f94aa8fe18 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: af87f1a5-b413-4b26-be91-474ad1f73df8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a1:a0:f4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '92fe29b3-0907-453d-aabb-5559c4bd7c0f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '431bf6ff-554c-484a-8431-a1f18a9b937e', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 763.541499] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e5cf0a0-0a56-464b-9790-52f94aa8fe18 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Creating folder: Project (8efc6d89903c454eb39136a76e0adef5). Parent ref: group-v550570. 
{{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 763.542875] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-20a4d2d1-373b-4bf8-b014-082647cec5f1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.557138] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-5e5cf0a0-0a56-464b-9790-52f94aa8fe18 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Created folder: Project (8efc6d89903c454eb39136a76e0adef5) in parent group-v550570. [ 763.557372] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e5cf0a0-0a56-464b-9790-52f94aa8fe18 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Creating folder: Instances. Parent ref: group-v550718. {{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 763.557635] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1196f1fb-f013-493a-a211-3f3cda1fc47f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.569212] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-5e5cf0a0-0a56-464b-9790-52f94aa8fe18 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Created folder: Instances in parent group-v550718. [ 763.569941] env[63028]: DEBUG oslo.service.loopingcall [None req-5e5cf0a0-0a56-464b-9790-52f94aa8fe18 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 763.569941] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: af87f1a5-b413-4b26-be91-474ad1f73df8] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 763.570116] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d63d61e7-26d1-4a45-a7bc-8cc24407c14c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.592118] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 763.592118] env[63028]: value = "task-2735414" [ 763.592118] env[63028]: _type = "Task" [ 763.592118] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 763.601219] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735414, 'name': CreateVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.610478] env[63028]: DEBUG oslo_vmware.api [None req-4f6edbf0-2dbd-479d-b1c2-51e25413dafe tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Task: {'id': task-2735411, 'name': Rename_Task, 'duration_secs': 0.17488} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 763.610890] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f6edbf0-2dbd-479d-b1c2-51e25413dafe tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: a4b0d948-d950-414a-b23f-faefa5ab038c] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 763.611203] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-201180e2-bc9d-4fb9-ad1c-26834d672df1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.618324] env[63028]: DEBUG oslo_vmware.api [None req-4f6edbf0-2dbd-479d-b1c2-51e25413dafe tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Waiting for the task: (returnval){ [ 763.618324] env[63028]: value = "task-2735415" [ 763.618324] env[63028]: _type = "Task" [ 763.618324] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 763.631718] env[63028]: DEBUG oslo_vmware.api [None req-4f6edbf0-2dbd-479d-b1c2-51e25413dafe tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Task: {'id': task-2735415, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.683622] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ce407e48-ce4a-4a11-b901-4c905a15470b tempest-ServerMetadataNegativeTestJSON-1185284913 tempest-ServerMetadataNegativeTestJSON-1185284913-project-member] Lock "22713da1-ae53-4bbe-ae55-2490440cbd87" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.731s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 763.759175] env[63028]: DEBUG nova.compute.manager [req-a24f733e-6dcc-481e-bf7c-188571842828 req-abc8cf36-8b6a-494f-b749-24ed99620b4b service nova] [instance: af87f1a5-b413-4b26-be91-474ad1f73df8] Received event network-changed-431bf6ff-554c-484a-8431-a1f18a9b937e {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 763.759490] env[63028]: DEBUG nova.compute.manager [req-a24f733e-6dcc-481e-bf7c-188571842828 req-abc8cf36-8b6a-494f-b749-24ed99620b4b service nova] [instance: af87f1a5-b413-4b26-be91-474ad1f73df8] Refreshing instance network info cache due to event network-changed-431bf6ff-554c-484a-8431-a1f18a9b937e. 
{{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 763.759640] env[63028]: DEBUG oslo_concurrency.lockutils [req-a24f733e-6dcc-481e-bf7c-188571842828 req-abc8cf36-8b6a-494f-b749-24ed99620b4b service nova] Acquiring lock "refresh_cache-af87f1a5-b413-4b26-be91-474ad1f73df8" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 763.759818] env[63028]: DEBUG oslo_concurrency.lockutils [req-a24f733e-6dcc-481e-bf7c-188571842828 req-abc8cf36-8b6a-494f-b749-24ed99620b4b service nova] Acquired lock "refresh_cache-af87f1a5-b413-4b26-be91-474ad1f73df8" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 763.759957] env[63028]: DEBUG nova.network.neutron [req-a24f733e-6dcc-481e-bf7c-188571842828 req-abc8cf36-8b6a-494f-b749-24ed99620b4b service nova] [instance: af87f1a5-b413-4b26-be91-474ad1f73df8] Refreshing network info cache for port 431bf6ff-554c-484a-8431-a1f18a9b937e {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 763.897931] env[63028]: DEBUG oslo_vmware.api [None req-f78b3882-29ca-452b-a11e-a6f9e213d5cd tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Task: {'id': task-2735409, 'name': CreateSnapshot_Task, 'duration_secs': 1.28554} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 763.898278] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-f78b3882-29ca-452b-a11e-a6f9e213d5cd tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 1316318e-8dcf-4ac2-b40a-6a3ab6964997] Created Snapshot of the VM instance {{(pid=63028) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 763.899043] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8152d919-85c9-4ae7-9f02-9b515f191937 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.102302] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735414, 'name': CreateVM_Task, 'duration_secs': 0.401733} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.102479] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: af87f1a5-b413-4b26-be91-474ad1f73df8] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 764.103197] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5e5cf0a0-0a56-464b-9790-52f94aa8fe18 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 764.103365] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5e5cf0a0-0a56-464b-9790-52f94aa8fe18 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 764.103700] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5e5cf0a0-0a56-464b-9790-52f94aa8fe18 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 764.104413] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3d34084f-267b-478c-b3e0-bab0d3529b18 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.109522] env[63028]: DEBUG oslo_vmware.api [None req-5e5cf0a0-0a56-464b-9790-52f94aa8fe18 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Waiting for the task: (returnval){ [ 764.109522] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52395e64-9fb6-3ad1-1084-1e4ece457c31" [ 764.109522] env[63028]: _type = "Task" [ 764.109522] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.118561] env[63028]: DEBUG oslo_vmware.api [None req-5e5cf0a0-0a56-464b-9790-52f94aa8fe18 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52395e64-9fb6-3ad1-1084-1e4ece457c31, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.127679] env[63028]: DEBUG oslo_vmware.api [None req-4f6edbf0-2dbd-479d-b1c2-51e25413dafe tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Task: {'id': task-2735415, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.424673] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-f78b3882-29ca-452b-a11e-a6f9e213d5cd tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 1316318e-8dcf-4ac2-b40a-6a3ab6964997] Creating linked-clone VM from snapshot {{(pid=63028) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 764.428048] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-f40ba0cf-685d-4d9e-bf55-ad494fe60b54 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.440228] env[63028]: DEBUG oslo_vmware.api [None req-f78b3882-29ca-452b-a11e-a6f9e213d5cd tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Waiting for the task: (returnval){ [ 764.440228] env[63028]: value = "task-2735416" [ 764.440228] env[63028]: _type = "Task" [ 764.440228] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.448500] env[63028]: DEBUG oslo_vmware.api [None req-f78b3882-29ca-452b-a11e-a6f9e213d5cd tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Task: {'id': task-2735416, 'name': CloneVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.621456] env[63028]: DEBUG oslo_vmware.api [None req-5e5cf0a0-0a56-464b-9790-52f94aa8fe18 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52395e64-9fb6-3ad1-1084-1e4ece457c31, 'name': SearchDatastore_Task, 'duration_secs': 0.029823} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.628615] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5e5cf0a0-0a56-464b-9790-52f94aa8fe18 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 764.628910] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-5e5cf0a0-0a56-464b-9790-52f94aa8fe18 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: af87f1a5-b413-4b26-be91-474ad1f73df8] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 764.629192] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5e5cf0a0-0a56-464b-9790-52f94aa8fe18 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 764.629395] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5e5cf0a0-0a56-464b-9790-52f94aa8fe18 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 764.629522] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e5cf0a0-0a56-464b-9790-52f94aa8fe18 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 764.631170] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2e7493a9-9801-49c6-b991-039e72151f1d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.643462] env[63028]: DEBUG oslo_vmware.api [None req-4f6edbf0-2dbd-479d-b1c2-51e25413dafe tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Task: {'id': task-2735415, 'name': PowerOnVM_Task, 'duration_secs': 0.51995} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.643710] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f6edbf0-2dbd-479d-b1c2-51e25413dafe tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: a4b0d948-d950-414a-b23f-faefa5ab038c] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 764.643905] env[63028]: INFO nova.compute.manager [None req-4f6edbf0-2dbd-479d-b1c2-51e25413dafe tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: a4b0d948-d950-414a-b23f-faefa5ab038c] Took 9.17 seconds to spawn the instance on the hypervisor. 
[ 764.644481] env[63028]: DEBUG nova.compute.manager [None req-4f6edbf0-2dbd-479d-b1c2-51e25413dafe tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: a4b0d948-d950-414a-b23f-faefa5ab038c] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 764.645316] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a76a6bc-0c35-4e16-96e3-9cf65b09f27f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.649900] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e5cf0a0-0a56-464b-9790-52f94aa8fe18 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 764.650181] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-5e5cf0a0-0a56-464b-9790-52f94aa8fe18 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 764.654927] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-31dc2732-51ef-47e0-8c09-e2b0ca5f9e15 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.665603] env[63028]: DEBUG oslo_vmware.api [None req-5e5cf0a0-0a56-464b-9790-52f94aa8fe18 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Waiting for the task: (returnval){ [ 764.665603] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52884ea7-c86a-dee3-bc3e-2d6667e8576f" [ 764.665603] env[63028]: _type = "Task" [ 764.665603] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.676271] env[63028]: DEBUG oslo_vmware.api [None req-5e5cf0a0-0a56-464b-9790-52f94aa8fe18 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52884ea7-c86a-dee3-bc3e-2d6667e8576f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.721683] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2e6ae0c-8dc8-454c-a0a6-128efd4c04a8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.732639] env[63028]: DEBUG nova.network.neutron [req-a24f733e-6dcc-481e-bf7c-188571842828 req-abc8cf36-8b6a-494f-b749-24ed99620b4b service nova] [instance: af87f1a5-b413-4b26-be91-474ad1f73df8] Updated VIF entry in instance network info cache for port 431bf6ff-554c-484a-8431-a1f18a9b937e. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 764.733021] env[63028]: DEBUG nova.network.neutron [req-a24f733e-6dcc-481e-bf7c-188571842828 req-abc8cf36-8b6a-494f-b749-24ed99620b4b service nova] [instance: af87f1a5-b413-4b26-be91-474ad1f73df8] Updating instance_info_cache with network_info: [{"id": "431bf6ff-554c-484a-8431-a1f18a9b937e", "address": "fa:16:3e:a1:a0:f4", "network": {"id": "9c32515c-66bf-4a5f-9011-975fe2f6b264", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1101331936-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8efc6d89903c454eb39136a76e0adef5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92fe29b3-0907-453d-aabb-5559c4bd7c0f", "external-id": "nsx-vlan-transportzone-482", "segmentation_id": 482, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap431bf6ff-55", "ovs_interfaceid": "431bf6ff-554c-484a-8431-a1f18a9b937e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 764.735410] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54d7c4d2-17b6-4b08-8bef-2004dc9bbb4e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.774276] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0920c3c3-add9-4eff-91d8-585da7588421 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.783271] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c8cfa08-5016-4056-80a1-8b5876208272 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.798638] env[63028]: DEBUG nova.compute.provider_tree [None req-f33d3f0b-c388-40bb-bf9b-780b1fd7657c tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 764.953861] env[63028]: DEBUG oslo_vmware.api [None req-f78b3882-29ca-452b-a11e-a6f9e213d5cd tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Task: {'id': task-2735416, 'name': CloneVM_Task} progress is 94%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.177403] env[63028]: INFO nova.compute.manager [None req-4f6edbf0-2dbd-479d-b1c2-51e25413dafe tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: a4b0d948-d950-414a-b23f-faefa5ab038c] Took 35.53 seconds to build instance. 
[ 765.182660] env[63028]: DEBUG oslo_vmware.api [None req-5e5cf0a0-0a56-464b-9790-52f94aa8fe18 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52884ea7-c86a-dee3-bc3e-2d6667e8576f, 'name': SearchDatastore_Task, 'duration_secs': 0.017352} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.183591] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d127a821-800b-4174-bd51-d5ec13d6d853 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.189959] env[63028]: DEBUG oslo_vmware.api [None req-5e5cf0a0-0a56-464b-9790-52f94aa8fe18 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Waiting for the task: (returnval){ [ 765.189959] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52a7e414-3383-8973-58dd-a08ce4412042" [ 765.189959] env[63028]: _type = "Task" [ 765.189959] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.200083] env[63028]: DEBUG oslo_vmware.api [None req-5e5cf0a0-0a56-464b-9790-52f94aa8fe18 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52a7e414-3383-8973-58dd-a08ce4412042, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.239717] env[63028]: DEBUG oslo_concurrency.lockutils [req-a24f733e-6dcc-481e-bf7c-188571842828 req-abc8cf36-8b6a-494f-b749-24ed99620b4b service nova] Releasing lock "refresh_cache-af87f1a5-b413-4b26-be91-474ad1f73df8" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 765.303833] env[63028]: DEBUG nova.scheduler.client.report [None req-f33d3f0b-c388-40bb-bf9b-780b1fd7657c tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 765.448601] env[63028]: DEBUG oslo_vmware.api [None req-f78b3882-29ca-452b-a11e-a6f9e213d5cd tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Task: {'id': task-2735416, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.678828] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4f6edbf0-2dbd-479d-b1c2-51e25413dafe tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Lock "a4b0d948-d950-414a-b23f-faefa5ab038c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 91.060s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 765.705937] env[63028]: DEBUG oslo_vmware.api [None req-5e5cf0a0-0a56-464b-9790-52f94aa8fe18 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52a7e414-3383-8973-58dd-a08ce4412042, 'name': SearchDatastore_Task, 'duration_secs': 0.014759} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.706149] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5e5cf0a0-0a56-464b-9790-52f94aa8fe18 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 765.706413] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e5cf0a0-0a56-464b-9790-52f94aa8fe18 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] af87f1a5-b413-4b26-be91-474ad1f73df8/af87f1a5-b413-4b26-be91-474ad1f73df8.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 765.706683] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-eeb30d71-db08-42c5-ab73-d5176e92966f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.717364] env[63028]: DEBUG oslo_vmware.api [None req-5e5cf0a0-0a56-464b-9790-52f94aa8fe18 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Waiting for the task: (returnval){ [ 765.717364] env[63028]: value = "task-2735417" [ 765.717364] env[63028]: _type = "Task" [ 765.717364] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.729159] env[63028]: DEBUG oslo_vmware.api [None req-5e5cf0a0-0a56-464b-9790-52f94aa8fe18 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2735417, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.770943] env[63028]: DEBUG oslo_concurrency.lockutils [None req-eb856bf8-9faa-4cfd-8b84-a007f985af94 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Acquiring lock "interface-5a340e31-678c-437e-aa4e-07d5d9f4334f-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 765.773624] env[63028]: DEBUG oslo_concurrency.lockutils [None req-eb856bf8-9faa-4cfd-8b84-a007f985af94 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Lock "interface-5a340e31-678c-437e-aa4e-07d5d9f4334f-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 765.773624] env[63028]: DEBUG nova.objects.instance [None req-eb856bf8-9faa-4cfd-8b84-a007f985af94 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Lazy-loading 'flavor' on Instance uuid 5a340e31-678c-437e-aa4e-07d5d9f4334f {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 765.811785] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f33d3f0b-c388-40bb-bf9b-780b1fd7657c tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.693s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 765.811785] env[63028]: DEBUG nova.compute.manager [None req-f33d3f0b-c388-40bb-bf9b-780b1fd7657c tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] [instance: da88308f-ce62-40af-adae-e38aa506bdd9] Start building networks asynchronously for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 765.814064] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d602f814-ca27-4ecc-a846-b4d4de9caa30 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.376s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 765.817017] env[63028]: INFO nova.compute.claims [None req-d602f814-ca27-4ecc-a846-b4d4de9caa30 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: cd11b318-9158-4f1d-8aa8-1c9d565bb5d5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 765.952390] env[63028]: DEBUG oslo_vmware.api [None req-f78b3882-29ca-452b-a11e-a6f9e213d5cd tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Task: {'id': task-2735416, 'name': CloneVM_Task} progress is 94%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.184230] env[63028]: DEBUG nova.compute.manager [None req-114b3219-d7c5-4835-8ed6-9b7d0ce0f081 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] [instance: e346c31b-ef1b-4f75-8564-cefe26bd672f] Starting instance... 
{{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 766.228152] env[63028]: DEBUG oslo_vmware.api [None req-5e5cf0a0-0a56-464b-9790-52f94aa8fe18 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2735417, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.327499] env[63028]: DEBUG nova.compute.utils [None req-f33d3f0b-c388-40bb-bf9b-780b1fd7657c tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 766.330598] env[63028]: DEBUG nova.compute.manager [None req-f33d3f0b-c388-40bb-bf9b-780b1fd7657c tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] [instance: da88308f-ce62-40af-adae-e38aa506bdd9] Allocating IP information in the background. {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 766.330777] env[63028]: DEBUG nova.network.neutron [None req-f33d3f0b-c388-40bb-bf9b-780b1fd7657c tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] [instance: da88308f-ce62-40af-adae-e38aa506bdd9] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 766.375787] env[63028]: DEBUG nova.policy [None req-f33d3f0b-c388-40bb-bf9b-780b1fd7657c tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ebee998098894863bb772bd6b77fe4df', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '91a3f78ba4514500bfd4ed81b74526e3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 766.388284] env[63028]: DEBUG nova.objects.instance [None req-eb856bf8-9faa-4cfd-8b84-a007f985af94 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Lazy-loading 'pci_requests' on Instance uuid 5a340e31-678c-437e-aa4e-07d5d9f4334f {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 766.454748] env[63028]: DEBUG oslo_vmware.api [None req-f78b3882-29ca-452b-a11e-a6f9e213d5cd tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Task: {'id': task-2735416, 'name': CloneVM_Task} progress is 95%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.714605] env[63028]: DEBUG oslo_concurrency.lockutils [None req-114b3219-d7c5-4835-8ed6-9b7d0ce0f081 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 766.735172] env[63028]: DEBUG oslo_vmware.api [None req-5e5cf0a0-0a56-464b-9790-52f94aa8fe18 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2735417, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.521866} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.737019] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e5cf0a0-0a56-464b-9790-52f94aa8fe18 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] af87f1a5-b413-4b26-be91-474ad1f73df8/af87f1a5-b413-4b26-be91-474ad1f73df8.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 766.737019] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-5e5cf0a0-0a56-464b-9790-52f94aa8fe18 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: af87f1a5-b413-4b26-be91-474ad1f73df8] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 766.737019] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a1694c25-83da-49c1-a4b4-19358be08a5d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.751216] env[63028]: DEBUG oslo_vmware.api [None req-5e5cf0a0-0a56-464b-9790-52f94aa8fe18 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Waiting for the task: (returnval){ [ 766.751216] env[63028]: value = "task-2735418" [ 766.751216] env[63028]: _type = "Task" [ 766.751216] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.761346] env[63028]: DEBUG oslo_vmware.api [None req-5e5cf0a0-0a56-464b-9790-52f94aa8fe18 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2735418, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.832347] env[63028]: DEBUG nova.compute.manager [None req-f33d3f0b-c388-40bb-bf9b-780b1fd7657c tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] [instance: da88308f-ce62-40af-adae-e38aa506bdd9] Start building block device mappings for instance. 
{{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 766.892281] env[63028]: DEBUG nova.objects.base [None req-eb856bf8-9faa-4cfd-8b84-a007f985af94 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Object Instance<5a340e31-678c-437e-aa4e-07d5d9f4334f> lazy-loaded attributes: flavor,pci_requests {{(pid=63028) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 766.892520] env[63028]: DEBUG nova.network.neutron [None req-eb856bf8-9faa-4cfd-8b84-a007f985af94 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: 5a340e31-678c-437e-aa4e-07d5d9f4334f] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 766.940051] env[63028]: DEBUG nova.network.neutron [None req-f33d3f0b-c388-40bb-bf9b-780b1fd7657c tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] [instance: da88308f-ce62-40af-adae-e38aa506bdd9] Successfully created port: 63f3891b-02ff-4d0f-9fb9-6fff4d3b71b5 {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 766.960673] env[63028]: DEBUG oslo_vmware.api [None req-f78b3882-29ca-452b-a11e-a6f9e213d5cd tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Task: {'id': task-2735416, 'name': CloneVM_Task, 'duration_secs': 2.454342} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.961056] env[63028]: INFO nova.virt.vmwareapi.vmops [None req-f78b3882-29ca-452b-a11e-a6f9e213d5cd tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 1316318e-8dcf-4ac2-b40a-6a3ab6964997] Created linked-clone VM from snapshot [ 766.962548] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfbe53d6-b2e1-43a6-8761-8f16380bfa45 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.970310] env[63028]: DEBUG nova.policy [None req-eb856bf8-9faa-4cfd-8b84-a007f985af94 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1b48f3f2a85945379bdb33bf153bde9c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '25a6457f62d149629c09589feb1a550c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 766.980211] env[63028]: DEBUG nova.virt.vmwareapi.images [None req-f78b3882-29ca-452b-a11e-a6f9e213d5cd tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 1316318e-8dcf-4ac2-b40a-6a3ab6964997] Uploading image e5c2bb00-9e6e-489f-8c03-b884e4f87e0e {{(pid=63028) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 767.016988] env[63028]: DEBUG oslo_vmware.rw_handles [None req-f78b3882-29ca-452b-a11e-a6f9e213d5cd tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Creating HttpNfcLease lease for exporting 
VM: (result){ [ 767.016988] env[63028]: value = "vm-550722" [ 767.016988] env[63028]: _type = "VirtualMachine" [ 767.016988] env[63028]: }. {{(pid=63028) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 767.017389] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-3719f446-2a61-4c69-9414-4a917c3c6ec1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.028344] env[63028]: DEBUG oslo_vmware.rw_handles [None req-f78b3882-29ca-452b-a11e-a6f9e213d5cd tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Lease: (returnval){ [ 767.028344] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52bcdfab-357c-e8c4-c221-440e995f59cc" [ 767.028344] env[63028]: _type = "HttpNfcLease" [ 767.028344] env[63028]: } obtained for exporting VM: (result){ [ 767.028344] env[63028]: value = "vm-550722" [ 767.028344] env[63028]: _type = "VirtualMachine" [ 767.028344] env[63028]: }. {{(pid=63028) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 767.028344] env[63028]: DEBUG oslo_vmware.api [None req-f78b3882-29ca-452b-a11e-a6f9e213d5cd tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Waiting for the lease: (returnval){ [ 767.028344] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52bcdfab-357c-e8c4-c221-440e995f59cc" [ 767.028344] env[63028]: _type = "HttpNfcLease" [ 767.028344] env[63028]: } to be ready. {{(pid=63028) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 767.036427] env[63028]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 767.036427] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52bcdfab-357c-e8c4-c221-440e995f59cc" [ 767.036427] env[63028]: _type = "HttpNfcLease" [ 767.036427] env[63028]: } is initializing. {{(pid=63028) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 767.263022] env[63028]: DEBUG oslo_vmware.api [None req-5e5cf0a0-0a56-464b-9790-52f94aa8fe18 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2735418, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.100872} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.264744] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-5e5cf0a0-0a56-464b-9790-52f94aa8fe18 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: af87f1a5-b413-4b26-be91-474ad1f73df8] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 767.264744] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-946ff6dc-2096-424c-a5d2-7982eb0b842f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.293073] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-5e5cf0a0-0a56-464b-9790-52f94aa8fe18 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: af87f1a5-b413-4b26-be91-474ad1f73df8] Reconfiguring VM instance instance-00000031 to attach disk [datastore2] af87f1a5-b413-4b26-be91-474ad1f73df8/af87f1a5-b413-4b26-be91-474ad1f73df8.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 767.297143] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c2b93256-32ba-4588-b130-6b79df53a4a1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.321123] env[63028]: DEBUG oslo_vmware.api [None req-5e5cf0a0-0a56-464b-9790-52f94aa8fe18 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Waiting for the task: (returnval){ [ 767.321123] env[63028]: value = "task-2735420" [ 767.321123] env[63028]: _type = "Task" [ 767.321123] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 767.337631] env[63028]: DEBUG oslo_vmware.api [None req-5e5cf0a0-0a56-464b-9790-52f94aa8fe18 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2735420, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.364364] env[63028]: DEBUG nova.network.neutron [None req-eb856bf8-9faa-4cfd-8b84-a007f985af94 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: 5a340e31-678c-437e-aa4e-07d5d9f4334f] Successfully created port: 192a1d7c-32a1-4b52-944b-c6c3b6c52f93 {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 767.490572] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99e6d49b-f603-4b1f-9502-16208cef306e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.497837] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fc60bd2-b89f-4f63-881e-875163de489c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.537585] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26249c77-f705-46d5-ad6f-dcd063b3c057 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.544437] env[63028]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 767.544437] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52bcdfab-357c-e8c4-c221-440e995f59cc" [ 767.544437] env[63028]: _type = "HttpNfcLease" [ 767.544437] env[63028]: } is ready. {{(pid=63028) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 767.546767] env[63028]: DEBUG oslo_vmware.rw_handles [None req-f78b3882-29ca-452b-a11e-a6f9e213d5cd tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 767.546767] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52bcdfab-357c-e8c4-c221-440e995f59cc" [ 767.546767] env[63028]: _type = "HttpNfcLease" [ 767.546767] env[63028]: }. {{(pid=63028) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 767.547674] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c5bc8d9-566e-42db-9fb3-7957ba54f2ae {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.552679] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb236868-cc74-4ed7-89f8-38f6c089a944 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.567671] env[63028]: DEBUG oslo_vmware.rw_handles [None req-f78b3882-29ca-452b-a11e-a6f9e213d5cd tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5222cd1b-9682-3ff0-c7ce-26183c6c453f/disk-0.vmdk from lease info. 
{{(pid=63028) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 767.567671] env[63028]: DEBUG oslo_vmware.rw_handles [None req-f78b3882-29ca-452b-a11e-a6f9e213d5cd tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5222cd1b-9682-3ff0-c7ce-26183c6c453f/disk-0.vmdk for reading. {{(pid=63028) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 767.574923] env[63028]: DEBUG nova.compute.provider_tree [None req-d602f814-ca27-4ecc-a846-b4d4de9caa30 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 767.680716] env[63028]: DEBUG nova.compute.manager [None req-408233a5-e0c5-4f83-9696-e436ee74635a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: a4b0d948-d950-414a-b23f-faefa5ab038c] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 767.684769] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37458815-6619-4d0d-8b14-76d9637ab7ca {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.690353] env[63028]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-59695b3f-9f9a-4b2a-8dd3-bb10d66c6ecb {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.838794] env[63028]: DEBUG oslo_vmware.api [None req-5e5cf0a0-0a56-464b-9790-52f94aa8fe18 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2735420, 'name': ReconfigVM_Task, 'duration_secs': 0.500985} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.838794] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-5e5cf0a0-0a56-464b-9790-52f94aa8fe18 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: af87f1a5-b413-4b26-be91-474ad1f73df8] Reconfigured VM instance instance-00000031 to attach disk [datastore2] af87f1a5-b413-4b26-be91-474ad1f73df8/af87f1a5-b413-4b26-be91-474ad1f73df8.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 767.839414] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-25a104a1-3dac-4e97-a2ca-0a95db00b371 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.849963] env[63028]: DEBUG nova.compute.manager [None req-f33d3f0b-c388-40bb-bf9b-780b1fd7657c tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] [instance: da88308f-ce62-40af-adae-e38aa506bdd9] Start spawning the instance on the hypervisor. 
{{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 767.857949] env[63028]: DEBUG oslo_vmware.api [None req-5e5cf0a0-0a56-464b-9790-52f94aa8fe18 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Waiting for the task: (returnval){ [ 767.857949] env[63028]: value = "task-2735421" [ 767.857949] env[63028]: _type = "Task" [ 767.857949] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 767.873569] env[63028]: DEBUG oslo_vmware.api [None req-5e5cf0a0-0a56-464b-9790-52f94aa8fe18 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2735421, 'name': Rename_Task} progress is 10%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.976836] env[63028]: DEBUG nova.virt.hardware [None req-f33d3f0b-c388-40bb-bf9b-780b1fd7657c tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 767.977318] env[63028]: DEBUG nova.virt.hardware [None req-f33d3f0b-c388-40bb-bf9b-780b1fd7657c tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 767.977603] env[63028]: DEBUG nova.virt.hardware [None req-f33d3f0b-c388-40bb-bf9b-780b1fd7657c tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 767.977719] env[63028]: DEBUG nova.virt.hardware [None req-f33d3f0b-c388-40bb-bf9b-780b1fd7657c tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 767.977861] env[63028]: DEBUG nova.virt.hardware [None req-f33d3f0b-c388-40bb-bf9b-780b1fd7657c tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 767.978029] env[63028]: DEBUG nova.virt.hardware [None req-f33d3f0b-c388-40bb-bf9b-780b1fd7657c tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:434}} [ 767.978236] env[63028]: DEBUG nova.virt.hardware [None req-f33d3f0b-c388-40bb-bf9b-780b1fd7657c tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 767.978389] env[63028]: DEBUG nova.virt.hardware [None req-f33d3f0b-c388-40bb-bf9b-780b1fd7657c tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 767.978548] env[63028]: DEBUG nova.virt.hardware [None req-f33d3f0b-c388-40bb-bf9b-780b1fd7657c tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 767.978710] env[63028]: DEBUG nova.virt.hardware [None req-f33d3f0b-c388-40bb-bf9b-780b1fd7657c tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 767.978920] env[63028]: DEBUG nova.virt.hardware [None req-f33d3f0b-c388-40bb-bf9b-780b1fd7657c tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 767.979796] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d620ab6d-2202-4dfa-bb37-a86d35576c07 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.994282] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3852c471-26f1-4199-a5f2-be6bbbfb5c08 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.080323] env[63028]: DEBUG nova.scheduler.client.report [None req-d602f814-ca27-4ecc-a846-b4d4de9caa30 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 768.205020] env[63028]: INFO nova.compute.manager [None req-408233a5-e0c5-4f83-9696-e436ee74635a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: a4b0d948-d950-414a-b23f-faefa5ab038c] instance snapshotting [ 768.209430] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7605c43-e110-4827-8f74-1e0d3219398f {{(pid=63028) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.229185] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0606b706-239c-4a7b-986f-5851fa90b25f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.369832] env[63028]: DEBUG oslo_vmware.api [None req-5e5cf0a0-0a56-464b-9790-52f94aa8fe18 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2735421, 'name': Rename_Task, 'duration_secs': 0.216152} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.370319] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e5cf0a0-0a56-464b-9790-52f94aa8fe18 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: af87f1a5-b413-4b26-be91-474ad1f73df8] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 768.370742] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cfbce276-48a1-42d9-8408-38c463a43f4e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.383567] env[63028]: DEBUG oslo_vmware.api [None req-5e5cf0a0-0a56-464b-9790-52f94aa8fe18 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Waiting for the task: (returnval){ [ 768.383567] env[63028]: value = "task-2735422" [ 768.383567] env[63028]: _type = "Task" [ 768.383567] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.391421] env[63028]: DEBUG oslo_vmware.api [None req-5e5cf0a0-0a56-464b-9790-52f94aa8fe18 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2735422, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.587216] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d602f814-ca27-4ecc-a846-b4d4de9caa30 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.772s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 768.587216] env[63028]: DEBUG nova.compute.manager [None req-d602f814-ca27-4ecc-a846-b4d4de9caa30 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: cd11b318-9158-4f1d-8aa8-1c9d565bb5d5] Start building networks asynchronously for instance. 
{{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 768.593497] env[63028]: DEBUG oslo_concurrency.lockutils [None req-be8438c3-6eec-42fd-9020-8898f3ea9278 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.501s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 768.598019] env[63028]: INFO nova.compute.claims [None req-be8438c3-6eec-42fd-9020-8898f3ea9278 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] [instance: 7e914e49-0d70-4024-940b-ad2a15e9dff7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 768.747668] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-408233a5-e0c5-4f83-9696-e436ee74635a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: a4b0d948-d950-414a-b23f-faefa5ab038c] Creating Snapshot of the VM instance {{(pid=63028) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 768.747668] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-26369a7f-c0c4-4c47-a802-505f8bf2bad5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.761594] env[63028]: DEBUG nova.network.neutron [None req-f33d3f0b-c388-40bb-bf9b-780b1fd7657c tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] [instance: da88308f-ce62-40af-adae-e38aa506bdd9] Successfully updated port: 63f3891b-02ff-4d0f-9fb9-6fff4d3b71b5 {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 768.774605] env[63028]: DEBUG oslo_vmware.api [None req-408233a5-e0c5-4f83-9696-e436ee74635a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Waiting for the task: (returnval){ [ 768.774605] env[63028]: value = "task-2735423" [ 768.774605] env[63028]: _type = "Task" [ 768.774605] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.787499] env[63028]: DEBUG oslo_vmware.api [None req-408233a5-e0c5-4f83-9696-e436ee74635a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Task: {'id': task-2735423, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.893222] env[63028]: DEBUG oslo_vmware.api [None req-5e5cf0a0-0a56-464b-9790-52f94aa8fe18 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2735422, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.967390] env[63028]: DEBUG oslo_concurrency.lockutils [None req-64d57359-6efb-4dbd-9bd5-0b6de0e28336 tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Acquiring lock "d663c2df-ae54-4c50-a70f-e2180700c700" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 768.967729] env[63028]: DEBUG oslo_concurrency.lockutils [None req-64d57359-6efb-4dbd-9bd5-0b6de0e28336 tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Lock "d663c2df-ae54-4c50-a70f-e2180700c700" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 769.101875] env[63028]: DEBUG nova.compute.utils [None req-d602f814-ca27-4ecc-a846-b4d4de9caa30 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 769.106620] env[63028]: DEBUG nova.compute.manager [None req-d602f814-ca27-4ecc-a846-b4d4de9caa30 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: cd11b318-9158-4f1d-8aa8-1c9d565bb5d5] Allocating IP information in the background. {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 769.107211] env[63028]: DEBUG nova.network.neutron [None req-d602f814-ca27-4ecc-a846-b4d4de9caa30 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: cd11b318-9158-4f1d-8aa8-1c9d565bb5d5] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 769.133376] env[63028]: DEBUG nova.network.neutron [None req-eb856bf8-9faa-4cfd-8b84-a007f985af94 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: 5a340e31-678c-437e-aa4e-07d5d9f4334f] Successfully updated port: 192a1d7c-32a1-4b52-944b-c6c3b6c52f93 {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 769.168513] env[63028]: DEBUG nova.policy [None req-d602f814-ca27-4ecc-a846-b4d4de9caa30 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ab9cb927bc134277bb980682fef01978', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5ef9a42771824708832a74238bbe89c0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 769.267600] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f33d3f0b-c388-40bb-bf9b-780b1fd7657c tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Acquiring lock "refresh_cache-da88308f-ce62-40af-adae-e38aa506bdd9" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 769.267694] env[63028]: DEBUG oslo_concurrency.lockutils [None 
req-f33d3f0b-c388-40bb-bf9b-780b1fd7657c tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Acquired lock "refresh_cache-da88308f-ce62-40af-adae-e38aa506bdd9" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 769.267990] env[63028]: DEBUG nova.network.neutron [None req-f33d3f0b-c388-40bb-bf9b-780b1fd7657c tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] [instance: da88308f-ce62-40af-adae-e38aa506bdd9] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 769.286749] env[63028]: DEBUG oslo_vmware.api [None req-408233a5-e0c5-4f83-9696-e436ee74635a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Task: {'id': task-2735423, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.394169] env[63028]: DEBUG oslo_vmware.api [None req-5e5cf0a0-0a56-464b-9790-52f94aa8fe18 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2735422, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.445838] env[63028]: DEBUG nova.network.neutron [None req-d602f814-ca27-4ecc-a846-b4d4de9caa30 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: cd11b318-9158-4f1d-8aa8-1c9d565bb5d5] Successfully created port: 34861b32-33da-4ca0-b4ae-0031ab0f8619 {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 769.471277] env[63028]: DEBUG nova.compute.utils [None req-64d57359-6efb-4dbd-9bd5-0b6de0e28336 tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 769.607714] env[63028]: DEBUG nova.compute.manager [None req-d602f814-ca27-4ecc-a846-b4d4de9caa30 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: cd11b318-9158-4f1d-8aa8-1c9d565bb5d5] Start building block device mappings for instance. 
{{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 769.639497] env[63028]: DEBUG oslo_concurrency.lockutils [None req-eb856bf8-9faa-4cfd-8b84-a007f985af94 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Acquiring lock "refresh_cache-5a340e31-678c-437e-aa4e-07d5d9f4334f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 769.639742] env[63028]: DEBUG oslo_concurrency.lockutils [None req-eb856bf8-9faa-4cfd-8b84-a007f985af94 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Acquired lock "refresh_cache-5a340e31-678c-437e-aa4e-07d5d9f4334f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 769.640040] env[63028]: DEBUG nova.network.neutron [None req-eb856bf8-9faa-4cfd-8b84-a007f985af94 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: 5a340e31-678c-437e-aa4e-07d5d9f4334f] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 769.788276] env[63028]: DEBUG oslo_vmware.api [None req-408233a5-e0c5-4f83-9696-e436ee74635a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Task: {'id': task-2735423, 'name': CreateSnapshot_Task, 'duration_secs': 0.854981} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 769.791014] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-408233a5-e0c5-4f83-9696-e436ee74635a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: a4b0d948-d950-414a-b23f-faefa5ab038c] Created Snapshot of the VM instance {{(pid=63028) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 769.792032] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6be3ee5b-503b-4390-9692-fcc6ba6b27ee {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.806427] env[63028]: DEBUG nova.network.neutron [None req-f33d3f0b-c388-40bb-bf9b-780b1fd7657c tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] [instance: da88308f-ce62-40af-adae-e38aa506bdd9] Instance cache missing network info. {{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 769.897708] env[63028]: DEBUG oslo_vmware.api [None req-5e5cf0a0-0a56-464b-9790-52f94aa8fe18 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2735422, 'name': PowerOnVM_Task, 'duration_secs': 1.387803} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 769.897983] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e5cf0a0-0a56-464b-9790-52f94aa8fe18 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: af87f1a5-b413-4b26-be91-474ad1f73df8] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 769.898209] env[63028]: INFO nova.compute.manager [None req-5e5cf0a0-0a56-464b-9790-52f94aa8fe18 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: af87f1a5-b413-4b26-be91-474ad1f73df8] Took 9.31 seconds to spawn the instance on the hypervisor. [ 769.898386] env[63028]: DEBUG nova.compute.manager [None req-5e5cf0a0-0a56-464b-9790-52f94aa8fe18 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: af87f1a5-b413-4b26-be91-474ad1f73df8] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 769.899224] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-721bc6fc-a19e-459d-bda0-7ce2dc25f9ce {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.976786] env[63028]: DEBUG oslo_concurrency.lockutils [None req-64d57359-6efb-4dbd-9bd5-0b6de0e28336 tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Lock "d663c2df-ae54-4c50-a70f-e2180700c700" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.009s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 769.978517] env[63028]: DEBUG nova.network.neutron [None req-f33d3f0b-c388-40bb-bf9b-780b1fd7657c tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] [instance: da88308f-ce62-40af-adae-e38aa506bdd9] Updating instance_info_cache with network_info: [{"id": "63f3891b-02ff-4d0f-9fb9-6fff4d3b71b5", "address": "fa:16:3e:4f:5f:59", "network": {"id": "e3a8845b-9fc6-46bd-8272-501135c875ad", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-2047431351-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91a3f78ba4514500bfd4ed81b74526e3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f68ebd2a-3c68-48db-8c32-8a01497fc2e7", "external-id": "nsx-vlan-transportzone-49", "segmentation_id": 49, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap63f3891b-02", "ovs_interfaceid": "63f3891b-02ff-4d0f-9fb9-6fff4d3b71b5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 770.146527] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-f1bbfa66-2209-48ea-9144-9df0e25ee1c5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.158566] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d93738d3-d0da-4b29-9c98-8a83e2546bea {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.190947] env[63028]: WARNING nova.network.neutron [None req-eb856bf8-9faa-4cfd-8b84-a007f985af94 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: 5a340e31-678c-437e-aa4e-07d5d9f4334f] c2f1496c-e3fd-43db-a032-12cdacdb4e46 already exists in list: networks containing: ['c2f1496c-e3fd-43db-a032-12cdacdb4e46']. ignoring it [ 770.193429] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba31a23b-2aec-46aa-98f0-15c7fb8225d5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.202539] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9c40245-3f9b-45f3-9086-ccea1166a6a5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.217642] env[63028]: DEBUG nova.compute.provider_tree [None req-be8438c3-6eec-42fd-9020-8898f3ea9278 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 770.313651] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-408233a5-e0c5-4f83-9696-e436ee74635a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: a4b0d948-d950-414a-b23f-faefa5ab038c] Creating linked-clone VM from snapshot {{(pid=63028) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 770.315036] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-858af11f-4ab0-4ea3-aac3-a9e34d813615 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.327058] env[63028]: DEBUG oslo_vmware.api [None req-408233a5-e0c5-4f83-9696-e436ee74635a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Waiting for the task: (returnval){ [ 770.327058] env[63028]: value = "task-2735424" [ 770.327058] env[63028]: _type = "Task" [ 770.327058] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 770.337270] env[63028]: DEBUG oslo_vmware.api [None req-408233a5-e0c5-4f83-9696-e436ee74635a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Task: {'id': task-2735424, 'name': CloneVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.418754] env[63028]: INFO nova.compute.manager [None req-5e5cf0a0-0a56-464b-9790-52f94aa8fe18 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: af87f1a5-b413-4b26-be91-474ad1f73df8] Took 35.91 seconds to build instance. 
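The repeated "Task: {'id': ..., 'name': ...} progress is N%" entries in this log come from oslo.vmware polling each vSphere task until it finishes. A minimal sketch of that poll-until-terminal-state loop, assuming a hypothetical get_task_info callable and an illustrative poll interval (the real library reads the task's info property through the PropertyCollector):

    import time

    POLL_INTERVAL = 0.5  # seconds between polls; illustrative value only

    def wait_for_task(get_task_info, task_ref):
        """Poll a vSphere task until it reaches a terminal state.

        get_task_info(task_ref) is a hypothetical callable returning an object
        with .state ('running', 'success' or 'error') and .progress (0-100).
        """
        while True:
            info = get_task_info(task_ref)
            if info.state == 'success':
                return info          # e.g. "PowerOnVM_Task ... completed successfully"
            if info.state == 'error':
                raise RuntimeError('task %s failed' % task_ref)
            # corresponds to the "... progress is N%" debug lines above
            print('Task %s progress is %s%%' % (task_ref, info.progress))
            time.sleep(POLL_INTERVAL)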
[ 770.482487] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f33d3f0b-c388-40bb-bf9b-780b1fd7657c tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Releasing lock "refresh_cache-da88308f-ce62-40af-adae-e38aa506bdd9" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 770.482817] env[63028]: DEBUG nova.compute.manager [None req-f33d3f0b-c388-40bb-bf9b-780b1fd7657c tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] [instance: da88308f-ce62-40af-adae-e38aa506bdd9] Instance network_info: |[{"id": "63f3891b-02ff-4d0f-9fb9-6fff4d3b71b5", "address": "fa:16:3e:4f:5f:59", "network": {"id": "e3a8845b-9fc6-46bd-8272-501135c875ad", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-2047431351-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91a3f78ba4514500bfd4ed81b74526e3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f68ebd2a-3c68-48db-8c32-8a01497fc2e7", "external-id": "nsx-vlan-transportzone-49", "segmentation_id": 49, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap63f3891b-02", "ovs_interfaceid": "63f3891b-02ff-4d0f-9fb9-6fff4d3b71b5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 770.483338] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-f33d3f0b-c388-40bb-bf9b-780b1fd7657c tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] [instance: da88308f-ce62-40af-adae-e38aa506bdd9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4f:5f:59', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f68ebd2a-3c68-48db-8c32-8a01497fc2e7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '63f3891b-02ff-4d0f-9fb9-6fff4d3b71b5', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 770.490665] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-f33d3f0b-c388-40bb-bf9b-780b1fd7657c tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Creating folder: Project (91a3f78ba4514500bfd4ed81b74526e3). Parent ref: group-v550570. 
{{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 770.491888] env[63028]: DEBUG nova.network.neutron [None req-eb856bf8-9faa-4cfd-8b84-a007f985af94 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: 5a340e31-678c-437e-aa4e-07d5d9f4334f] Updating instance_info_cache with network_info: [{"id": "39f160e2-809e-4b2c-9424-70448b807385", "address": "fa:16:3e:f5:5d:b4", "network": {"id": "c2f1496c-e3fd-43db-a032-12cdacdb4e46", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1287620143-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.199", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "25a6457f62d149629c09589feb1a550c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap39f160e2-80", "ovs_interfaceid": "39f160e2-809e-4b2c-9424-70448b807385", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "192a1d7c-32a1-4b52-944b-c6c3b6c52f93", "address": "fa:16:3e:c3:2a:53", "network": {"id": "c2f1496c-e3fd-43db-a032-12cdacdb4e46", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1287620143-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "25a6457f62d149629c09589feb1a550c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap192a1d7c-32", "ovs_interfaceid": "192a1d7c-32a1-4b52-944b-c6c3b6c52f93", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 770.493127] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-60071703-7455-4554-8381-4a2a3fb91d79 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.507441] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-f33d3f0b-c388-40bb-bf9b-780b1fd7657c tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Created folder: Project (91a3f78ba4514500bfd4ed81b74526e3) in parent group-v550570. 
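Just above, the driver invokes Folder.CreateFolder twice: once for a per-project folder and once for an Instances folder nested inside it. A rough sketch of that two-level layout; create_folder here is a stand-in callable, not the actual vm_util helper, and handling of an already-existing folder is omitted:

    def ensure_instance_folder(create_folder, root_folder_ref, project_id):
        """Return the 'Instances' folder under 'Project (<project_id>)'.

        create_folder(parent_ref, name) -> folder_ref is assumed to create the
        folder (or return the existing one) and give back its reference.
        """
        project_folder = create_folder(root_folder_ref, 'Project (%s)' % project_id)
        instances_folder = create_folder(project_folder, 'Instances')
        return instances_folder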
[ 770.507618] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-f33d3f0b-c388-40bb-bf9b-780b1fd7657c tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Creating folder: Instances. Parent ref: group-v550725. {{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 770.508506] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-df4e474d-1364-4a54-a5e8-99f51b4227a6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.519765] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-f33d3f0b-c388-40bb-bf9b-780b1fd7657c tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Created folder: Instances in parent group-v550725. [ 770.520715] env[63028]: DEBUG oslo.service.loopingcall [None req-f33d3f0b-c388-40bb-bf9b-780b1fd7657c tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 770.520715] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: da88308f-ce62-40af-adae-e38aa506bdd9] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 770.520715] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-12ed4308-17fa-4b69-a5c8-5f10252d59b9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.541805] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 770.541805] env[63028]: value = "task-2735427" [ 770.541805] env[63028]: _type = "Task" [ 770.541805] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 770.554575] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735427, 'name': CreateVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.622291] env[63028]: DEBUG nova.compute.manager [None req-d602f814-ca27-4ecc-a846-b4d4de9caa30 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: cd11b318-9158-4f1d-8aa8-1c9d565bb5d5] Start spawning the instance on the hypervisor. 
{{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 770.652253] env[63028]: DEBUG nova.virt.hardware [None req-d602f814-ca27-4ecc-a846-b4d4de9caa30 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 770.652686] env[63028]: DEBUG nova.virt.hardware [None req-d602f814-ca27-4ecc-a846-b4d4de9caa30 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 770.652961] env[63028]: DEBUG nova.virt.hardware [None req-d602f814-ca27-4ecc-a846-b4d4de9caa30 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 770.653427] env[63028]: DEBUG nova.virt.hardware [None req-d602f814-ca27-4ecc-a846-b4d4de9caa30 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 770.653594] env[63028]: DEBUG nova.virt.hardware [None req-d602f814-ca27-4ecc-a846-b4d4de9caa30 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 770.653840] env[63028]: DEBUG nova.virt.hardware [None req-d602f814-ca27-4ecc-a846-b4d4de9caa30 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 770.654234] env[63028]: DEBUG nova.virt.hardware [None req-d602f814-ca27-4ecc-a846-b4d4de9caa30 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 770.654513] env[63028]: DEBUG nova.virt.hardware [None req-d602f814-ca27-4ecc-a846-b4d4de9caa30 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 770.654805] env[63028]: DEBUG nova.virt.hardware [None req-d602f814-ca27-4ecc-a846-b4d4de9caa30 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Got 1 possible 
topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 770.655136] env[63028]: DEBUG nova.virt.hardware [None req-d602f814-ca27-4ecc-a846-b4d4de9caa30 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 770.655448] env[63028]: DEBUG nova.virt.hardware [None req-d602f814-ca27-4ecc-a846-b4d4de9caa30 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 770.656446] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87ecafc0-0792-49ae-9314-68f68ad6a367 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.667146] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8813e22b-8467-4ce4-92ea-a5036b174fc2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.721346] env[63028]: DEBUG nova.scheduler.client.report [None req-be8438c3-6eec-42fd-9020-8898f3ea9278 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 770.838741] env[63028]: DEBUG oslo_vmware.api [None req-408233a5-e0c5-4f83-9696-e436ee74635a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Task: {'id': task-2735424, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.923279] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5e5cf0a0-0a56-464b-9790-52f94aa8fe18 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Lock "af87f1a5-b413-4b26-be91-474ad1f73df8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 80.165s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 770.967274] env[63028]: DEBUG nova.network.neutron [None req-d602f814-ca27-4ecc-a846-b4d4de9caa30 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: cd11b318-9158-4f1d-8aa8-1c9d565bb5d5] Successfully updated port: 34861b32-33da-4ca0-b4ae-0031ab0f8619 {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 770.994100] env[63028]: DEBUG nova.compute.manager [req-476a3ab8-9d2d-45aa-84c3-ba5b1c775933 req-b071ba3e-653b-4922-bbd7-602745b3c2b8 service nova] [instance: da88308f-ce62-40af-adae-e38aa506bdd9] Received event network-vif-plugged-63f3891b-02ff-4d0f-9fb9-6fff4d3b71b5 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 770.994433] env[63028]: DEBUG oslo_concurrency.lockutils [req-476a3ab8-9d2d-45aa-84c3-ba5b1c775933 req-b071ba3e-653b-4922-bbd7-602745b3c2b8 service nova] Acquiring lock "da88308f-ce62-40af-adae-e38aa506bdd9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 770.994709] env[63028]: DEBUG oslo_concurrency.lockutils [req-476a3ab8-9d2d-45aa-84c3-ba5b1c775933 req-b071ba3e-653b-4922-bbd7-602745b3c2b8 service nova] Lock "da88308f-ce62-40af-adae-e38aa506bdd9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 770.995304] env[63028]: DEBUG oslo_concurrency.lockutils [req-476a3ab8-9d2d-45aa-84c3-ba5b1c775933 req-b071ba3e-653b-4922-bbd7-602745b3c2b8 service nova] Lock "da88308f-ce62-40af-adae-e38aa506bdd9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 770.995304] env[63028]: DEBUG nova.compute.manager [req-476a3ab8-9d2d-45aa-84c3-ba5b1c775933 req-b071ba3e-653b-4922-bbd7-602745b3c2b8 service nova] [instance: da88308f-ce62-40af-adae-e38aa506bdd9] No waiting events found dispatching network-vif-plugged-63f3891b-02ff-4d0f-9fb9-6fff4d3b71b5 {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 770.995428] env[63028]: WARNING nova.compute.manager [req-476a3ab8-9d2d-45aa-84c3-ba5b1c775933 req-b071ba3e-653b-4922-bbd7-602745b3c2b8 service nova] [instance: da88308f-ce62-40af-adae-e38aa506bdd9] Received unexpected event network-vif-plugged-63f3891b-02ff-4d0f-9fb9-6fff4d3b71b5 for instance with vm_state building and task_state spawning. 
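The "Acquiring lock ... / Lock ... acquired ... waited Ns / Lock ... released ... held Ns" triplets that recur throughout this log are emitted by oslo.concurrency's lock wrapper. A small usage sketch of that primitive; the lock name and guarded function below are made up for illustration:

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def instance_claim(instance_uuid):
        # Only one thread at a time runs this body. Contention shows up in the
        # log as a non-zero "waited N.NNNs" on the acquired line, and the time
        # spent inside shows up as "held N.NNNs" on release.
        return 'claimed %s' % instance_uuid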
[ 770.996263] env[63028]: DEBUG oslo_concurrency.lockutils [None req-eb856bf8-9faa-4cfd-8b84-a007f985af94 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Releasing lock "refresh_cache-5a340e31-678c-437e-aa4e-07d5d9f4334f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 770.996939] env[63028]: DEBUG oslo_concurrency.lockutils [None req-eb856bf8-9faa-4cfd-8b84-a007f985af94 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Acquiring lock "5a340e31-678c-437e-aa4e-07d5d9f4334f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 770.997155] env[63028]: DEBUG oslo_concurrency.lockutils [None req-eb856bf8-9faa-4cfd-8b84-a007f985af94 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Acquired lock "5a340e31-678c-437e-aa4e-07d5d9f4334f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 770.998355] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71888ffb-9812-4f66-acf0-0ec52ebffe25 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.019519] env[63028]: DEBUG nova.virt.hardware [None req-eb856bf8-9faa-4cfd-8b84-a007f985af94 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 771.019747] env[63028]: DEBUG nova.virt.hardware [None req-eb856bf8-9faa-4cfd-8b84-a007f985af94 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 771.019862] env[63028]: DEBUG nova.virt.hardware [None req-eb856bf8-9faa-4cfd-8b84-a007f985af94 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 771.020107] env[63028]: DEBUG nova.virt.hardware [None req-eb856bf8-9faa-4cfd-8b84-a007f985af94 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 771.020271] env[63028]: DEBUG nova.virt.hardware [None req-eb856bf8-9faa-4cfd-8b84-a007f985af94 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 771.020417] env[63028]: DEBUG nova.virt.hardware [None 
req-eb856bf8-9faa-4cfd-8b84-a007f985af94 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 771.020621] env[63028]: DEBUG nova.virt.hardware [None req-eb856bf8-9faa-4cfd-8b84-a007f985af94 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 771.020781] env[63028]: DEBUG nova.virt.hardware [None req-eb856bf8-9faa-4cfd-8b84-a007f985af94 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 771.020946] env[63028]: DEBUG nova.virt.hardware [None req-eb856bf8-9faa-4cfd-8b84-a007f985af94 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 771.021122] env[63028]: DEBUG nova.virt.hardware [None req-eb856bf8-9faa-4cfd-8b84-a007f985af94 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 771.021298] env[63028]: DEBUG nova.virt.hardware [None req-eb856bf8-9faa-4cfd-8b84-a007f985af94 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 771.027418] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-eb856bf8-9faa-4cfd-8b84-a007f985af94 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: 5a340e31-678c-437e-aa4e-07d5d9f4334f] Reconfiguring VM to attach interface {{(pid=63028) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 771.028169] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2125d42b-951f-40b8-9123-0bd34836085e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.047986] env[63028]: DEBUG oslo_vmware.api [None req-eb856bf8-9faa-4cfd-8b84-a007f985af94 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Waiting for the task: (returnval){ [ 771.047986] env[63028]: value = "task-2735428" [ 771.047986] env[63028]: _type = "Task" [ 771.047986] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 771.054474] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735427, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.055417] env[63028]: DEBUG oslo_concurrency.lockutils [None req-64d57359-6efb-4dbd-9bd5-0b6de0e28336 tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Acquiring lock "d663c2df-ae54-4c50-a70f-e2180700c700" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 771.055643] env[63028]: DEBUG oslo_concurrency.lockutils [None req-64d57359-6efb-4dbd-9bd5-0b6de0e28336 tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Lock "d663c2df-ae54-4c50-a70f-e2180700c700" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 771.055862] env[63028]: INFO nova.compute.manager [None req-64d57359-6efb-4dbd-9bd5-0b6de0e28336 tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] [instance: d663c2df-ae54-4c50-a70f-e2180700c700] Attaching volume fdc60726-6ead-4fa7-9b0a-56a0c631789c to /dev/sdb [ 771.061830] env[63028]: DEBUG oslo_vmware.api [None req-eb856bf8-9faa-4cfd-8b84-a007f985af94 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2735428, 'name': ReconfigVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.098283] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e68ce64-c217-46a5-b4d3-31cc5bc61d89 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.105461] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22f7aa32-92e8-4a72-9b98-b940f85a7856 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.122060] env[63028]: DEBUG nova.virt.block_device [None req-64d57359-6efb-4dbd-9bd5-0b6de0e28336 tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] [instance: d663c2df-ae54-4c50-a70f-e2180700c700] Updating existing volume attachment record: 3db981b9-6d93-4890-9cb0-4b784535c70e {{(pid=63028) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 771.229415] env[63028]: DEBUG oslo_concurrency.lockutils [None req-be8438c3-6eec-42fd-9020-8898f3ea9278 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.635s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 771.229564] env[63028]: DEBUG nova.compute.manager [None req-be8438c3-6eec-42fd-9020-8898f3ea9278 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] [instance: 7e914e49-0d70-4024-940b-ad2a15e9dff7] Start building networks asynchronously for instance. 
{{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 771.232707] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d9ad4531-4478-4b68-8e6a-ce52da2b3c63 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.648s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 771.234236] env[63028]: INFO nova.compute.claims [None req-d9ad4531-4478-4b68-8e6a-ce52da2b3c63 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] [instance: 15326f55-2db8-47c3-b1fd-ce8ba1174c79] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 771.338473] env[63028]: DEBUG oslo_vmware.api [None req-408233a5-e0c5-4f83-9696-e436ee74635a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Task: {'id': task-2735424, 'name': CloneVM_Task} progress is 94%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.380621] env[63028]: DEBUG nova.compute.manager [req-cb03b125-c042-4fbb-948f-22e906b7691f req-7c5c7401-b0f1-47dd-ae3a-747fa512c260 service nova] [instance: 5a340e31-678c-437e-aa4e-07d5d9f4334f] Received event network-vif-plugged-192a1d7c-32a1-4b52-944b-c6c3b6c52f93 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 771.380832] env[63028]: DEBUG oslo_concurrency.lockutils [req-cb03b125-c042-4fbb-948f-22e906b7691f req-7c5c7401-b0f1-47dd-ae3a-747fa512c260 service nova] Acquiring lock "5a340e31-678c-437e-aa4e-07d5d9f4334f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 771.381081] env[63028]: DEBUG oslo_concurrency.lockutils [req-cb03b125-c042-4fbb-948f-22e906b7691f req-7c5c7401-b0f1-47dd-ae3a-747fa512c260 service nova] Lock "5a340e31-678c-437e-aa4e-07d5d9f4334f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 771.381300] env[63028]: DEBUG oslo_concurrency.lockutils [req-cb03b125-c042-4fbb-948f-22e906b7691f req-7c5c7401-b0f1-47dd-ae3a-747fa512c260 service nova] Lock "5a340e31-678c-437e-aa4e-07d5d9f4334f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 771.381531] env[63028]: DEBUG nova.compute.manager [req-cb03b125-c042-4fbb-948f-22e906b7691f req-7c5c7401-b0f1-47dd-ae3a-747fa512c260 service nova] [instance: 5a340e31-678c-437e-aa4e-07d5d9f4334f] No waiting events found dispatching network-vif-plugged-192a1d7c-32a1-4b52-944b-c6c3b6c52f93 {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 771.381730] env[63028]: WARNING nova.compute.manager [req-cb03b125-c042-4fbb-948f-22e906b7691f req-7c5c7401-b0f1-47dd-ae3a-747fa512c260 service nova] [instance: 5a340e31-678c-437e-aa4e-07d5d9f4334f] Received unexpected event network-vif-plugged-192a1d7c-32a1-4b52-944b-c6c3b6c52f93 for instance with vm_state active and task_state None. 
[ 771.426177] env[63028]: DEBUG nova.compute.manager [None req-9f9f608a-dd59-4deb-8ecf-94d85c2fa869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 70888889-4965-47ab-ad47-59f1c1286bd8] Starting instance... {{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 771.470520] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d602f814-ca27-4ecc-a846-b4d4de9caa30 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Acquiring lock "refresh_cache-cd11b318-9158-4f1d-8aa8-1c9d565bb5d5" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 771.470689] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d602f814-ca27-4ecc-a846-b4d4de9caa30 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Acquired lock "refresh_cache-cd11b318-9158-4f1d-8aa8-1c9d565bb5d5" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 771.470830] env[63028]: DEBUG nova.network.neutron [None req-d602f814-ca27-4ecc-a846-b4d4de9caa30 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: cd11b318-9158-4f1d-8aa8-1c9d565bb5d5] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 771.557083] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735427, 'name': CreateVM_Task} progress is 99%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.563576] env[63028]: DEBUG oslo_vmware.api [None req-eb856bf8-9faa-4cfd-8b84-a007f985af94 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2735428, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.739931] env[63028]: DEBUG nova.compute.utils [None req-be8438c3-6eec-42fd-9020-8898f3ea9278 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 771.749982] env[63028]: DEBUG nova.compute.manager [None req-be8438c3-6eec-42fd-9020-8898f3ea9278 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] [instance: 7e914e49-0d70-4024-940b-ad2a15e9dff7] Allocating IP information in the background. 
{{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 771.749982] env[63028]: DEBUG nova.network.neutron [None req-be8438c3-6eec-42fd-9020-8898f3ea9278 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] [instance: 7e914e49-0d70-4024-940b-ad2a15e9dff7] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 771.798401] env[63028]: DEBUG nova.policy [None req-be8438c3-6eec-42fd-9020-8898f3ea9278 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '27d72488efa64c0089e4595fb9a9477b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3008f14179f4461aab24b22ce9174433', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 771.843070] env[63028]: DEBUG oslo_vmware.api [None req-408233a5-e0c5-4f83-9696-e436ee74635a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Task: {'id': task-2735424, 'name': CloneVM_Task} progress is 94%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.955791] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9f9f608a-dd59-4deb-8ecf-94d85c2fa869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 772.012962] env[63028]: DEBUG nova.network.neutron [None req-d602f814-ca27-4ecc-a846-b4d4de9caa30 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: cd11b318-9158-4f1d-8aa8-1c9d565bb5d5] Instance cache missing network info. {{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 772.060106] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735427, 'name': CreateVM_Task, 'duration_secs': 1.489211} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 772.063731] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: da88308f-ce62-40af-adae-e38aa506bdd9] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 772.064183] env[63028]: DEBUG oslo_vmware.api [None req-eb856bf8-9faa-4cfd-8b84-a007f985af94 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2735428, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.064858] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f33d3f0b-c388-40bb-bf9b-780b1fd7657c tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 772.065057] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f33d3f0b-c388-40bb-bf9b-780b1fd7657c tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 772.065414] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f33d3f0b-c388-40bb-bf9b-780b1fd7657c tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 772.065773] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-449bee0c-b8be-43c3-a010-1359c1905c80 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.074256] env[63028]: DEBUG oslo_vmware.api [None req-f33d3f0b-c388-40bb-bf9b-780b1fd7657c tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Waiting for the task: (returnval){ [ 772.074256] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5207a9d3-7f36-f1b4-5ef8-de7992409cda" [ 772.074256] env[63028]: _type = "Task" [ 772.074256] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 772.085947] env[63028]: DEBUG oslo_vmware.api [None req-f33d3f0b-c388-40bb-bf9b-780b1fd7657c tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5207a9d3-7f36-f1b4-5ef8-de7992409cda, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.140444] env[63028]: DEBUG nova.network.neutron [None req-be8438c3-6eec-42fd-9020-8898f3ea9278 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] [instance: 7e914e49-0d70-4024-940b-ad2a15e9dff7] Successfully created port: 4bf1e126-9048-489d-9c61-18ebe4c2af31 {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 772.251981] env[63028]: DEBUG nova.compute.manager [None req-be8438c3-6eec-42fd-9020-8898f3ea9278 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] [instance: 7e914e49-0d70-4024-940b-ad2a15e9dff7] Start building block device mappings for instance. 
{{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 772.266249] env[63028]: DEBUG nova.network.neutron [None req-d602f814-ca27-4ecc-a846-b4d4de9caa30 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: cd11b318-9158-4f1d-8aa8-1c9d565bb5d5] Updating instance_info_cache with network_info: [{"id": "34861b32-33da-4ca0-b4ae-0031ab0f8619", "address": "fa:16:3e:46:83:a5", "network": {"id": "35eace8d-0404-444c-a1ec-e13fd5687644", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1634949692-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ef9a42771824708832a74238bbe89c0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c330dbdb-ad20-4e7e-8a12-66e4a914a84a", "external-id": "nsx-vlan-transportzone-181", "segmentation_id": 181, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap34861b32-33", "ovs_interfaceid": "34861b32-33da-4ca0-b4ae-0031ab0f8619", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 772.347223] env[63028]: DEBUG oslo_vmware.api [None req-408233a5-e0c5-4f83-9696-e436ee74635a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Task: {'id': task-2735424, 'name': CloneVM_Task, 'duration_secs': 1.891445} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 772.348095] env[63028]: INFO nova.virt.vmwareapi.vmops [None req-408233a5-e0c5-4f83-9696-e436ee74635a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: a4b0d948-d950-414a-b23f-faefa5ab038c] Created linked-clone VM from snapshot [ 772.348829] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3acad6d-6d0e-4ea6-bf95-af474c6878ae {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.362998] env[63028]: DEBUG nova.virt.vmwareapi.images [None req-408233a5-e0c5-4f83-9696-e436ee74635a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: a4b0d948-d950-414a-b23f-faefa5ab038c] Uploading image 665ff5e9-b303-4514-9dfb-771d74a19520 {{(pid=63028) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 772.377589] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-408233a5-e0c5-4f83-9696-e436ee74635a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: a4b0d948-d950-414a-b23f-faefa5ab038c] Destroying the VM {{(pid=63028) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 772.377904] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-a91a7b1d-2817-4bb4-85d5-51d78f0351d7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.391741] env[63028]: DEBUG oslo_vmware.api [None req-408233a5-e0c5-4f83-9696-e436ee74635a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Waiting for the task: (returnval){ [ 772.391741] env[63028]: value = "task-2735432" [ 772.391741] env[63028]: _type = "Task" [ 772.391741] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 772.406050] env[63028]: DEBUG oslo_vmware.api [None req-408233a5-e0c5-4f83-9696-e436ee74635a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Task: {'id': task-2735432, 'name': Destroy_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.561665] env[63028]: DEBUG oslo_vmware.api [None req-eb856bf8-9faa-4cfd-8b84-a007f985af94 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2735428, 'name': ReconfigVM_Task, 'duration_secs': 1.19337} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 772.565381] env[63028]: DEBUG oslo_concurrency.lockutils [None req-eb856bf8-9faa-4cfd-8b84-a007f985af94 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Releasing lock "5a340e31-678c-437e-aa4e-07d5d9f4334f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 772.565653] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-eb856bf8-9faa-4cfd-8b84-a007f985af94 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: 5a340e31-678c-437e-aa4e-07d5d9f4334f] Reconfigured VM to attach interface {{(pid=63028) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 772.590339] env[63028]: DEBUG oslo_vmware.api [None req-f33d3f0b-c388-40bb-bf9b-780b1fd7657c tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5207a9d3-7f36-f1b4-5ef8-de7992409cda, 'name': SearchDatastore_Task, 'duration_secs': 0.025046} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 772.590905] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f33d3f0b-c388-40bb-bf9b-780b1fd7657c tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 772.590905] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-f33d3f0b-c388-40bb-bf9b-780b1fd7657c tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] [instance: da88308f-ce62-40af-adae-e38aa506bdd9] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 772.591231] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f33d3f0b-c388-40bb-bf9b-780b1fd7657c tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 772.591403] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f33d3f0b-c388-40bb-bf9b-780b1fd7657c tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 772.591678] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-f33d3f0b-c388-40bb-bf9b-780b1fd7657c tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 772.594610] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7a163f29-e0e7-4dca-b7ab-fc780434bd62 {{(pid=63028) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.605540] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-f33d3f0b-c388-40bb-bf9b-780b1fd7657c tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 772.605737] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-f33d3f0b-c388-40bb-bf9b-780b1fd7657c tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 772.606624] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d7c56bb1-7e88-40d9-a921-4ad3fae3ac4d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.616610] env[63028]: DEBUG oslo_vmware.api [None req-f33d3f0b-c388-40bb-bf9b-780b1fd7657c tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Waiting for the task: (returnval){ [ 772.616610] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52e33d06-7e1b-410c-e6d2-3305b54852f6" [ 772.616610] env[63028]: _type = "Task" [ 772.616610] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 772.630770] env[63028]: DEBUG oslo_vmware.api [None req-f33d3f0b-c388-40bb-bf9b-780b1fd7657c tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52e33d06-7e1b-410c-e6d2-3305b54852f6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.769645] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d602f814-ca27-4ecc-a846-b4d4de9caa30 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Releasing lock "refresh_cache-cd11b318-9158-4f1d-8aa8-1c9d565bb5d5" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 772.769819] env[63028]: DEBUG nova.compute.manager [None req-d602f814-ca27-4ecc-a846-b4d4de9caa30 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: cd11b318-9158-4f1d-8aa8-1c9d565bb5d5] Instance network_info: |[{"id": "34861b32-33da-4ca0-b4ae-0031ab0f8619", "address": "fa:16:3e:46:83:a5", "network": {"id": "35eace8d-0404-444c-a1ec-e13fd5687644", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1634949692-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ef9a42771824708832a74238bbe89c0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c330dbdb-ad20-4e7e-8a12-66e4a914a84a", "external-id": "nsx-vlan-transportzone-181", "segmentation_id": 181, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap34861b32-33", "ovs_interfaceid": "34861b32-33da-4ca0-b4ae-0031ab0f8619", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 772.770396] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-d602f814-ca27-4ecc-a846-b4d4de9caa30 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: cd11b318-9158-4f1d-8aa8-1c9d565bb5d5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:46:83:a5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c330dbdb-ad20-4e7e-8a12-66e4a914a84a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '34861b32-33da-4ca0-b4ae-0031ab0f8619', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 772.779094] env[63028]: DEBUG oslo.service.loopingcall [None req-d602f814-ca27-4ecc-a846-b4d4de9caa30 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 772.782149] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cd11b318-9158-4f1d-8aa8-1c9d565bb5d5] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 772.782581] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f53c2348-495e-495f-a865-aa2d34b049d6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.809374] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 772.809374] env[63028]: value = "task-2735433" [ 772.809374] env[63028]: _type = "Task" [ 772.809374] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 772.822283] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735433, 'name': CreateVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.840179] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ed62391-c1b8-4600-ae25-3b73c3e991ee {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.851747] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-399ce142-3058-43fc-ad77-8c093d5c39a8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.885423] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd766aa0-bf88-4e61-b622-14f2bde88aa2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.898697] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7a597a7-f0f9-49ed-9203-a5fc0dcb25c7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.910333] env[63028]: DEBUG oslo_vmware.api [None req-408233a5-e0c5-4f83-9696-e436ee74635a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Task: {'id': task-2735432, 'name': Destroy_Task, 'duration_secs': 0.457795} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 772.920028] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-408233a5-e0c5-4f83-9696-e436ee74635a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: a4b0d948-d950-414a-b23f-faefa5ab038c] Destroyed the VM [ 772.920028] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-408233a5-e0c5-4f83-9696-e436ee74635a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: a4b0d948-d950-414a-b23f-faefa5ab038c] Deleting Snapshot of the VM instance {{(pid=63028) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 772.920229] env[63028]: DEBUG nova.compute.provider_tree [None req-d9ad4531-4478-4b68-8e6a-ce52da2b3c63 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 772.921541] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-87cbade3-3335-4bea-a529-058b333e3ac2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.929712] env[63028]: DEBUG oslo_vmware.api [None req-408233a5-e0c5-4f83-9696-e436ee74635a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Waiting for the task: (returnval){ [ 772.929712] env[63028]: value = "task-2735434" [ 772.929712] env[63028]: _type = "Task" [ 772.929712] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 772.942383] env[63028]: DEBUG oslo_vmware.api [None req-408233a5-e0c5-4f83-9696-e436ee74635a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Task: {'id': task-2735434, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.043354] env[63028]: DEBUG oslo_concurrency.lockutils [None req-81ee569f-ddd8-4590-ac59-e5b6b9865946 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Acquiring lock "c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 773.043579] env[63028]: DEBUG oslo_concurrency.lockutils [None req-81ee569f-ddd8-4590-ac59-e5b6b9865946 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Lock "c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 773.074462] env[63028]: DEBUG oslo_concurrency.lockutils [None req-eb856bf8-9faa-4cfd-8b84-a007f985af94 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Lock "interface-5a340e31-678c-437e-aa4e-07d5d9f4334f-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 7.303s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 773.131752] env[63028]: DEBUG oslo_vmware.api [None req-f33d3f0b-c388-40bb-bf9b-780b1fd7657c tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52e33d06-7e1b-410c-e6d2-3305b54852f6, 'name': SearchDatastore_Task, 'duration_secs': 0.01382} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 773.132020] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a81d4967-0604-4a43-a29c-77b58d5fde1c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.139909] env[63028]: DEBUG oslo_vmware.api [None req-f33d3f0b-c388-40bb-bf9b-780b1fd7657c tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Waiting for the task: (returnval){ [ 773.139909] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]527df8b4-5651-0205-4f0c-bf0af335f5d0" [ 773.139909] env[63028]: _type = "Task" [ 773.139909] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 773.152413] env[63028]: DEBUG oslo_vmware.api [None req-f33d3f0b-c388-40bb-bf9b-780b1fd7657c tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]527df8b4-5651-0205-4f0c-bf0af335f5d0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.186322] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d53450d6-dbb3-40d0-b918-b0930d6cbfee tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Acquiring lock "af87f1a5-b413-4b26-be91-474ad1f73df8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 773.186602] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d53450d6-dbb3-40d0-b918-b0930d6cbfee tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Lock "af87f1a5-b413-4b26-be91-474ad1f73df8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 773.186811] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d53450d6-dbb3-40d0-b918-b0930d6cbfee tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Acquiring lock "af87f1a5-b413-4b26-be91-474ad1f73df8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 773.187036] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d53450d6-dbb3-40d0-b918-b0930d6cbfee tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Lock "af87f1a5-b413-4b26-be91-474ad1f73df8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 773.187233] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d53450d6-dbb3-40d0-b918-b0930d6cbfee tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Lock "af87f1a5-b413-4b26-be91-474ad1f73df8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 773.189753] env[63028]: INFO nova.compute.manager [None req-d53450d6-dbb3-40d0-b918-b0930d6cbfee tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: af87f1a5-b413-4b26-be91-474ad1f73df8] Terminating instance [ 773.266305] env[63028]: DEBUG nova.compute.manager [None req-be8438c3-6eec-42fd-9020-8898f3ea9278 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] [instance: 7e914e49-0d70-4024-940b-ad2a15e9dff7] Start spawning the instance on the hypervisor. 
{{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 773.295668] env[63028]: DEBUG nova.virt.hardware [None req-be8438c3-6eec-42fd-9020-8898f3ea9278 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 773.295958] env[63028]: DEBUG nova.virt.hardware [None req-be8438c3-6eec-42fd-9020-8898f3ea9278 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 773.298664] env[63028]: DEBUG nova.virt.hardware [None req-be8438c3-6eec-42fd-9020-8898f3ea9278 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 773.299162] env[63028]: DEBUG nova.virt.hardware [None req-be8438c3-6eec-42fd-9020-8898f3ea9278 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 773.299466] env[63028]: DEBUG nova.virt.hardware [None req-be8438c3-6eec-42fd-9020-8898f3ea9278 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 773.304203] env[63028]: DEBUG nova.virt.hardware [None req-be8438c3-6eec-42fd-9020-8898f3ea9278 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 773.304486] env[63028]: DEBUG nova.virt.hardware [None req-be8438c3-6eec-42fd-9020-8898f3ea9278 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 773.304665] env[63028]: DEBUG nova.virt.hardware [None req-be8438c3-6eec-42fd-9020-8898f3ea9278 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 773.304842] env[63028]: DEBUG nova.virt.hardware [None req-be8438c3-6eec-42fd-9020-8898f3ea9278 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] 
Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 773.305030] env[63028]: DEBUG nova.virt.hardware [None req-be8438c3-6eec-42fd-9020-8898f3ea9278 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 773.305223] env[63028]: DEBUG nova.virt.hardware [None req-be8438c3-6eec-42fd-9020-8898f3ea9278 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 773.306732] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d01621e-a35c-427d-96cc-c7d76db9db8e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.328803] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-789bdeff-b6a2-4ea1-9d43-ea3fb85b9998 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.334963] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735433, 'name': CreateVM_Task, 'duration_secs': 0.489803} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 773.335668] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cd11b318-9158-4f1d-8aa8-1c9d565bb5d5] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 773.337118] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d602f814-ca27-4ecc-a846-b4d4de9caa30 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 773.337347] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d602f814-ca27-4ecc-a846-b4d4de9caa30 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 773.337772] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d602f814-ca27-4ecc-a846-b4d4de9caa30 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 773.338130] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e6d5f6a7-25d3-42bc-9576-ecb672c07bb7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.359142] env[63028]: DEBUG oslo_vmware.api [None req-d602f814-ca27-4ecc-a846-b4d4de9caa30 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Waiting for the task: (returnval){ [ 773.359142] env[63028]: value = 
"session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52694aa9-8445-02f2-a74c-b8eff78c5264" [ 773.359142] env[63028]: _type = "Task" [ 773.359142] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 773.371782] env[63028]: DEBUG oslo_vmware.api [None req-d602f814-ca27-4ecc-a846-b4d4de9caa30 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52694aa9-8445-02f2-a74c-b8eff78c5264, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.425012] env[63028]: DEBUG nova.scheduler.client.report [None req-d9ad4531-4478-4b68-8e6a-ce52da2b3c63 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 773.443688] env[63028]: DEBUG oslo_vmware.api [None req-408233a5-e0c5-4f83-9696-e436ee74635a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Task: {'id': task-2735434, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.651790] env[63028]: DEBUG oslo_vmware.api [None req-f33d3f0b-c388-40bb-bf9b-780b1fd7657c tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]527df8b4-5651-0205-4f0c-bf0af335f5d0, 'name': SearchDatastore_Task, 'duration_secs': 0.022601} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 773.652234] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f33d3f0b-c388-40bb-bf9b-780b1fd7657c tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 773.652544] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-f33d3f0b-c388-40bb-bf9b-780b1fd7657c tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] da88308f-ce62-40af-adae-e38aa506bdd9/da88308f-ce62-40af-adae-e38aa506bdd9.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 773.652900] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1c04ec5c-6ac9-4a0c-aa47-0ee3a6e09ebf {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.662570] env[63028]: DEBUG oslo_vmware.api [None req-f33d3f0b-c388-40bb-bf9b-780b1fd7657c tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Waiting for the task: (returnval){ [ 773.662570] env[63028]: value = "task-2735436" [ 773.662570] env[63028]: _type = "Task" [ 773.662570] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 773.673211] env[63028]: DEBUG nova.network.neutron [None req-be8438c3-6eec-42fd-9020-8898f3ea9278 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] [instance: 7e914e49-0d70-4024-940b-ad2a15e9dff7] Successfully updated port: 4bf1e126-9048-489d-9c61-18ebe4c2af31 {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 773.683481] env[63028]: DEBUG oslo_vmware.api [None req-f33d3f0b-c388-40bb-bf9b-780b1fd7657c tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Task: {'id': task-2735436, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.695824] env[63028]: DEBUG nova.compute.manager [None req-d53450d6-dbb3-40d0-b918-b0930d6cbfee tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: af87f1a5-b413-4b26-be91-474ad1f73df8] Start destroying the instance on the hypervisor. 
{{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 773.696233] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-d53450d6-dbb3-40d0-b918-b0930d6cbfee tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: af87f1a5-b413-4b26-be91-474ad1f73df8] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 773.697441] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-383f48a7-1cb6-4aef-be5d-f6aebb666a43 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.709351] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-d53450d6-dbb3-40d0-b918-b0930d6cbfee tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: af87f1a5-b413-4b26-be91-474ad1f73df8] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 773.709817] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7986c357-77b1-4cad-b14f-4c454b13c8b9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.719263] env[63028]: DEBUG oslo_vmware.api [None req-d53450d6-dbb3-40d0-b918-b0930d6cbfee tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Waiting for the task: (returnval){ [ 773.719263] env[63028]: value = "task-2735437" [ 773.719263] env[63028]: _type = "Task" [ 773.719263] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 773.737115] env[63028]: DEBUG oslo_vmware.api [None req-d53450d6-dbb3-40d0-b918-b0930d6cbfee tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2735437, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.872467] env[63028]: DEBUG oslo_vmware.api [None req-d602f814-ca27-4ecc-a846-b4d4de9caa30 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52694aa9-8445-02f2-a74c-b8eff78c5264, 'name': SearchDatastore_Task, 'duration_secs': 0.014717} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 773.872795] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d602f814-ca27-4ecc-a846-b4d4de9caa30 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 773.873057] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-d602f814-ca27-4ecc-a846-b4d4de9caa30 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: cd11b318-9158-4f1d-8aa8-1c9d565bb5d5] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 773.873319] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d602f814-ca27-4ecc-a846-b4d4de9caa30 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 773.873472] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d602f814-ca27-4ecc-a846-b4d4de9caa30 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 773.873695] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-d602f814-ca27-4ecc-a846-b4d4de9caa30 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 773.873975] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-accb790f-4567-44e6-ae9a-255ce5acba5c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.886608] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-d602f814-ca27-4ecc-a846-b4d4de9caa30 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 773.886796] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-d602f814-ca27-4ecc-a846-b4d4de9caa30 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 773.887577] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6e736dba-2f06-45c9-a7ff-7209bccc39e4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.894943] env[63028]: DEBUG oslo_vmware.api [None req-d602f814-ca27-4ecc-a846-b4d4de9caa30 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Waiting for the task: (returnval){ [ 773.894943] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]529462bb-7904-6024-8bd6-9a2fe1cee12b" [ 773.894943] env[63028]: _type = "Task" [ 773.894943] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 773.907769] env[63028]: DEBUG oslo_vmware.api [None req-d602f814-ca27-4ecc-a846-b4d4de9caa30 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]529462bb-7904-6024-8bd6-9a2fe1cee12b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.932088] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d9ad4531-4478-4b68-8e6a-ce52da2b3c63 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.698s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 773.932336] env[63028]: DEBUG nova.compute.manager [None req-d9ad4531-4478-4b68-8e6a-ce52da2b3c63 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] [instance: 15326f55-2db8-47c3-b1fd-ce8ba1174c79] Start building networks asynchronously for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 773.936145] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4b397209-e0b0-4739-afc2-198208fd9eb6 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.481s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 773.936382] env[63028]: DEBUG nova.objects.instance [None req-4b397209-e0b0-4739-afc2-198208fd9eb6 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] Lazy-loading 'resources' on Instance uuid 352ac7c3-17a8-4d7e-a66f-47ea7614892c {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 773.950265] env[63028]: DEBUG oslo_vmware.api [None req-408233a5-e0c5-4f83-9696-e436ee74635a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Task: {'id': task-2735434, 'name': RemoveSnapshot_Task, 'duration_secs': 0.939529} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 773.951734] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-408233a5-e0c5-4f83-9696-e436ee74635a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: a4b0d948-d950-414a-b23f-faefa5ab038c] Deleted Snapshot of the VM instance {{(pid=63028) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 774.076485] env[63028]: DEBUG nova.compute.manager [req-99d87696-03fa-4512-af18-b2e10da8e816 req-119cfcf3-73a6-4b13-a557-737ecdc66192 service nova] [instance: da88308f-ce62-40af-adae-e38aa506bdd9] Received event network-changed-63f3891b-02ff-4d0f-9fb9-6fff4d3b71b5 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 774.076786] env[63028]: DEBUG nova.compute.manager [req-99d87696-03fa-4512-af18-b2e10da8e816 req-119cfcf3-73a6-4b13-a557-737ecdc66192 service nova] [instance: da88308f-ce62-40af-adae-e38aa506bdd9] Refreshing instance network info cache due to event network-changed-63f3891b-02ff-4d0f-9fb9-6fff4d3b71b5. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 774.077083] env[63028]: DEBUG oslo_concurrency.lockutils [req-99d87696-03fa-4512-af18-b2e10da8e816 req-119cfcf3-73a6-4b13-a557-737ecdc66192 service nova] Acquiring lock "refresh_cache-da88308f-ce62-40af-adae-e38aa506bdd9" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 774.077269] env[63028]: DEBUG oslo_concurrency.lockutils [req-99d87696-03fa-4512-af18-b2e10da8e816 req-119cfcf3-73a6-4b13-a557-737ecdc66192 service nova] Acquired lock "refresh_cache-da88308f-ce62-40af-adae-e38aa506bdd9" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 774.077471] env[63028]: DEBUG nova.network.neutron [req-99d87696-03fa-4512-af18-b2e10da8e816 req-119cfcf3-73a6-4b13-a557-737ecdc66192 service nova] [instance: da88308f-ce62-40af-adae-e38aa506bdd9] Refreshing network info cache for port 63f3891b-02ff-4d0f-9fb9-6fff4d3b71b5 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 774.099461] env[63028]: DEBUG nova.compute.manager [req-30c25956-d306-422d-9cf8-8bee539d540f req-df0bb6b3-2023-41d6-b285-1d03d9ac6353 service nova] [instance: 5a340e31-678c-437e-aa4e-07d5d9f4334f] Received event network-changed-192a1d7c-32a1-4b52-944b-c6c3b6c52f93 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 774.099461] env[63028]: DEBUG nova.compute.manager [req-30c25956-d306-422d-9cf8-8bee539d540f req-df0bb6b3-2023-41d6-b285-1d03d9ac6353 service nova] [instance: 5a340e31-678c-437e-aa4e-07d5d9f4334f] Refreshing instance network info cache due to event network-changed-192a1d7c-32a1-4b52-944b-c6c3b6c52f93. 
{{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 774.099824] env[63028]: DEBUG oslo_concurrency.lockutils [req-30c25956-d306-422d-9cf8-8bee539d540f req-df0bb6b3-2023-41d6-b285-1d03d9ac6353 service nova] Acquiring lock "refresh_cache-5a340e31-678c-437e-aa4e-07d5d9f4334f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 774.100083] env[63028]: DEBUG oslo_concurrency.lockutils [req-30c25956-d306-422d-9cf8-8bee539d540f req-df0bb6b3-2023-41d6-b285-1d03d9ac6353 service nova] Acquired lock "refresh_cache-5a340e31-678c-437e-aa4e-07d5d9f4334f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 774.100396] env[63028]: DEBUG nova.network.neutron [req-30c25956-d306-422d-9cf8-8bee539d540f req-df0bb6b3-2023-41d6-b285-1d03d9ac6353 service nova] [instance: 5a340e31-678c-437e-aa4e-07d5d9f4334f] Refreshing network info cache for port 192a1d7c-32a1-4b52-944b-c6c3b6c52f93 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 774.173957] env[63028]: DEBUG oslo_vmware.api [None req-f33d3f0b-c388-40bb-bf9b-780b1fd7657c tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Task: {'id': task-2735436, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.177211] env[63028]: DEBUG oslo_concurrency.lockutils [None req-be8438c3-6eec-42fd-9020-8898f3ea9278 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] Acquiring lock "refresh_cache-7e914e49-0d70-4024-940b-ad2a15e9dff7" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 774.177211] env[63028]: DEBUG oslo_concurrency.lockutils [None req-be8438c3-6eec-42fd-9020-8898f3ea9278 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] Acquired lock "refresh_cache-7e914e49-0d70-4024-940b-ad2a15e9dff7" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 774.177393] env[63028]: DEBUG nova.network.neutron [None req-be8438c3-6eec-42fd-9020-8898f3ea9278 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] [instance: 7e914e49-0d70-4024-940b-ad2a15e9dff7] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 774.234010] env[63028]: DEBUG oslo_vmware.api [None req-d53450d6-dbb3-40d0-b918-b0930d6cbfee tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2735437, 'name': PowerOffVM_Task, 'duration_secs': 0.242067} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 774.234455] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-d53450d6-dbb3-40d0-b918-b0930d6cbfee tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: af87f1a5-b413-4b26-be91-474ad1f73df8] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 774.234575] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-d53450d6-dbb3-40d0-b918-b0930d6cbfee tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: af87f1a5-b413-4b26-be91-474ad1f73df8] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 774.234767] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a07e24b4-6cda-49f1-ad0e-847f75dc868c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.309601] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-d53450d6-dbb3-40d0-b918-b0930d6cbfee tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: af87f1a5-b413-4b26-be91-474ad1f73df8] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 774.310009] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-d53450d6-dbb3-40d0-b918-b0930d6cbfee tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: af87f1a5-b413-4b26-be91-474ad1f73df8] Deleting contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 774.310398] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-d53450d6-dbb3-40d0-b918-b0930d6cbfee tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Deleting the datastore file [datastore2] af87f1a5-b413-4b26-be91-474ad1f73df8 {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 774.310732] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fd6961c6-5a82-45e3-9805-5af5433b14d2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.321252] env[63028]: DEBUG oslo_vmware.api [None req-d53450d6-dbb3-40d0-b918-b0930d6cbfee tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Waiting for the task: (returnval){ [ 774.321252] env[63028]: value = "task-2735439" [ 774.321252] env[63028]: _type = "Task" [ 774.321252] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 774.331530] env[63028]: DEBUG oslo_vmware.api [None req-d53450d6-dbb3-40d0-b918-b0930d6cbfee tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2735439, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.405749] env[63028]: DEBUG oslo_vmware.api [None req-d602f814-ca27-4ecc-a846-b4d4de9caa30 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]529462bb-7904-6024-8bd6-9a2fe1cee12b, 'name': SearchDatastore_Task, 'duration_secs': 0.020004} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 774.406568] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4060a0f7-e197-46d4-82ef-9702eae16450 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.413115] env[63028]: DEBUG oslo_vmware.api [None req-d602f814-ca27-4ecc-a846-b4d4de9caa30 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Waiting for the task: (returnval){ [ 774.413115] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5215b16f-cbbd-68c5-f77f-d2434e14e1a3" [ 774.413115] env[63028]: _type = "Task" [ 774.413115] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 774.422919] env[63028]: DEBUG oslo_vmware.api [None req-d602f814-ca27-4ecc-a846-b4d4de9caa30 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5215b16f-cbbd-68c5-f77f-d2434e14e1a3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.439523] env[63028]: DEBUG nova.compute.utils [None req-d9ad4531-4478-4b68-8e6a-ce52da2b3c63 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 774.440951] env[63028]: DEBUG nova.compute.manager [None req-d9ad4531-4478-4b68-8e6a-ce52da2b3c63 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] [instance: 15326f55-2db8-47c3-b1fd-ce8ba1174c79] Allocating IP information in the background. {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 774.441134] env[63028]: DEBUG nova.network.neutron [None req-d9ad4531-4478-4b68-8e6a-ce52da2b3c63 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] [instance: 15326f55-2db8-47c3-b1fd-ce8ba1174c79] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 774.455823] env[63028]: WARNING nova.compute.manager [None req-408233a5-e0c5-4f83-9696-e436ee74635a tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: a4b0d948-d950-414a-b23f-faefa5ab038c] Image not found during snapshot: nova.exception.ImageNotFound: Image 665ff5e9-b303-4514-9dfb-771d74a19520 could not be found. 
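The surrounding records repeat one pattern: a vCenter *_Task method is invoked (SearchDatastore_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, DeleteDatastoreFile_Task, ReconfigVM_Task, PowerOffVM_Task), oslo_vmware.api logs "Waiting for the task ... to complete", then polls the task and logs "progress is N%" until it reports "completed successfully" (with a duration) or an error. The sketch below is a minimal illustration of that poll-until-terminal loop, not the oslo.vmware implementation; TaskInfo and get_task_info are hypothetical stand-ins for the vSphere task-info lookup the SDK performs.

import time
from dataclasses import dataclass
from typing import Callable, Optional

@dataclass
class TaskInfo:
    # Hypothetical stand-in for the vSphere TaskInfo structure returned by the SDK.
    state: str                 # "queued", "running", "success", or "error"
    progress: int = 0          # 0-100, meaningful only while queued/running
    error: Optional[str] = None

def wait_for_task(get_task_info: Callable[[], TaskInfo],
                  poll_interval: float = 0.5,
                  log=print) -> TaskInfo:
    """Poll a task until it reaches a terminal state, logging progress.

    Mirrors the "progress is N%" / "completed successfully" records in the log;
    get_task_info is any callable that fetches the current TaskInfo.
    """
    while True:
        info = get_task_info()
        if info.state in ("queued", "running"):
            log(f"Task progress is {info.progress}%.")
            time.sleep(poll_interval)
            continue
        if info.state == "success":
            log("Task completed successfully.")
            return info
        raise RuntimeError(f"Task failed: {info.error}")

# Example: a fake task that is polled twice before succeeding,
# echoing the 0% / 51% / completed sequence seen for CopyVirtualDisk_Task.
_polls = iter([TaskInfo("running", 0), TaskInfo("running", 51), TaskInfo("success", 100)])
wait_for_task(lambda: next(_polls), poll_interval=0)

Only the task name, the opID, and the reported duration_secs differ from record to record; the loop itself is the same for every task in this trace.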
[ 774.513300] env[63028]: DEBUG nova.policy [None req-d9ad4531-4478-4b68-8e6a-ce52da2b3c63 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c1b35dc6f0b14b528c2690fdf57410c9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9ca23bbd50b041859820261db200b1af', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 774.675426] env[63028]: DEBUG oslo_vmware.api [None req-f33d3f0b-c388-40bb-bf9b-780b1fd7657c tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Task: {'id': task-2735436, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.650726} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 774.677993] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-f33d3f0b-c388-40bb-bf9b-780b1fd7657c tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] da88308f-ce62-40af-adae-e38aa506bdd9/da88308f-ce62-40af-adae-e38aa506bdd9.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 774.679173] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-f33d3f0b-c388-40bb-bf9b-780b1fd7657c tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] [instance: da88308f-ce62-40af-adae-e38aa506bdd9] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 774.679173] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-77f33ea5-74c4-4057-adc3-fd72fcf1301b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.696014] env[63028]: DEBUG oslo_vmware.api [None req-f33d3f0b-c388-40bb-bf9b-780b1fd7657c tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Waiting for the task: (returnval){ [ 774.696014] env[63028]: value = "task-2735440" [ 774.696014] env[63028]: _type = "Task" [ 774.696014] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 774.707601] env[63028]: DEBUG oslo_vmware.api [None req-f33d3f0b-c388-40bb-bf9b-780b1fd7657c tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Task: {'id': task-2735440, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.762789] env[63028]: DEBUG nova.network.neutron [None req-be8438c3-6eec-42fd-9020-8898f3ea9278 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] [instance: 7e914e49-0d70-4024-940b-ad2a15e9dff7] Instance cache missing network info. 
{{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 774.832317] env[63028]: DEBUG oslo_vmware.api [None req-d53450d6-dbb3-40d0-b918-b0930d6cbfee tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2735439, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.159721} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 774.835912] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-d53450d6-dbb3-40d0-b918-b0930d6cbfee tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 774.836194] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-d53450d6-dbb3-40d0-b918-b0930d6cbfee tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: af87f1a5-b413-4b26-be91-474ad1f73df8] Deleted contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 774.836396] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-d53450d6-dbb3-40d0-b918-b0930d6cbfee tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: af87f1a5-b413-4b26-be91-474ad1f73df8] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 774.836575] env[63028]: INFO nova.compute.manager [None req-d53450d6-dbb3-40d0-b918-b0930d6cbfee tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: af87f1a5-b413-4b26-be91-474ad1f73df8] Took 1.14 seconds to destroy the instance on the hypervisor. [ 774.836819] env[63028]: DEBUG oslo.service.loopingcall [None req-d53450d6-dbb3-40d0-b918-b0930d6cbfee tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 774.840080] env[63028]: DEBUG nova.compute.manager [-] [instance: af87f1a5-b413-4b26-be91-474ad1f73df8] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 774.840080] env[63028]: DEBUG nova.network.neutron [-] [instance: af87f1a5-b413-4b26-be91-474ad1f73df8] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 774.932030] env[63028]: DEBUG oslo_vmware.api [None req-d602f814-ca27-4ecc-a846-b4d4de9caa30 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5215b16f-cbbd-68c5-f77f-d2434e14e1a3, 'name': SearchDatastore_Task, 'duration_secs': 0.010782} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 774.932030] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d602f814-ca27-4ecc-a846-b4d4de9caa30 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 774.932030] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-d602f814-ca27-4ecc-a846-b4d4de9caa30 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] cd11b318-9158-4f1d-8aa8-1c9d565bb5d5/cd11b318-9158-4f1d-8aa8-1c9d565bb5d5.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 774.932264] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fd7606f5-b46d-4a1c-a2b4-18e5cc41cd6f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.942798] env[63028]: DEBUG oslo_vmware.api [None req-d602f814-ca27-4ecc-a846-b4d4de9caa30 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Waiting for the task: (returnval){ [ 774.942798] env[63028]: value = "task-2735441" [ 774.942798] env[63028]: _type = "Task" [ 774.942798] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 774.949175] env[63028]: DEBUG nova.compute.manager [None req-d9ad4531-4478-4b68-8e6a-ce52da2b3c63 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] [instance: 15326f55-2db8-47c3-b1fd-ce8ba1174c79] Start building block device mappings for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 774.962509] env[63028]: DEBUG oslo_vmware.api [None req-d602f814-ca27-4ecc-a846-b4d4de9caa30 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735441, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.035494] env[63028]: DEBUG nova.network.neutron [None req-d9ad4531-4478-4b68-8e6a-ce52da2b3c63 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] [instance: 15326f55-2db8-47c3-b1fd-ce8ba1174c79] Successfully created port: f00fcabf-7289-4128-84ab-c81a45858e92 {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 775.077855] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cd53247-ef39-4272-970a-c11d1868cc11 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.087875] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57a8083e-f0f8-4531-ba54-d16081ef133f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.127735] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8558b590-22a4-4247-b50c-859923005ed5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.138795] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9f00689-761c-43ed-b96a-10a4ffca3846 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.156039] env[63028]: DEBUG nova.compute.provider_tree [None req-4b397209-e0b0-4739-afc2-198208fd9eb6 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 775.206652] env[63028]: DEBUG oslo_vmware.api [None req-f33d3f0b-c388-40bb-bf9b-780b1fd7657c tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Task: {'id': task-2735440, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.095053} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 775.207032] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-f33d3f0b-c388-40bb-bf9b-780b1fd7657c tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] [instance: da88308f-ce62-40af-adae-e38aa506bdd9] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 775.207917] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-944edfc8-1f84-49fb-a912-62e34147c638 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.236471] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-f33d3f0b-c388-40bb-bf9b-780b1fd7657c tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] [instance: da88308f-ce62-40af-adae-e38aa506bdd9] Reconfiguring VM instance instance-00000032 to attach disk [datastore1] da88308f-ce62-40af-adae-e38aa506bdd9/da88308f-ce62-40af-adae-e38aa506bdd9.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 775.237239] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f4ef9c73-d709-42f8-bfd9-6c5ceac1658f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.262286] env[63028]: DEBUG oslo_vmware.api [None req-f33d3f0b-c388-40bb-bf9b-780b1fd7657c tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Waiting for the task: (returnval){ [ 775.262286] env[63028]: value = "task-2735442" [ 775.262286] env[63028]: _type = "Task" [ 775.262286] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 775.272832] env[63028]: DEBUG oslo_vmware.api [None req-f33d3f0b-c388-40bb-bf9b-780b1fd7657c tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Task: {'id': task-2735442, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.426796] env[63028]: DEBUG oslo_concurrency.lockutils [None req-273a0846-f74e-4cbe-88c7-25ef6a67da40 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Acquiring lock "a4b0d948-d950-414a-b23f-faefa5ab038c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 775.426998] env[63028]: DEBUG oslo_concurrency.lockutils [None req-273a0846-f74e-4cbe-88c7-25ef6a67da40 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Lock "a4b0d948-d950-414a-b23f-faefa5ab038c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 775.427212] env[63028]: DEBUG oslo_concurrency.lockutils [None req-273a0846-f74e-4cbe-88c7-25ef6a67da40 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Acquiring lock "a4b0d948-d950-414a-b23f-faefa5ab038c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 775.427655] env[63028]: DEBUG oslo_concurrency.lockutils [None req-273a0846-f74e-4cbe-88c7-25ef6a67da40 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Lock "a4b0d948-d950-414a-b23f-faefa5ab038c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 775.427902] env[63028]: DEBUG oslo_concurrency.lockutils [None req-273a0846-f74e-4cbe-88c7-25ef6a67da40 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Lock "a4b0d948-d950-414a-b23f-faefa5ab038c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 775.430375] env[63028]: INFO nova.compute.manager [None req-273a0846-f74e-4cbe-88c7-25ef6a67da40 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: a4b0d948-d950-414a-b23f-faefa5ab038c] Terminating instance [ 775.460137] env[63028]: DEBUG oslo_vmware.api [None req-d602f814-ca27-4ecc-a846-b4d4de9caa30 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735441, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.501597} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 775.460407] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-d602f814-ca27-4ecc-a846-b4d4de9caa30 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] cd11b318-9158-4f1d-8aa8-1c9d565bb5d5/cd11b318-9158-4f1d-8aa8-1c9d565bb5d5.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 775.460641] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-d602f814-ca27-4ecc-a846-b4d4de9caa30 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: cd11b318-9158-4f1d-8aa8-1c9d565bb5d5] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 775.461163] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-dadfc1ad-acf7-47b5-939e-e3c255aea26c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.470294] env[63028]: DEBUG oslo_vmware.api [None req-d602f814-ca27-4ecc-a846-b4d4de9caa30 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Waiting for the task: (returnval){ [ 775.470294] env[63028]: value = "task-2735443" [ 775.470294] env[63028]: _type = "Task" [ 775.470294] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 775.479888] env[63028]: DEBUG oslo_vmware.api [None req-d602f814-ca27-4ecc-a846-b4d4de9caa30 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735443, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.541145] env[63028]: DEBUG nova.network.neutron [req-99d87696-03fa-4512-af18-b2e10da8e816 req-119cfcf3-73a6-4b13-a557-737ecdc66192 service nova] [instance: da88308f-ce62-40af-adae-e38aa506bdd9] Updated VIF entry in instance network info cache for port 63f3891b-02ff-4d0f-9fb9-6fff4d3b71b5. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 775.541523] env[63028]: DEBUG nova.network.neutron [req-99d87696-03fa-4512-af18-b2e10da8e816 req-119cfcf3-73a6-4b13-a557-737ecdc66192 service nova] [instance: da88308f-ce62-40af-adae-e38aa506bdd9] Updating instance_info_cache with network_info: [{"id": "63f3891b-02ff-4d0f-9fb9-6fff4d3b71b5", "address": "fa:16:3e:4f:5f:59", "network": {"id": "e3a8845b-9fc6-46bd-8272-501135c875ad", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-2047431351-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91a3f78ba4514500bfd4ed81b74526e3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f68ebd2a-3c68-48db-8c32-8a01497fc2e7", "external-id": "nsx-vlan-transportzone-49", "segmentation_id": 49, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap63f3891b-02", "ovs_interfaceid": "63f3891b-02ff-4d0f-9fb9-6fff4d3b71b5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 775.559837] env[63028]: DEBUG oslo_concurrency.lockutils [None req-6118af02-15c9-41e2-bf63-dacf4033738f tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Acquiring lock "interface-5a340e31-678c-437e-aa4e-07d5d9f4334f-192a1d7c-32a1-4b52-944b-c6c3b6c52f93" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 775.559837] env[63028]: DEBUG oslo_concurrency.lockutils [None req-6118af02-15c9-41e2-bf63-dacf4033738f tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Lock "interface-5a340e31-678c-437e-aa4e-07d5d9f4334f-192a1d7c-32a1-4b52-944b-c6c3b6c52f93" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 775.581934] env[63028]: DEBUG nova.network.neutron [None req-be8438c3-6eec-42fd-9020-8898f3ea9278 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] [instance: 7e914e49-0d70-4024-940b-ad2a15e9dff7] Updating instance_info_cache with network_info: [{"id": "4bf1e126-9048-489d-9c61-18ebe4c2af31", "address": "fa:16:3e:86:dd:f7", "network": {"id": "b27ad7b3-fa4d-4831-9ec4-23010a1301bc", "bridge": "br-int", "label": "tempest-ServersTestJSON-276288407-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3008f14179f4461aab24b22ce9174433", "mtu": 8950, "physical_network": 
"default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "298bb8ef-4765-494c-b157-7a349218bd1e", "external-id": "nsx-vlan-transportzone-905", "segmentation_id": 905, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4bf1e126-90", "ovs_interfaceid": "4bf1e126-9048-489d-9c61-18ebe4c2af31", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 775.602671] env[63028]: DEBUG nova.network.neutron [req-30c25956-d306-422d-9cf8-8bee539d540f req-df0bb6b3-2023-41d6-b285-1d03d9ac6353 service nova] [instance: 5a340e31-678c-437e-aa4e-07d5d9f4334f] Updated VIF entry in instance network info cache for port 192a1d7c-32a1-4b52-944b-c6c3b6c52f93. {{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 775.603186] env[63028]: DEBUG nova.network.neutron [req-30c25956-d306-422d-9cf8-8bee539d540f req-df0bb6b3-2023-41d6-b285-1d03d9ac6353 service nova] [instance: 5a340e31-678c-437e-aa4e-07d5d9f4334f] Updating instance_info_cache with network_info: [{"id": "39f160e2-809e-4b2c-9424-70448b807385", "address": "fa:16:3e:f5:5d:b4", "network": {"id": "c2f1496c-e3fd-43db-a032-12cdacdb4e46", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1287620143-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.199", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "25a6457f62d149629c09589feb1a550c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap39f160e2-80", "ovs_interfaceid": "39f160e2-809e-4b2c-9424-70448b807385", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "192a1d7c-32a1-4b52-944b-c6c3b6c52f93", "address": "fa:16:3e:c3:2a:53", "network": {"id": "c2f1496c-e3fd-43db-a032-12cdacdb4e46", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1287620143-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "25a6457f62d149629c09589feb1a550c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap192a1d7c-32", "ovs_interfaceid": "192a1d7c-32a1-4b52-944b-c6c3b6c52f93", "qbh_params": null, 
"qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 775.661818] env[63028]: DEBUG nova.scheduler.client.report [None req-4b397209-e0b0-4739-afc2-198208fd9eb6 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 775.774925] env[63028]: DEBUG oslo_vmware.api [None req-f33d3f0b-c388-40bb-bf9b-780b1fd7657c tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Task: {'id': task-2735442, 'name': ReconfigVM_Task, 'duration_secs': 0.47075} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 775.775275] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-f33d3f0b-c388-40bb-bf9b-780b1fd7657c tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] [instance: da88308f-ce62-40af-adae-e38aa506bdd9] Reconfigured VM instance instance-00000032 to attach disk [datastore1] da88308f-ce62-40af-adae-e38aa506bdd9/da88308f-ce62-40af-adae-e38aa506bdd9.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 775.776177] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-35926045-7e5e-46dd-8577-1cdd921c12da {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.784208] env[63028]: DEBUG oslo_vmware.api [None req-f33d3f0b-c388-40bb-bf9b-780b1fd7657c tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Waiting for the task: (returnval){ [ 775.784208] env[63028]: value = "task-2735444" [ 775.784208] env[63028]: _type = "Task" [ 775.784208] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 775.793964] env[63028]: DEBUG oslo_vmware.api [None req-f33d3f0b-c388-40bb-bf9b-780b1fd7657c tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Task: {'id': task-2735444, 'name': Rename_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.862058] env[63028]: DEBUG oslo_vmware.rw_handles [None req-f78b3882-29ca-452b-a11e-a6f9e213d5cd tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5222cd1b-9682-3ff0-c7ce-26183c6c453f/disk-0.vmdk. 
{{(pid=63028) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 775.862530] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 775.863594] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-898f1eac-3da7-45e1-ab56-a4caf6b70f3c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.866945] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 775.873326] env[63028]: DEBUG oslo_vmware.rw_handles [None req-f78b3882-29ca-452b-a11e-a6f9e213d5cd tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5222cd1b-9682-3ff0-c7ce-26183c6c453f/disk-0.vmdk is in state: ready. {{(pid=63028) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 775.873326] env[63028]: ERROR oslo_vmware.rw_handles [None req-f78b3882-29ca-452b-a11e-a6f9e213d5cd tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5222cd1b-9682-3ff0-c7ce-26183c6c453f/disk-0.vmdk due to incomplete transfer. [ 775.873556] env[63028]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-3438740c-eed1-469a-adc1-7d8699f23f5d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.881888] env[63028]: DEBUG oslo_vmware.rw_handles [None req-f78b3882-29ca-452b-a11e-a6f9e213d5cd tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5222cd1b-9682-3ff0-c7ce-26183c6c453f/disk-0.vmdk. 
{{(pid=63028) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 775.882118] env[63028]: DEBUG nova.virt.vmwareapi.images [None req-f78b3882-29ca-452b-a11e-a6f9e213d5cd tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 1316318e-8dcf-4ac2-b40a-6a3ab6964997] Uploaded image e5c2bb00-9e6e-489f-8c03-b884e4f87e0e to the Glance image server {{(pid=63028) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 775.884088] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-f78b3882-29ca-452b-a11e-a6f9e213d5cd tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 1316318e-8dcf-4ac2-b40a-6a3ab6964997] Destroying the VM {{(pid=63028) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 775.884338] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-776e3049-71d1-4b2a-9f67-a7211f8ce502 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.890811] env[63028]: DEBUG oslo_vmware.api [None req-f78b3882-29ca-452b-a11e-a6f9e213d5cd tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Waiting for the task: (returnval){ [ 775.890811] env[63028]: value = "task-2735445" [ 775.890811] env[63028]: _type = "Task" [ 775.890811] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 775.900060] env[63028]: DEBUG oslo_vmware.api [None req-f78b3882-29ca-452b-a11e-a6f9e213d5cd tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Task: {'id': task-2735445, 'name': Destroy_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.936376] env[63028]: DEBUG nova.compute.manager [None req-273a0846-f74e-4cbe-88c7-25ef6a67da40 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: a4b0d948-d950-414a-b23f-faefa5ab038c] Start destroying the instance on the hypervisor. 
{{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 775.936376] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-273a0846-f74e-4cbe-88c7-25ef6a67da40 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: a4b0d948-d950-414a-b23f-faefa5ab038c] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 775.936840] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a024df3-17f9-4b78-af27-01618a1d6b4a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.945873] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-273a0846-f74e-4cbe-88c7-25ef6a67da40 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: a4b0d948-d950-414a-b23f-faefa5ab038c] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 775.945873] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bf0e8bdc-85ea-40b4-8c16-873cb655e46b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.954760] env[63028]: DEBUG oslo_vmware.api [None req-273a0846-f74e-4cbe-88c7-25ef6a67da40 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Waiting for the task: (returnval){ [ 775.954760] env[63028]: value = "task-2735446" [ 775.954760] env[63028]: _type = "Task" [ 775.954760] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 775.963216] env[63028]: DEBUG oslo_vmware.api [None req-273a0846-f74e-4cbe-88c7-25ef6a67da40 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Task: {'id': task-2735446, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.965546] env[63028]: DEBUG nova.compute.manager [None req-d9ad4531-4478-4b68-8e6a-ce52da2b3c63 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] [instance: 15326f55-2db8-47c3-b1fd-ce8ba1174c79] Start spawning the instance on the hypervisor. {{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 775.983269] env[63028]: DEBUG oslo_vmware.api [None req-d602f814-ca27-4ecc-a846-b4d4de9caa30 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735443, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.385188} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 775.983557] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-d602f814-ca27-4ecc-a846-b4d4de9caa30 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: cd11b318-9158-4f1d-8aa8-1c9d565bb5d5] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 775.984355] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f542649b-19b5-4c75-907d-b172ff462a1a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.010015] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-d602f814-ca27-4ecc-a846-b4d4de9caa30 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: cd11b318-9158-4f1d-8aa8-1c9d565bb5d5] Reconfiguring VM instance instance-00000033 to attach disk [datastore1] cd11b318-9158-4f1d-8aa8-1c9d565bb5d5/cd11b318-9158-4f1d-8aa8-1c9d565bb5d5.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 776.012638] env[63028]: DEBUG nova.virt.hardware [None req-d9ad4531-4478-4b68-8e6a-ce52da2b3c63 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 776.012889] env[63028]: DEBUG nova.virt.hardware [None req-d9ad4531-4478-4b68-8e6a-ce52da2b3c63 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 776.013132] env[63028]: DEBUG nova.virt.hardware [None req-d9ad4531-4478-4b68-8e6a-ce52da2b3c63 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 776.013268] env[63028]: DEBUG nova.virt.hardware [None req-d9ad4531-4478-4b68-8e6a-ce52da2b3c63 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 776.013418] env[63028]: DEBUG nova.virt.hardware [None req-d9ad4531-4478-4b68-8e6a-ce52da2b3c63 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:396}} [ 776.013565] env[63028]: DEBUG nova.virt.hardware [None req-d9ad4531-4478-4b68-8e6a-ce52da2b3c63 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 776.013796] env[63028]: DEBUG nova.virt.hardware [None req-d9ad4531-4478-4b68-8e6a-ce52da2b3c63 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 776.013969] env[63028]: DEBUG nova.virt.hardware [None req-d9ad4531-4478-4b68-8e6a-ce52da2b3c63 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 776.014761] env[63028]: DEBUG nova.virt.hardware [None req-d9ad4531-4478-4b68-8e6a-ce52da2b3c63 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 776.014983] env[63028]: DEBUG nova.virt.hardware [None req-d9ad4531-4478-4b68-8e6a-ce52da2b3c63 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 776.015352] env[63028]: DEBUG nova.virt.hardware [None req-d9ad4531-4478-4b68-8e6a-ce52da2b3c63 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 776.015711] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d1765fcb-b4bf-44f9-a878-0df5572763d7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.031477] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1714c334-12c5-495e-a0cd-91bfd7a2d7a3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.041895] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40fe7552-7c80-4afb-aebe-2997ae9ae23c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.046885] env[63028]: DEBUG oslo_vmware.api [None req-d602f814-ca27-4ecc-a846-b4d4de9caa30 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Waiting for the task: (returnval){ [ 776.046885] env[63028]: value = "task-2735447" [ 776.046885] env[63028]: _type = "Task" [ 776.046885] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 776.047592] env[63028]: DEBUG oslo_concurrency.lockutils [req-99d87696-03fa-4512-af18-b2e10da8e816 req-119cfcf3-73a6-4b13-a557-737ecdc66192 service nova] Releasing lock "refresh_cache-da88308f-ce62-40af-adae-e38aa506bdd9" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 776.047811] env[63028]: DEBUG nova.compute.manager [req-99d87696-03fa-4512-af18-b2e10da8e816 req-119cfcf3-73a6-4b13-a557-737ecdc66192 service nova] [instance: cd11b318-9158-4f1d-8aa8-1c9d565bb5d5] Received event network-vif-plugged-34861b32-33da-4ca0-b4ae-0031ab0f8619 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 776.048093] env[63028]: DEBUG oslo_concurrency.lockutils [req-99d87696-03fa-4512-af18-b2e10da8e816 req-119cfcf3-73a6-4b13-a557-737ecdc66192 service nova] Acquiring lock "cd11b318-9158-4f1d-8aa8-1c9d565bb5d5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 776.048340] env[63028]: DEBUG oslo_concurrency.lockutils [req-99d87696-03fa-4512-af18-b2e10da8e816 req-119cfcf3-73a6-4b13-a557-737ecdc66192 service nova] Lock "cd11b318-9158-4f1d-8aa8-1c9d565bb5d5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 776.048554] env[63028]: DEBUG oslo_concurrency.lockutils [req-99d87696-03fa-4512-af18-b2e10da8e816 req-119cfcf3-73a6-4b13-a557-737ecdc66192 service nova] Lock "cd11b318-9158-4f1d-8aa8-1c9d565bb5d5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 776.048779] env[63028]: DEBUG nova.compute.manager [req-99d87696-03fa-4512-af18-b2e10da8e816 req-119cfcf3-73a6-4b13-a557-737ecdc66192 service nova] [instance: cd11b318-9158-4f1d-8aa8-1c9d565bb5d5] No waiting events found dispatching network-vif-plugged-34861b32-33da-4ca0-b4ae-0031ab0f8619 {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 776.049620] env[63028]: WARNING nova.compute.manager [req-99d87696-03fa-4512-af18-b2e10da8e816 req-119cfcf3-73a6-4b13-a557-737ecdc66192 service nova] [instance: cd11b318-9158-4f1d-8aa8-1c9d565bb5d5] Received unexpected event network-vif-plugged-34861b32-33da-4ca0-b4ae-0031ab0f8619 for instance with vm_state building and task_state spawning. [ 776.049620] env[63028]: DEBUG nova.compute.manager [req-99d87696-03fa-4512-af18-b2e10da8e816 req-119cfcf3-73a6-4b13-a557-737ecdc66192 service nova] [instance: cd11b318-9158-4f1d-8aa8-1c9d565bb5d5] Received event network-changed-34861b32-33da-4ca0-b4ae-0031ab0f8619 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 776.049620] env[63028]: DEBUG nova.compute.manager [req-99d87696-03fa-4512-af18-b2e10da8e816 req-119cfcf3-73a6-4b13-a557-737ecdc66192 service nova] [instance: cd11b318-9158-4f1d-8aa8-1c9d565bb5d5] Refreshing instance network info cache due to event network-changed-34861b32-33da-4ca0-b4ae-0031ab0f8619. 
{{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 776.049776] env[63028]: DEBUG oslo_concurrency.lockutils [req-99d87696-03fa-4512-af18-b2e10da8e816 req-119cfcf3-73a6-4b13-a557-737ecdc66192 service nova] Acquiring lock "refresh_cache-cd11b318-9158-4f1d-8aa8-1c9d565bb5d5" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 776.049896] env[63028]: DEBUG oslo_concurrency.lockutils [req-99d87696-03fa-4512-af18-b2e10da8e816 req-119cfcf3-73a6-4b13-a557-737ecdc66192 service nova] Acquired lock "refresh_cache-cd11b318-9158-4f1d-8aa8-1c9d565bb5d5" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 776.050101] env[63028]: DEBUG nova.network.neutron [req-99d87696-03fa-4512-af18-b2e10da8e816 req-119cfcf3-73a6-4b13-a557-737ecdc66192 service nova] [instance: cd11b318-9158-4f1d-8aa8-1c9d565bb5d5] Refreshing network info cache for port 34861b32-33da-4ca0-b4ae-0031ab0f8619 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 776.065895] env[63028]: DEBUG oslo_concurrency.lockutils [None req-6118af02-15c9-41e2-bf63-dacf4033738f tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Acquiring lock "5a340e31-678c-437e-aa4e-07d5d9f4334f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 776.066092] env[63028]: DEBUG oslo_concurrency.lockutils [None req-6118af02-15c9-41e2-bf63-dacf4033738f tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Acquired lock "5a340e31-678c-437e-aa4e-07d5d9f4334f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 776.067967] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-265395d3-481b-4b91-9ccd-44840605e3d3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.073978] env[63028]: DEBUG oslo_vmware.api [None req-d602f814-ca27-4ecc-a846-b4d4de9caa30 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735447, 'name': ReconfigVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.090879] env[63028]: DEBUG oslo_concurrency.lockutils [None req-be8438c3-6eec-42fd-9020-8898f3ea9278 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] Releasing lock "refresh_cache-7e914e49-0d70-4024-940b-ad2a15e9dff7" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 776.091205] env[63028]: DEBUG nova.compute.manager [None req-be8438c3-6eec-42fd-9020-8898f3ea9278 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] [instance: 7e914e49-0d70-4024-940b-ad2a15e9dff7] Instance network_info: |[{"id": "4bf1e126-9048-489d-9c61-18ebe4c2af31", "address": "fa:16:3e:86:dd:f7", "network": {"id": "b27ad7b3-fa4d-4831-9ec4-23010a1301bc", "bridge": "br-int", "label": "tempest-ServersTestJSON-276288407-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3008f14179f4461aab24b22ce9174433", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "298bb8ef-4765-494c-b157-7a349218bd1e", "external-id": "nsx-vlan-transportzone-905", "segmentation_id": 905, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4bf1e126-90", "ovs_interfaceid": "4bf1e126-9048-489d-9c61-18ebe4c2af31", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 776.091810] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-be8438c3-6eec-42fd-9020-8898f3ea9278 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] [instance: 7e914e49-0d70-4024-940b-ad2a15e9dff7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:86:dd:f7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '298bb8ef-4765-494c-b157-7a349218bd1e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4bf1e126-9048-489d-9c61-18ebe4c2af31', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 776.099674] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-be8438c3-6eec-42fd-9020-8898f3ea9278 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] Creating folder: Project (3008f14179f4461aab24b22ce9174433). Parent ref: group-v550570. 
{{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 776.100550] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d33810e-db07-4da7-90d5-5c2db376bf9b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.104340] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-909c1cf2-4301-41f2-84f9-f743d10cd050 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.107287] env[63028]: DEBUG oslo_concurrency.lockutils [req-30c25956-d306-422d-9cf8-8bee539d540f req-df0bb6b3-2023-41d6-b285-1d03d9ac6353 service nova] Releasing lock "refresh_cache-5a340e31-678c-437e-aa4e-07d5d9f4334f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 776.133259] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-6118af02-15c9-41e2-bf63-dacf4033738f tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: 5a340e31-678c-437e-aa4e-07d5d9f4334f] Reconfiguring VM to detach interface {{(pid=63028) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 776.138021] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9da3df4a-1fbd-4db3-b976-a2e0651113df {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.148109] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-be8438c3-6eec-42fd-9020-8898f3ea9278 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] Created folder: Project (3008f14179f4461aab24b22ce9174433) in parent group-v550570. [ 776.148326] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-be8438c3-6eec-42fd-9020-8898f3ea9278 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] Creating folder: Instances. Parent ref: group-v550731. {{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 776.148609] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8977b9ba-7967-4f59-bf31-2e69d4892ac8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.158318] env[63028]: DEBUG oslo_vmware.api [None req-6118af02-15c9-41e2-bf63-dacf4033738f tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Waiting for the task: (returnval){ [ 776.158318] env[63028]: value = "task-2735450" [ 776.158318] env[63028]: _type = "Task" [ 776.158318] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 776.168877] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4b397209-e0b0-4739-afc2-198208fd9eb6 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.233s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 776.171035] env[63028]: DEBUG oslo_vmware.api [None req-6118af02-15c9-41e2-bf63-dacf4033738f tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2735450, 'name': ReconfigVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.172683] env[63028]: DEBUG oslo_concurrency.lockutils [None req-093af5c2-3354-4012-ab41-e1a65935f3a3 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.107s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 776.172940] env[63028]: DEBUG nova.objects.instance [None req-093af5c2-3354-4012-ab41-e1a65935f3a3 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] Lazy-loading 'resources' on Instance uuid 8c7c8713-d5d7-490e-aba5-25d98bfbfaa0 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 776.174309] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-be8438c3-6eec-42fd-9020-8898f3ea9278 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] Created folder: Instances in parent group-v550731. [ 776.174596] env[63028]: DEBUG oslo.service.loopingcall [None req-be8438c3-6eec-42fd-9020-8898f3ea9278 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 776.175129] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7e914e49-0d70-4024-940b-ad2a15e9dff7] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 776.175358] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-39174dc7-6aa0-4630-9ade-75b82125dba5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.193236] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-64d57359-6efb-4dbd-9bd5-0b6de0e28336 tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] [instance: d663c2df-ae54-4c50-a70f-e2180700c700] Volume attach. 
Driver type: vmdk {{(pid=63028) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 776.193591] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-64d57359-6efb-4dbd-9bd5-0b6de0e28336 tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] [instance: d663c2df-ae54-4c50-a70f-e2180700c700] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-550729', 'volume_id': 'fdc60726-6ead-4fa7-9b0a-56a0c631789c', 'name': 'volume-fdc60726-6ead-4fa7-9b0a-56a0c631789c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'd663c2df-ae54-4c50-a70f-e2180700c700', 'attached_at': '', 'detached_at': '', 'volume_id': 'fdc60726-6ead-4fa7-9b0a-56a0c631789c', 'serial': 'fdc60726-6ead-4fa7-9b0a-56a0c631789c'} {{(pid=63028) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 776.194796] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35f49bae-cd87-4bfc-a927-a47243ddc7d2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.214329] env[63028]: INFO nova.scheduler.client.report [None req-4b397209-e0b0-4739-afc2-198208fd9eb6 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] Deleted allocations for instance 352ac7c3-17a8-4d7e-a66f-47ea7614892c [ 776.216680] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df44238e-df6b-4899-81a4-9e99c4fcc690 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.219341] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 776.219341] env[63028]: value = "task-2735451" [ 776.219341] env[63028]: _type = "Task" [ 776.219341] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 776.251207] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-64d57359-6efb-4dbd-9bd5-0b6de0e28336 tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] [instance: d663c2df-ae54-4c50-a70f-e2180700c700] Reconfiguring VM instance instance-0000000c to attach disk [datastore1] volume-fdc60726-6ead-4fa7-9b0a-56a0c631789c/volume-fdc60726-6ead-4fa7-9b0a-56a0c631789c.vmdk or device None with type thin {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 776.252209] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3bfe0fd9-6d34-4799-b547-0447ecb5c31f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.271232] env[63028]: DEBUG nova.network.neutron [-] [instance: af87f1a5-b413-4b26-be91-474ad1f73df8] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 776.273911] env[63028]: DEBUG nova.compute.manager [req-43aa28ec-0d18-4c28-a13c-e8b84df5c5ff req-7819da39-aa4b-4910-8e11-a141d960e601 service nova] [instance: 7e914e49-0d70-4024-940b-ad2a15e9dff7] Received event network-vif-plugged-4bf1e126-9048-489d-9c61-18ebe4c2af31 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 776.274172] env[63028]: DEBUG oslo_concurrency.lockutils [req-43aa28ec-0d18-4c28-a13c-e8b84df5c5ff req-7819da39-aa4b-4910-8e11-a141d960e601 service nova] Acquiring lock "7e914e49-0d70-4024-940b-ad2a15e9dff7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 776.274412] env[63028]: DEBUG oslo_concurrency.lockutils [req-43aa28ec-0d18-4c28-a13c-e8b84df5c5ff req-7819da39-aa4b-4910-8e11-a141d960e601 service nova] Lock "7e914e49-0d70-4024-940b-ad2a15e9dff7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 776.274690] env[63028]: DEBUG oslo_concurrency.lockutils [req-43aa28ec-0d18-4c28-a13c-e8b84df5c5ff req-7819da39-aa4b-4910-8e11-a141d960e601 service nova] Lock "7e914e49-0d70-4024-940b-ad2a15e9dff7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 776.274981] env[63028]: DEBUG nova.compute.manager [req-43aa28ec-0d18-4c28-a13c-e8b84df5c5ff req-7819da39-aa4b-4910-8e11-a141d960e601 service nova] [instance: 7e914e49-0d70-4024-940b-ad2a15e9dff7] No waiting events found dispatching network-vif-plugged-4bf1e126-9048-489d-9c61-18ebe4c2af31 {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 776.275307] env[63028]: WARNING nova.compute.manager [req-43aa28ec-0d18-4c28-a13c-e8b84df5c5ff req-7819da39-aa4b-4910-8e11-a141d960e601 service nova] [instance: 7e914e49-0d70-4024-940b-ad2a15e9dff7] Received unexpected event network-vif-plugged-4bf1e126-9048-489d-9c61-18ebe4c2af31 for instance with vm_state building and task_state spawning. 
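The network-vif-plugged / pop_instance_event records above follow a register-then-dispatch pattern: a waiter is registered per instance and event name before the action that triggers the event, and the external-event handler pops it when Neutron reports back; if no waiter was registered, the event is logged as unexpected (the WARNING lines for instances still in vm_state building). The snippet below is a minimal, self-contained sketch of that pattern using only the standard library; the class and method names are illustrative stand-ins, not nova's actual InstanceEvents API.

```python
# Illustrative sketch only -- not nova.compute.manager.InstanceEvents.
import threading

class InstanceEvents:
    def __init__(self):
        self._lock = threading.Lock()
        self._waiters = {}  # (instance_uuid, event_name) -> threading.Event

    def prepare(self, instance_uuid, event_name):
        """Register interest in an event before starting the action that causes it."""
        ev = threading.Event()
        with self._lock:
            self._waiters[(instance_uuid, event_name)] = ev
        return ev

    def pop_instance_event(self, instance_uuid, event_name):
        """Called by the external-event handler; returns the waiter or None."""
        with self._lock:
            return self._waiters.pop((instance_uuid, event_name), None)

events = InstanceEvents()
waiter = events.prepare('7e914e49-0d70-4024-940b-ad2a15e9dff7',
                        'network-vif-plugged-4bf1e126-9048-489d-9c61-18ebe4c2af31')

# External event arrives from the network service:
popped = events.pop_instance_event('7e914e49-0d70-4024-940b-ad2a15e9dff7',
                                   'network-vif-plugged-4bf1e126-9048-489d-9c61-18ebe4c2af31')
if popped is not None:
    popped.set()                        # wake whoever is blocked on waiter.wait()
else:
    print("Received unexpected event")  # mirrors the WARNING lines above
```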
[ 776.275564] env[63028]: DEBUG nova.compute.manager [req-43aa28ec-0d18-4c28-a13c-e8b84df5c5ff req-7819da39-aa4b-4910-8e11-a141d960e601 service nova] [instance: 7e914e49-0d70-4024-940b-ad2a15e9dff7] Received event network-changed-4bf1e126-9048-489d-9c61-18ebe4c2af31 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 776.275844] env[63028]: DEBUG nova.compute.manager [req-43aa28ec-0d18-4c28-a13c-e8b84df5c5ff req-7819da39-aa4b-4910-8e11-a141d960e601 service nova] [instance: 7e914e49-0d70-4024-940b-ad2a15e9dff7] Refreshing instance network info cache due to event network-changed-4bf1e126-9048-489d-9c61-18ebe4c2af31. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 776.276196] env[63028]: DEBUG oslo_concurrency.lockutils [req-43aa28ec-0d18-4c28-a13c-e8b84df5c5ff req-7819da39-aa4b-4910-8e11-a141d960e601 service nova] Acquiring lock "refresh_cache-7e914e49-0d70-4024-940b-ad2a15e9dff7" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 776.276463] env[63028]: DEBUG oslo_concurrency.lockutils [req-43aa28ec-0d18-4c28-a13c-e8b84df5c5ff req-7819da39-aa4b-4910-8e11-a141d960e601 service nova] Acquired lock "refresh_cache-7e914e49-0d70-4024-940b-ad2a15e9dff7" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 776.276753] env[63028]: DEBUG nova.network.neutron [req-43aa28ec-0d18-4c28-a13c-e8b84df5c5ff req-7819da39-aa4b-4910-8e11-a141d960e601 service nova] [instance: 7e914e49-0d70-4024-940b-ad2a15e9dff7] Refreshing network info cache for port 4bf1e126-9048-489d-9c61-18ebe4c2af31 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 776.278671] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735451, 'name': CreateVM_Task} progress is 15%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.285768] env[63028]: DEBUG oslo_vmware.api [None req-64d57359-6efb-4dbd-9bd5-0b6de0e28336 tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Waiting for the task: (returnval){ [ 776.285768] env[63028]: value = "task-2735452" [ 776.285768] env[63028]: _type = "Task" [ 776.285768] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 776.291761] env[63028]: DEBUG nova.compute.manager [req-b0e21bb2-fb38-44d4-8df8-36f3222d72fb req-25a87c41-6897-471c-8b64-ca86dea534e7 service nova] [instance: af87f1a5-b413-4b26-be91-474ad1f73df8] Received event network-vif-deleted-431bf6ff-554c-484a-8431-a1f18a9b937e {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 776.291997] env[63028]: INFO nova.compute.manager [req-b0e21bb2-fb38-44d4-8df8-36f3222d72fb req-25a87c41-6897-471c-8b64-ca86dea534e7 service nova] [instance: af87f1a5-b413-4b26-be91-474ad1f73df8] Neutron deleted interface 431bf6ff-554c-484a-8431-a1f18a9b937e; detaching it from the instance and deleting it from the info cache [ 776.292221] env[63028]: DEBUG nova.network.neutron [req-b0e21bb2-fb38-44d4-8df8-36f3222d72fb req-25a87c41-6897-471c-8b64-ca86dea534e7 service nova] [instance: af87f1a5-b413-4b26-be91-474ad1f73df8] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 776.306033] env[63028]: DEBUG oslo_vmware.api [None req-f33d3f0b-c388-40bb-bf9b-780b1fd7657c tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Task: {'id': task-2735444, 'name': Rename_Task, 'duration_secs': 0.154523} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 776.310027] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-f33d3f0b-c388-40bb-bf9b-780b1fd7657c tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] [instance: da88308f-ce62-40af-adae-e38aa506bdd9] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 776.310584] env[63028]: DEBUG oslo_vmware.api [None req-64d57359-6efb-4dbd-9bd5-0b6de0e28336 tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Task: {'id': task-2735452, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.310822] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6773fee1-773b-4e11-af29-60cb3e545a95 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.319298] env[63028]: DEBUG oslo_vmware.api [None req-f33d3f0b-c388-40bb-bf9b-780b1fd7657c tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Waiting for the task: (returnval){ [ 776.319298] env[63028]: value = "task-2735453" [ 776.319298] env[63028]: _type = "Task" [ 776.319298] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 776.328976] env[63028]: DEBUG oslo_vmware.api [None req-f33d3f0b-c388-40bb-bf9b-780b1fd7657c tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Task: {'id': task-2735453, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.376571] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 776.376973] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 776.377229] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 776.377428] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 776.377679] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 776.377810] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 776.377891] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63028) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}} [ 776.378482] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 776.401771] env[63028]: DEBUG oslo_vmware.api [None req-f78b3882-29ca-452b-a11e-a6f9e213d5cd tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Task: {'id': task-2735445, 'name': Destroy_Task, 'duration_secs': 0.403054} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 776.402099] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-f78b3882-29ca-452b-a11e-a6f9e213d5cd tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 1316318e-8dcf-4ac2-b40a-6a3ab6964997] Destroyed the VM [ 776.402390] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-f78b3882-29ca-452b-a11e-a6f9e213d5cd tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 1316318e-8dcf-4ac2-b40a-6a3ab6964997] Deleting Snapshot of the VM instance {{(pid=63028) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 776.402681] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-2780debc-bfe5-4c66-8d62-1478998040ea {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.410210] env[63028]: DEBUG oslo_vmware.api [None req-f78b3882-29ca-452b-a11e-a6f9e213d5cd tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Waiting for the task: (returnval){ [ 776.410210] env[63028]: value = "task-2735454" [ 776.410210] env[63028]: _type = "Task" [ 776.410210] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 776.418377] env[63028]: DEBUG oslo_vmware.api [None req-f78b3882-29ca-452b-a11e-a6f9e213d5cd tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Task: {'id': task-2735454, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.464446] env[63028]: DEBUG oslo_vmware.api [None req-273a0846-f74e-4cbe-88c7-25ef6a67da40 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Task: {'id': task-2735446, 'name': PowerOffVM_Task, 'duration_secs': 0.232555} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 776.464643] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-273a0846-f74e-4cbe-88c7-25ef6a67da40 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: a4b0d948-d950-414a-b23f-faefa5ab038c] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 776.464899] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-273a0846-f74e-4cbe-88c7-25ef6a67da40 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: a4b0d948-d950-414a-b23f-faefa5ab038c] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 776.465254] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8678726a-d26b-4110-b182-afc784d97a1c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.545946] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-273a0846-f74e-4cbe-88c7-25ef6a67da40 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: a4b0d948-d950-414a-b23f-faefa5ab038c] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 776.546186] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-273a0846-f74e-4cbe-88c7-25ef6a67da40 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: a4b0d948-d950-414a-b23f-faefa5ab038c] Deleting contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 776.546387] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-273a0846-f74e-4cbe-88c7-25ef6a67da40 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Deleting the datastore file [datastore2] a4b0d948-d950-414a-b23f-faefa5ab038c {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 776.546665] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-942e37fd-de00-4800-a825-ecd35c2c1667 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.556964] env[63028]: DEBUG oslo_vmware.api [None req-273a0846-f74e-4cbe-88c7-25ef6a67da40 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Waiting for the task: (returnval){ [ 776.556964] env[63028]: value = "task-2735456" [ 776.556964] env[63028]: _type = "Task" [ 776.556964] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 776.564929] env[63028]: DEBUG oslo_vmware.api [None req-d602f814-ca27-4ecc-a846-b4d4de9caa30 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735447, 'name': ReconfigVM_Task, 'duration_secs': 0.440555} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 776.565829] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-d602f814-ca27-4ecc-a846-b4d4de9caa30 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: cd11b318-9158-4f1d-8aa8-1c9d565bb5d5] Reconfigured VM instance instance-00000033 to attach disk [datastore1] cd11b318-9158-4f1d-8aa8-1c9d565bb5d5/cd11b318-9158-4f1d-8aa8-1c9d565bb5d5.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 776.569110] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-79438c3c-fc45-42d9-b715-1e041736e9e6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.574155] env[63028]: DEBUG oslo_vmware.api [None req-273a0846-f74e-4cbe-88c7-25ef6a67da40 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Task: {'id': task-2735456, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.581555] env[63028]: DEBUG oslo_vmware.api [None req-d602f814-ca27-4ecc-a846-b4d4de9caa30 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Waiting for the task: (returnval){ [ 776.581555] env[63028]: value = "task-2735457" [ 776.581555] env[63028]: _type = "Task" [ 776.581555] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 776.591489] env[63028]: DEBUG oslo_vmware.api [None req-d602f814-ca27-4ecc-a846-b4d4de9caa30 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735457, 'name': Rename_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.671580] env[63028]: DEBUG oslo_vmware.api [None req-6118af02-15c9-41e2-bf63-dacf4033738f tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2735450, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.738368] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4b397209-e0b0-4739-afc2-198208fd9eb6 tempest-ServersNegativeTestMultiTenantJSON-980579499 tempest-ServersNegativeTestMultiTenantJSON-980579499-project-member] Lock "352ac7c3-17a8-4d7e-a66f-47ea7614892c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.921s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 776.749579] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735451, 'name': CreateVM_Task, 'duration_secs': 0.373661} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 776.749811] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7e914e49-0d70-4024-940b-ad2a15e9dff7] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 776.750597] env[63028]: DEBUG oslo_concurrency.lockutils [None req-be8438c3-6eec-42fd-9020-8898f3ea9278 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 776.750796] env[63028]: DEBUG oslo_concurrency.lockutils [None req-be8438c3-6eec-42fd-9020-8898f3ea9278 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 776.751138] env[63028]: DEBUG oslo_concurrency.lockutils [None req-be8438c3-6eec-42fd-9020-8898f3ea9278 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 776.751401] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c34f4e15-8da4-4e14-ae57-4b3b4c19bd0a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.756883] env[63028]: DEBUG oslo_vmware.api [None req-be8438c3-6eec-42fd-9020-8898f3ea9278 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] Waiting for the task: (returnval){ [ 776.756883] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52a4eb74-c882-2444-fdbd-fea45eeed43c" [ 776.756883] env[63028]: _type = "Task" [ 776.756883] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 776.769089] env[63028]: DEBUG oslo_vmware.api [None req-be8438c3-6eec-42fd-9020-8898f3ea9278 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52a4eb74-c882-2444-fdbd-fea45eeed43c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.780447] env[63028]: INFO nova.compute.manager [-] [instance: af87f1a5-b413-4b26-be91-474ad1f73df8] Took 1.94 seconds to deallocate network for instance. [ 776.801104] env[63028]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e70da1bb-aa38-4ddc-a932-54637a76c0be {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.811237] env[63028]: DEBUG oslo_vmware.api [None req-64d57359-6efb-4dbd-9bd5-0b6de0e28336 tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Task: {'id': task-2735452, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.819548] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05f8571b-dfad-4395-985d-aac2da6ea345 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.845041] env[63028]: DEBUG oslo_vmware.api [None req-f33d3f0b-c388-40bb-bf9b-780b1fd7657c tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Task: {'id': task-2735453, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.862254] env[63028]: DEBUG nova.compute.manager [req-b0e21bb2-fb38-44d4-8df8-36f3222d72fb req-25a87c41-6897-471c-8b64-ca86dea534e7 service nova] [instance: af87f1a5-b413-4b26-be91-474ad1f73df8] Detach interface failed, port_id=431bf6ff-554c-484a-8431-a1f18a9b937e, reason: Instance af87f1a5-b413-4b26-be91-474ad1f73df8 could not be found. {{(pid=63028) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 776.881226] env[63028]: DEBUG oslo_concurrency.lockutils [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 776.920137] env[63028]: DEBUG oslo_vmware.api [None req-f78b3882-29ca-452b-a11e-a6f9e213d5cd tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Task: {'id': task-2735454, 'name': RemoveSnapshot_Task} progress is 78%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.944035] env[63028]: DEBUG nova.network.neutron [req-99d87696-03fa-4512-af18-b2e10da8e816 req-119cfcf3-73a6-4b13-a557-737ecdc66192 service nova] [instance: cd11b318-9158-4f1d-8aa8-1c9d565bb5d5] Updated VIF entry in instance network info cache for port 34861b32-33da-4ca0-b4ae-0031ab0f8619. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 776.944440] env[63028]: DEBUG nova.network.neutron [req-99d87696-03fa-4512-af18-b2e10da8e816 req-119cfcf3-73a6-4b13-a557-737ecdc66192 service nova] [instance: cd11b318-9158-4f1d-8aa8-1c9d565bb5d5] Updating instance_info_cache with network_info: [{"id": "34861b32-33da-4ca0-b4ae-0031ab0f8619", "address": "fa:16:3e:46:83:a5", "network": {"id": "35eace8d-0404-444c-a1ec-e13fd5687644", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1634949692-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ef9a42771824708832a74238bbe89c0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c330dbdb-ad20-4e7e-8a12-66e4a914a84a", "external-id": "nsx-vlan-transportzone-181", "segmentation_id": 181, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap34861b32-33", "ovs_interfaceid": "34861b32-33da-4ca0-b4ae-0031ab0f8619", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 777.026559] env[63028]: DEBUG nova.network.neutron [None req-d9ad4531-4478-4b68-8e6a-ce52da2b3c63 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] [instance: 15326f55-2db8-47c3-b1fd-ce8ba1174c79] Successfully updated port: f00fcabf-7289-4128-84ab-c81a45858e92 {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 777.071108] env[63028]: DEBUG oslo_vmware.api [None req-273a0846-f74e-4cbe-88c7-25ef6a67da40 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Task: {'id': task-2735456, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.190898} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 777.076859] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-273a0846-f74e-4cbe-88c7-25ef6a67da40 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 777.076859] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-273a0846-f74e-4cbe-88c7-25ef6a67da40 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: a4b0d948-d950-414a-b23f-faefa5ab038c] Deleted contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 777.076859] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-273a0846-f74e-4cbe-88c7-25ef6a67da40 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: a4b0d948-d950-414a-b23f-faefa5ab038c] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 777.076859] env[63028]: INFO nova.compute.manager [None req-273a0846-f74e-4cbe-88c7-25ef6a67da40 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: a4b0d948-d950-414a-b23f-faefa5ab038c] Took 1.14 seconds to destroy the instance on the hypervisor. [ 777.076859] env[63028]: DEBUG oslo.service.loopingcall [None req-273a0846-f74e-4cbe-88c7-25ef6a67da40 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 777.076859] env[63028]: DEBUG nova.compute.manager [-] [instance: a4b0d948-d950-414a-b23f-faefa5ab038c] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 777.076859] env[63028]: DEBUG nova.network.neutron [-] [instance: a4b0d948-d950-414a-b23f-faefa5ab038c] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 777.095623] env[63028]: DEBUG oslo_vmware.api [None req-d602f814-ca27-4ecc-a846-b4d4de9caa30 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735457, 'name': Rename_Task, 'duration_secs': 0.228411} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 777.095900] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-d602f814-ca27-4ecc-a846-b4d4de9caa30 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: cd11b318-9158-4f1d-8aa8-1c9d565bb5d5] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 777.096174] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-563ec237-13ae-45d1-972d-6dd2f6ebee76 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.102179] env[63028]: DEBUG nova.network.neutron [req-43aa28ec-0d18-4c28-a13c-e8b84df5c5ff req-7819da39-aa4b-4910-8e11-a141d960e601 service nova] [instance: 7e914e49-0d70-4024-940b-ad2a15e9dff7] Updated VIF entry in instance network info cache for port 4bf1e126-9048-489d-9c61-18ebe4c2af31. {{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 777.102179] env[63028]: DEBUG nova.network.neutron [req-43aa28ec-0d18-4c28-a13c-e8b84df5c5ff req-7819da39-aa4b-4910-8e11-a141d960e601 service nova] [instance: 7e914e49-0d70-4024-940b-ad2a15e9dff7] Updating instance_info_cache with network_info: [{"id": "4bf1e126-9048-489d-9c61-18ebe4c2af31", "address": "fa:16:3e:86:dd:f7", "network": {"id": "b27ad7b3-fa4d-4831-9ec4-23010a1301bc", "bridge": "br-int", "label": "tempest-ServersTestJSON-276288407-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3008f14179f4461aab24b22ce9174433", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "298bb8ef-4765-494c-b157-7a349218bd1e", "external-id": "nsx-vlan-transportzone-905", "segmentation_id": 905, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4bf1e126-90", "ovs_interfaceid": "4bf1e126-9048-489d-9c61-18ebe4c2af31", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 777.108135] env[63028]: DEBUG oslo_vmware.api [None req-d602f814-ca27-4ecc-a846-b4d4de9caa30 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Waiting for the task: (returnval){ [ 777.108135] env[63028]: value = "task-2735458" [ 777.108135] env[63028]: _type = "Task" [ 777.108135] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 777.126198] env[63028]: DEBUG oslo_vmware.api [None req-d602f814-ca27-4ecc-a846-b4d4de9caa30 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735458, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.172043] env[63028]: DEBUG oslo_vmware.api [None req-6118af02-15c9-41e2-bf63-dacf4033738f tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2735450, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.269596] env[63028]: DEBUG oslo_vmware.api [None req-be8438c3-6eec-42fd-9020-8898f3ea9278 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52a4eb74-c882-2444-fdbd-fea45eeed43c, 'name': SearchDatastore_Task, 'duration_secs': 0.014444} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 777.270075] env[63028]: DEBUG oslo_concurrency.lockutils [None req-be8438c3-6eec-42fd-9020-8898f3ea9278 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 777.270442] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-be8438c3-6eec-42fd-9020-8898f3ea9278 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] [instance: 7e914e49-0d70-4024-940b-ad2a15e9dff7] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 777.271111] env[63028]: DEBUG oslo_concurrency.lockutils [None req-be8438c3-6eec-42fd-9020-8898f3ea9278 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 777.271393] env[63028]: DEBUG oslo_concurrency.lockutils [None req-be8438c3-6eec-42fd-9020-8898f3ea9278 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 777.271652] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-be8438c3-6eec-42fd-9020-8898f3ea9278 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 777.272040] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-38439741-6e31-4eef-9fee-94dd69064619 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.284973] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-be8438c3-6eec-42fd-9020-8898f3ea9278 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 777.285332] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None 
req-be8438c3-6eec-42fd-9020-8898f3ea9278 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 777.287717] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9692ed19-9974-4729-aca3-f39f27ee5ad4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.294577] env[63028]: DEBUG oslo_vmware.api [None req-be8438c3-6eec-42fd-9020-8898f3ea9278 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] Waiting for the task: (returnval){ [ 777.294577] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5269b469-cec0-905d-3f76-0aa2038bb537" [ 777.294577] env[63028]: _type = "Task" [ 777.294577] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 777.298877] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d53450d6-dbb3-40d0-b918-b0930d6cbfee tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 777.308110] env[63028]: DEBUG oslo_vmware.api [None req-be8438c3-6eec-42fd-9020-8898f3ea9278 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5269b469-cec0-905d-3f76-0aa2038bb537, 'name': SearchDatastore_Task, 'duration_secs': 0.011446} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 777.314894] env[63028]: DEBUG oslo_vmware.api [None req-64d57359-6efb-4dbd-9bd5-0b6de0e28336 tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Task: {'id': task-2735452, 'name': ReconfigVM_Task, 'duration_secs': 0.618191} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 777.316062] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-671a0446-bc3b-4b11-8a8e-8be1557cceb4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.318170] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-64d57359-6efb-4dbd-9bd5-0b6de0e28336 tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] [instance: d663c2df-ae54-4c50-a70f-e2180700c700] Reconfigured VM instance instance-0000000c to attach disk [datastore1] volume-fdc60726-6ead-4fa7-9b0a-56a0c631789c/volume-fdc60726-6ead-4fa7-9b0a-56a0c631789c.vmdk or device None with type thin {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 777.322942] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b54d2b98-4193-4a2d-b18c-79fa1e3ea815 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.342162] env[63028]: DEBUG oslo_vmware.api [None req-be8438c3-6eec-42fd-9020-8898f3ea9278 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] Waiting for the task: (returnval){ [ 777.342162] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52ecc203-8cd5-b010-762e-32eb39c6e55d" [ 777.342162] env[63028]: _type = "Task" [ 777.342162] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 777.349854] env[63028]: DEBUG oslo_vmware.api [None req-64d57359-6efb-4dbd-9bd5-0b6de0e28336 tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Waiting for the task: (returnval){ [ 777.349854] env[63028]: value = "task-2735459" [ 777.349854] env[63028]: _type = "Task" [ 777.349854] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 777.350110] env[63028]: DEBUG oslo_vmware.api [None req-f33d3f0b-c388-40bb-bf9b-780b1fd7657c tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Task: {'id': task-2735453, 'name': PowerOnVM_Task, 'duration_secs': 0.66301} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 777.351809] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-f33d3f0b-c388-40bb-bf9b-780b1fd7657c tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] [instance: da88308f-ce62-40af-adae-e38aa506bdd9] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 777.351918] env[63028]: INFO nova.compute.manager [None req-f33d3f0b-c388-40bb-bf9b-780b1fd7657c tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] [instance: da88308f-ce62-40af-adae-e38aa506bdd9] Took 9.50 seconds to spawn the instance on the hypervisor. 
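Most entries around this point come in wait_for_task / _poll_task pairs: the vSphere call returns a task reference immediately, and the service then polls its progress until it reaches a terminal state (the "progress is N%" and "completed successfully" lines). The following is a generic poll-until-done loop in that spirit, offered as a sketch under stated assumptions rather than oslo.vmware's implementation; get_task_info is a hypothetical stand-in for the PropertyCollector query that reads task state.

```python
# Generic sketch of poll-until-done; not oslo_vmware.api.VMwareAPISession code.
import time

def wait_for_task(get_task_info, task_id, interval=0.5, timeout=300.0):
    """Poll a task until it succeeds, fails, or the timeout expires."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info(task_id)  # hypothetical helper, e.g. returns
                                       # {'state': 'running', 'progress': 15}
        if info['state'] == 'success':
            return info.get('result')
        if info['state'] == 'error':
            raise RuntimeError(f"task {task_id} failed: {info.get('error')}")
        time.sleep(interval)           # between polls the log records "progress is N%"
    raise TimeoutError(f"task {task_id} did not complete within {timeout}s")

# Example with a fake task source standing in for vCenter:
states = iter([{'state': 'running', 'progress': 0},
               {'state': 'running', 'progress': 66},
               {'state': 'success', 'result': 'vm-550731'}])
print(wait_for_task(lambda _id: next(states), 'task-2735458', interval=0.01))
```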
[ 777.352176] env[63028]: DEBUG nova.compute.manager [None req-f33d3f0b-c388-40bb-bf9b-780b1fd7657c tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] [instance: da88308f-ce62-40af-adae-e38aa506bdd9] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 777.353049] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0488f18-5e1d-418a-afcc-e6626ba1a551 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.363245] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-163b3975-d2a9-4208-8851-ff319a54a6e1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.365794] env[63028]: DEBUG oslo_vmware.api [None req-be8438c3-6eec-42fd-9020-8898f3ea9278 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52ecc203-8cd5-b010-762e-32eb39c6e55d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.377368] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7234b015-c2ee-4a4f-8aeb-15b0bea12b6c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.383414] env[63028]: DEBUG oslo_vmware.api [None req-64d57359-6efb-4dbd-9bd5-0b6de0e28336 tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Task: {'id': task-2735459, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.418746] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-097fe93b-4027-4229-8bea-0483740aed40 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.430471] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17540823-eff8-4038-b2d9-c2bae0f2d828 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.434710] env[63028]: DEBUG oslo_vmware.api [None req-f78b3882-29ca-452b-a11e-a6f9e213d5cd tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Task: {'id': task-2735454, 'name': RemoveSnapshot_Task} progress is 100%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.446553] env[63028]: DEBUG nova.compute.provider_tree [None req-093af5c2-3354-4012-ab41-e1a65935f3a3 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 777.448105] env[63028]: DEBUG oslo_concurrency.lockutils [req-99d87696-03fa-4512-af18-b2e10da8e816 req-119cfcf3-73a6-4b13-a557-737ecdc66192 service nova] Releasing lock "refresh_cache-cd11b318-9158-4f1d-8aa8-1c9d565bb5d5" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 777.534700] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d9ad4531-4478-4b68-8e6a-ce52da2b3c63 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Acquiring lock "refresh_cache-15326f55-2db8-47c3-b1fd-ce8ba1174c79" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 777.534865] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d9ad4531-4478-4b68-8e6a-ce52da2b3c63 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Acquired lock "refresh_cache-15326f55-2db8-47c3-b1fd-ce8ba1174c79" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 777.535035] env[63028]: DEBUG nova.network.neutron [None req-d9ad4531-4478-4b68-8e6a-ce52da2b3c63 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] [instance: 15326f55-2db8-47c3-b1fd-ce8ba1174c79] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 777.604258] env[63028]: DEBUG oslo_concurrency.lockutils [req-43aa28ec-0d18-4c28-a13c-e8b84df5c5ff req-7819da39-aa4b-4910-8e11-a141d960e601 service nova] Releasing lock "refresh_cache-7e914e49-0d70-4024-940b-ad2a15e9dff7" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 777.620620] env[63028]: DEBUG oslo_vmware.api [None req-d602f814-ca27-4ecc-a846-b4d4de9caa30 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735458, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.670245] env[63028]: DEBUG oslo_vmware.api [None req-6118af02-15c9-41e2-bf63-dacf4033738f tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2735450, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.855389] env[63028]: DEBUG oslo_vmware.api [None req-be8438c3-6eec-42fd-9020-8898f3ea9278 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52ecc203-8cd5-b010-762e-32eb39c6e55d, 'name': SearchDatastore_Task, 'duration_secs': 0.020384} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 777.858780] env[63028]: DEBUG oslo_concurrency.lockutils [None req-be8438c3-6eec-42fd-9020-8898f3ea9278 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 777.859170] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-be8438c3-6eec-42fd-9020-8898f3ea9278 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] 7e914e49-0d70-4024-940b-ad2a15e9dff7/7e914e49-0d70-4024-940b-ad2a15e9dff7.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 777.859407] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8ff461c8-89fb-4353-9646-dbb25efe9183 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.868504] env[63028]: DEBUG nova.network.neutron [-] [instance: a4b0d948-d950-414a-b23f-faefa5ab038c] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 777.870086] env[63028]: DEBUG oslo_vmware.api [None req-64d57359-6efb-4dbd-9bd5-0b6de0e28336 tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Task: {'id': task-2735459, 'name': ReconfigVM_Task, 'duration_secs': 0.322355} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 777.872686] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-64d57359-6efb-4dbd-9bd5-0b6de0e28336 tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] [instance: d663c2df-ae54-4c50-a70f-e2180700c700] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-550729', 'volume_id': 'fdc60726-6ead-4fa7-9b0a-56a0c631789c', 'name': 'volume-fdc60726-6ead-4fa7-9b0a-56a0c631789c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'd663c2df-ae54-4c50-a70f-e2180700c700', 'attached_at': '', 'detached_at': '', 'volume_id': 'fdc60726-6ead-4fa7-9b0a-56a0c631789c', 'serial': 'fdc60726-6ead-4fa7-9b0a-56a0c631789c'} {{(pid=63028) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 777.873345] env[63028]: DEBUG oslo_vmware.api [None req-be8438c3-6eec-42fd-9020-8898f3ea9278 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] Waiting for the task: (returnval){ [ 777.873345] env[63028]: value = "task-2735460" [ 777.873345] env[63028]: _type = "Task" [ 777.873345] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 777.892756] env[63028]: DEBUG oslo_vmware.api [None req-be8438c3-6eec-42fd-9020-8898f3ea9278 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] Task: {'id': task-2735460, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.895200] env[63028]: INFO nova.compute.manager [None req-f33d3f0b-c388-40bb-bf9b-780b1fd7657c tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] [instance: da88308f-ce62-40af-adae-e38aa506bdd9] Took 36.44 seconds to build instance. [ 777.928734] env[63028]: DEBUG oslo_vmware.api [None req-f78b3882-29ca-452b-a11e-a6f9e213d5cd tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Task: {'id': task-2735454, 'name': RemoveSnapshot_Task, 'duration_secs': 1.029188} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 777.929310] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-f78b3882-29ca-452b-a11e-a6f9e213d5cd tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 1316318e-8dcf-4ac2-b40a-6a3ab6964997] Deleted Snapshot of the VM instance {{(pid=63028) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 777.929689] env[63028]: INFO nova.compute.manager [None req-f78b3882-29ca-452b-a11e-a6f9e213d5cd tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 1316318e-8dcf-4ac2-b40a-6a3ab6964997] Took 16.12 seconds to snapshot the instance on the hypervisor. [ 777.950050] env[63028]: DEBUG nova.scheduler.client.report [None req-093af5c2-3354-4012-ab41-e1a65935f3a3 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 778.076798] env[63028]: DEBUG nova.network.neutron [None req-d9ad4531-4478-4b68-8e6a-ce52da2b3c63 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] [instance: 15326f55-2db8-47c3-b1fd-ce8ba1174c79] Instance cache missing network info. {{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 778.120033] env[63028]: DEBUG oslo_vmware.api [None req-d602f814-ca27-4ecc-a846-b4d4de9caa30 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735458, 'name': PowerOnVM_Task, 'duration_secs': 0.941136} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.120281] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-d602f814-ca27-4ecc-a846-b4d4de9caa30 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: cd11b318-9158-4f1d-8aa8-1c9d565bb5d5] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 778.120372] env[63028]: INFO nova.compute.manager [None req-d602f814-ca27-4ecc-a846-b4d4de9caa30 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: cd11b318-9158-4f1d-8aa8-1c9d565bb5d5] Took 7.50 seconds to spawn the instance on the hypervisor. [ 778.120553] env[63028]: DEBUG nova.compute.manager [None req-d602f814-ca27-4ecc-a846-b4d4de9caa30 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: cd11b318-9158-4f1d-8aa8-1c9d565bb5d5] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 778.121401] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e1c83ea-8cf9-4245-8481-1fbe27eee40b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.174016] env[63028]: DEBUG oslo_vmware.api [None req-6118af02-15c9-41e2-bf63-dacf4033738f tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2735450, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.282998] env[63028]: DEBUG nova.network.neutron [None req-d9ad4531-4478-4b68-8e6a-ce52da2b3c63 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] [instance: 15326f55-2db8-47c3-b1fd-ce8ba1174c79] Updating instance_info_cache with network_info: [{"id": "f00fcabf-7289-4128-84ab-c81a45858e92", "address": "fa:16:3e:e4:62:8a", "network": {"id": "47c482bc-2ff1-431d-8910-0bf36def79a2", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.223", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "96661ac8d4f04d6e97eea4809b444133", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56136ef6-99d7-4562-9a9f-d66fec951c5c", "external-id": "nsx-vlan-transportzone-32", "segmentation_id": 32, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf00fcabf-72", "ovs_interfaceid": "f00fcabf-7289-4128-84ab-c81a45858e92", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 778.374862] env[63028]: INFO nova.compute.manager [-] [instance: a4b0d948-d950-414a-b23f-faefa5ab038c] Took 1.30 seconds to deallocate network for instance. 
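The instance_info_cache updates above (for example the one just logged for instance 15326f55-2db8-47c3-b1fd-ce8ba1174c79) are a list of VIF dicts. The following is a small self-contained sketch in plain Python, with the dict abridged from that cache entry, showing where the port id, MAC address and fixed IP used elsewhere in the log sit inside the structure.

# VIF list abridged from the "Updating instance_info_cache with network_info"
# entry above; only the fields read below are kept.
network_info = [{
    "id": "f00fcabf-7289-4128-84ab-c81a45858e92",
    "address": "fa:16:3e:e4:62:8a",
    "network": {
        "id": "47c482bc-2ff1-431d-8910-0bf36def79a2",
        "bridge": "br-int",
        "subnets": [{
            "cidr": "192.168.233.0/24",
            "ips": [{"address": "192.168.233.223", "type": "fixed", "version": 4}],
        }],
    },
    "type": "ovs",
    "devname": "tapf00fcabf-72",
}]

for vif in network_info:
    # Collect the fixed IPs of every subnet attached to this VIF.
    fixed_ips = [ip["address"]
                 for subnet in vif["network"]["subnets"]
                 for ip in subnet["ips"]
                 if ip["type"] == "fixed"]
    print(vif["id"], vif["address"], fixed_ips)
    # -> f00fcabf-7289-4128-84ab-c81a45858e92 fa:16:3e:e4:62:8a ['192.168.233.223']

The same port id (f00fcabf-7289-4128-84ab-c81a45858e92) is what the network-vif-plugged and network-changed events further down key on when the cache is refreshed for that port.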
[ 778.392694] env[63028]: DEBUG oslo_vmware.api [None req-be8438c3-6eec-42fd-9020-8898f3ea9278 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] Task: {'id': task-2735460, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.398578] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f33d3f0b-c388-40bb-bf9b-780b1fd7657c tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Lock "da88308f-ce62-40af-adae-e38aa506bdd9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 73.499s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 778.456223] env[63028]: DEBUG oslo_concurrency.lockutils [None req-093af5c2-3354-4012-ab41-e1a65935f3a3 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.283s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 778.459436] env[63028]: DEBUG oslo_concurrency.lockutils [None req-3a4e4332-06ce-40fd-b127-c2bc84c3f268 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.564s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 778.464670] env[63028]: INFO nova.compute.claims [None req-3a4e4332-06ce-40fd-b127-c2bc84c3f268 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 778.490369] env[63028]: INFO nova.scheduler.client.report [None req-093af5c2-3354-4012-ab41-e1a65935f3a3 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] Deleted allocations for instance 8c7c8713-d5d7-490e-aba5-25d98bfbfaa0 [ 778.504446] env[63028]: DEBUG nova.compute.manager [req-d9b04050-f3b2-4b9e-8d8c-e17a578e17c4 req-1f421d08-f20e-4a01-90c9-12577834595d service nova] [instance: 15326f55-2db8-47c3-b1fd-ce8ba1174c79] Received event network-vif-plugged-f00fcabf-7289-4128-84ab-c81a45858e92 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 778.504446] env[63028]: DEBUG oslo_concurrency.lockutils [req-d9b04050-f3b2-4b9e-8d8c-e17a578e17c4 req-1f421d08-f20e-4a01-90c9-12577834595d service nova] Acquiring lock "15326f55-2db8-47c3-b1fd-ce8ba1174c79-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 778.504446] env[63028]: DEBUG oslo_concurrency.lockutils [req-d9b04050-f3b2-4b9e-8d8c-e17a578e17c4 req-1f421d08-f20e-4a01-90c9-12577834595d service nova] Lock "15326f55-2db8-47c3-b1fd-ce8ba1174c79-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 778.504446] env[63028]: DEBUG oslo_concurrency.lockutils 
[req-d9b04050-f3b2-4b9e-8d8c-e17a578e17c4 req-1f421d08-f20e-4a01-90c9-12577834595d service nova] Lock "15326f55-2db8-47c3-b1fd-ce8ba1174c79-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 778.504446] env[63028]: DEBUG nova.compute.manager [req-d9b04050-f3b2-4b9e-8d8c-e17a578e17c4 req-1f421d08-f20e-4a01-90c9-12577834595d service nova] [instance: 15326f55-2db8-47c3-b1fd-ce8ba1174c79] No waiting events found dispatching network-vif-plugged-f00fcabf-7289-4128-84ab-c81a45858e92 {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 778.504446] env[63028]: WARNING nova.compute.manager [req-d9b04050-f3b2-4b9e-8d8c-e17a578e17c4 req-1f421d08-f20e-4a01-90c9-12577834595d service nova] [instance: 15326f55-2db8-47c3-b1fd-ce8ba1174c79] Received unexpected event network-vif-plugged-f00fcabf-7289-4128-84ab-c81a45858e92 for instance with vm_state building and task_state spawning. [ 778.504446] env[63028]: DEBUG nova.compute.manager [req-d9b04050-f3b2-4b9e-8d8c-e17a578e17c4 req-1f421d08-f20e-4a01-90c9-12577834595d service nova] [instance: 15326f55-2db8-47c3-b1fd-ce8ba1174c79] Received event network-changed-f00fcabf-7289-4128-84ab-c81a45858e92 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 778.504446] env[63028]: DEBUG nova.compute.manager [req-d9b04050-f3b2-4b9e-8d8c-e17a578e17c4 req-1f421d08-f20e-4a01-90c9-12577834595d service nova] [instance: 15326f55-2db8-47c3-b1fd-ce8ba1174c79] Refreshing instance network info cache due to event network-changed-f00fcabf-7289-4128-84ab-c81a45858e92. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 778.504960] env[63028]: DEBUG oslo_concurrency.lockutils [req-d9b04050-f3b2-4b9e-8d8c-e17a578e17c4 req-1f421d08-f20e-4a01-90c9-12577834595d service nova] Acquiring lock "refresh_cache-15326f55-2db8-47c3-b1fd-ce8ba1174c79" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 778.646351] env[63028]: INFO nova.compute.manager [None req-d602f814-ca27-4ecc-a846-b4d4de9caa30 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: cd11b318-9158-4f1d-8aa8-1c9d565bb5d5] Took 33.23 seconds to build instance. [ 778.675317] env[63028]: DEBUG oslo_vmware.api [None req-6118af02-15c9-41e2-bf63-dacf4033738f tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2735450, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.789042] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d9ad4531-4478-4b68-8e6a-ce52da2b3c63 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Releasing lock "refresh_cache-15326f55-2db8-47c3-b1fd-ce8ba1174c79" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 778.789042] env[63028]: DEBUG nova.compute.manager [None req-d9ad4531-4478-4b68-8e6a-ce52da2b3c63 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] [instance: 15326f55-2db8-47c3-b1fd-ce8ba1174c79] Instance network_info: |[{"id": "f00fcabf-7289-4128-84ab-c81a45858e92", "address": "fa:16:3e:e4:62:8a", "network": {"id": "47c482bc-2ff1-431d-8910-0bf36def79a2", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.223", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "96661ac8d4f04d6e97eea4809b444133", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56136ef6-99d7-4562-9a9f-d66fec951c5c", "external-id": "nsx-vlan-transportzone-32", "segmentation_id": 32, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf00fcabf-72", "ovs_interfaceid": "f00fcabf-7289-4128-84ab-c81a45858e92", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 778.789042] env[63028]: DEBUG oslo_concurrency.lockutils [req-d9b04050-f3b2-4b9e-8d8c-e17a578e17c4 req-1f421d08-f20e-4a01-90c9-12577834595d service nova] Acquired lock "refresh_cache-15326f55-2db8-47c3-b1fd-ce8ba1174c79" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 778.789042] env[63028]: DEBUG nova.network.neutron [req-d9b04050-f3b2-4b9e-8d8c-e17a578e17c4 req-1f421d08-f20e-4a01-90c9-12577834595d service nova] [instance: 15326f55-2db8-47c3-b1fd-ce8ba1174c79] Refreshing network info cache for port f00fcabf-7289-4128-84ab-c81a45858e92 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 778.789042] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-d9ad4531-4478-4b68-8e6a-ce52da2b3c63 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] [instance: 15326f55-2db8-47c3-b1fd-ce8ba1174c79] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e4:62:8a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '56136ef6-99d7-4562-9a9f-d66fec951c5c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f00fcabf-7289-4128-84ab-c81a45858e92', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 778.804557] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9ad4531-4478-4b68-8e6a-ce52da2b3c63 tempest-ServersAdminNegativeTestJSON-1478593092 
tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Creating folder: Project (9ca23bbd50b041859820261db200b1af). Parent ref: group-v550570. {{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 778.805810] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d917ab98-663e-40e7-9a78-852d4d4d6238 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.820263] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-d9ad4531-4478-4b68-8e6a-ce52da2b3c63 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Created folder: Project (9ca23bbd50b041859820261db200b1af) in parent group-v550570. [ 778.820557] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9ad4531-4478-4b68-8e6a-ce52da2b3c63 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Creating folder: Instances. Parent ref: group-v550734. {{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 778.820985] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f3f6c2bb-e325-4e4f-80e3-af0edadf714b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.832278] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-d9ad4531-4478-4b68-8e6a-ce52da2b3c63 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Created folder: Instances in parent group-v550734. [ 778.832593] env[63028]: DEBUG oslo.service.loopingcall [None req-d9ad4531-4478-4b68-8e6a-ce52da2b3c63 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 778.832708] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 15326f55-2db8-47c3-b1fd-ce8ba1174c79] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 778.832976] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c434a3d1-51f5-458e-acf3-11936e523b4a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.856194] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 778.856194] env[63028]: value = "task-2735463" [ 778.856194] env[63028]: _type = "Task" [ 778.856194] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.864820] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735463, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.887092] env[63028]: DEBUG oslo_concurrency.lockutils [None req-273a0846-f74e-4cbe-88c7-25ef6a67da40 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 778.887413] env[63028]: DEBUG oslo_vmware.api [None req-be8438c3-6eec-42fd-9020-8898f3ea9278 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] Task: {'id': task-2735460, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.537729} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.887650] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-be8438c3-6eec-42fd-9020-8898f3ea9278 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] 7e914e49-0d70-4024-940b-ad2a15e9dff7/7e914e49-0d70-4024-940b-ad2a15e9dff7.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 778.887892] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-be8438c3-6eec-42fd-9020-8898f3ea9278 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] [instance: 7e914e49-0d70-4024-940b-ad2a15e9dff7] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 778.888160] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a46540b9-5689-4de4-b25c-54122b185a30 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.897674] env[63028]: DEBUG oslo_vmware.api [None req-be8438c3-6eec-42fd-9020-8898f3ea9278 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] Waiting for the task: (returnval){ [ 778.897674] env[63028]: value = "task-2735464" [ 778.897674] env[63028]: _type = "Task" [ 778.897674] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.901439] env[63028]: DEBUG nova.compute.manager [None req-c668e099-ea76-49a7-8eba-290ebc50fa5f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 514c83d1-4fb1-435c-8c25-aa112c744131] Starting instance... {{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 778.911786] env[63028]: DEBUG oslo_vmware.api [None req-be8438c3-6eec-42fd-9020-8898f3ea9278 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] Task: {'id': task-2735464, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.936033] env[63028]: DEBUG nova.objects.instance [None req-64d57359-6efb-4dbd-9bd5-0b6de0e28336 tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Lazy-loading 'flavor' on Instance uuid d663c2df-ae54-4c50-a70f-e2180700c700 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 779.001130] env[63028]: DEBUG oslo_concurrency.lockutils [None req-093af5c2-3354-4012-ab41-e1a65935f3a3 tempest-ServersTestBootFromVolume-732507099 tempest-ServersTestBootFromVolume-732507099-project-member] Lock "8c7c8713-d5d7-490e-aba5-25d98bfbfaa0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.965s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 779.058067] env[63028]: DEBUG nova.compute.manager [None req-33c2d3e2-8c5f-498e-bb96-3a2d78173ced tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 779.058745] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f07f4d8-b636-4b7d-ba4b-e1ce75bbe77b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.147863] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d602f814-ca27-4ecc-a846-b4d4de9caa30 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Lock "cd11b318-9158-4f1d-8aa8-1c9d565bb5d5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 73.479s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 779.174265] env[63028]: DEBUG oslo_vmware.api [None req-6118af02-15c9-41e2-bf63-dacf4033738f tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2735450, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.369750] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735463, 'name': CreateVM_Task} progress is 99%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.407803] env[63028]: DEBUG oslo_vmware.api [None req-be8438c3-6eec-42fd-9020-8898f3ea9278 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] Task: {'id': task-2735464, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.203552} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.410533] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-be8438c3-6eec-42fd-9020-8898f3ea9278 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] [instance: 7e914e49-0d70-4024-940b-ad2a15e9dff7] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 779.411217] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-715c4887-f13d-4ce5-b8c0-17067fd22c41 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.439399] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-be8438c3-6eec-42fd-9020-8898f3ea9278 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] [instance: 7e914e49-0d70-4024-940b-ad2a15e9dff7] Reconfiguring VM instance instance-00000034 to attach disk [datastore1] 7e914e49-0d70-4024-940b-ad2a15e9dff7/7e914e49-0d70-4024-940b-ad2a15e9dff7.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 779.442580] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c668e099-ea76-49a7-8eba-290ebc50fa5f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 779.444383] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-08a2307a-1d67-4d39-b13d-260c6d04f827 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.458510] env[63028]: DEBUG oslo_concurrency.lockutils [None req-64d57359-6efb-4dbd-9bd5-0b6de0e28336 tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Lock "d663c2df-ae54-4c50-a70f-e2180700c700" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.403s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 779.466634] env[63028]: DEBUG oslo_vmware.api [None req-be8438c3-6eec-42fd-9020-8898f3ea9278 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] Waiting for the task: (returnval){ [ 779.466634] env[63028]: value = "task-2735465" [ 779.466634] env[63028]: _type = "Task" [ 779.466634] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.479716] env[63028]: DEBUG oslo_vmware.api [None req-be8438c3-6eec-42fd-9020-8898f3ea9278 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] Task: {'id': task-2735465, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.572392] env[63028]: INFO nova.compute.manager [None req-33c2d3e2-8c5f-498e-bb96-3a2d78173ced tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] instance snapshotting [ 779.572943] env[63028]: DEBUG nova.objects.instance [None req-33c2d3e2-8c5f-498e-bb96-3a2d78173ced tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Lazy-loading 'flavor' on Instance uuid c06813c4-472d-4bf9-84ec-0d01306bcd48 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 779.650521] env[63028]: DEBUG nova.compute.manager [None req-164a375f-5a7c-48ea-8de2-0424503d2164 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: f4718363-73b2-4016-8849-f75e98259023] Starting instance... {{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 779.678568] env[63028]: DEBUG oslo_vmware.api [None req-6118af02-15c9-41e2-bf63-dacf4033738f tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2735450, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.741554] env[63028]: DEBUG nova.network.neutron [req-d9b04050-f3b2-4b9e-8d8c-e17a578e17c4 req-1f421d08-f20e-4a01-90c9-12577834595d service nova] [instance: 15326f55-2db8-47c3-b1fd-ce8ba1174c79] Updated VIF entry in instance network info cache for port f00fcabf-7289-4128-84ab-c81a45858e92. {{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 779.743691] env[63028]: DEBUG nova.network.neutron [req-d9b04050-f3b2-4b9e-8d8c-e17a578e17c4 req-1f421d08-f20e-4a01-90c9-12577834595d service nova] [instance: 15326f55-2db8-47c3-b1fd-ce8ba1174c79] Updating instance_info_cache with network_info: [{"id": "f00fcabf-7289-4128-84ab-c81a45858e92", "address": "fa:16:3e:e4:62:8a", "network": {"id": "47c482bc-2ff1-431d-8910-0bf36def79a2", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.223", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "96661ac8d4f04d6e97eea4809b444133", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56136ef6-99d7-4562-9a9f-d66fec951c5c", "external-id": "nsx-vlan-transportzone-32", "segmentation_id": 32, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf00fcabf-72", "ovs_interfaceid": "f00fcabf-7289-4128-84ab-c81a45858e92", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 779.880468] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735463, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.982065] env[63028]: DEBUG oslo_vmware.api [None req-be8438c3-6eec-42fd-9020-8898f3ea9278 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] Task: {'id': task-2735465, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.061262] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a347723-6ec7-4d91-910e-65a69318444a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.068449] env[63028]: DEBUG nova.compute.manager [req-060068fb-29b3-448d-a152-cbe02d0a9a3c req-95413f14-b389-4818-942c-87548a3e0c8c service nova] [instance: da88308f-ce62-40af-adae-e38aa506bdd9] Received event network-changed-63f3891b-02ff-4d0f-9fb9-6fff4d3b71b5 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 780.068449] env[63028]: DEBUG nova.compute.manager [req-060068fb-29b3-448d-a152-cbe02d0a9a3c req-95413f14-b389-4818-942c-87548a3e0c8c service nova] [instance: da88308f-ce62-40af-adae-e38aa506bdd9] Refreshing instance network info cache due to event network-changed-63f3891b-02ff-4d0f-9fb9-6fff4d3b71b5. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 780.068569] env[63028]: DEBUG oslo_concurrency.lockutils [req-060068fb-29b3-448d-a152-cbe02d0a9a3c req-95413f14-b389-4818-942c-87548a3e0c8c service nova] Acquiring lock "refresh_cache-da88308f-ce62-40af-adae-e38aa506bdd9" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 780.068631] env[63028]: DEBUG oslo_concurrency.lockutils [req-060068fb-29b3-448d-a152-cbe02d0a9a3c req-95413f14-b389-4818-942c-87548a3e0c8c service nova] Acquired lock "refresh_cache-da88308f-ce62-40af-adae-e38aa506bdd9" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 780.068793] env[63028]: DEBUG nova.network.neutron [req-060068fb-29b3-448d-a152-cbe02d0a9a3c req-95413f14-b389-4818-942c-87548a3e0c8c service nova] [instance: da88308f-ce62-40af-adae-e38aa506bdd9] Refreshing network info cache for port 63f3891b-02ff-4d0f-9fb9-6fff4d3b71b5 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 780.074431] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0659656-a860-4736-9a7b-05173ed38bd8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.082802] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60d43954-e782-41ac-841c-d4cacd8570e9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.117040] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e35fb2a-d46c-4a19-ab15-3db7de48e71b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.137031] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b6c3260-b3ed-4c15-8906-cc5b12982e51 {{(pid=63028) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.143520] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b7c51b4-aeb3-42c4-9acb-50a72b62e471 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.165274] env[63028]: DEBUG nova.compute.provider_tree [None req-3a4e4332-06ce-40fd-b127-c2bc84c3f268 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 780.179029] env[63028]: DEBUG oslo_vmware.api [None req-6118af02-15c9-41e2-bf63-dacf4033738f tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2735450, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.189242] env[63028]: DEBUG oslo_concurrency.lockutils [None req-164a375f-5a7c-48ea-8de2-0424503d2164 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 780.249988] env[63028]: DEBUG oslo_concurrency.lockutils [req-d9b04050-f3b2-4b9e-8d8c-e17a578e17c4 req-1f421d08-f20e-4a01-90c9-12577834595d service nova] Releasing lock "refresh_cache-15326f55-2db8-47c3-b1fd-ce8ba1174c79" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 780.250513] env[63028]: DEBUG nova.compute.manager [req-d9b04050-f3b2-4b9e-8d8c-e17a578e17c4 req-1f421d08-f20e-4a01-90c9-12577834595d service nova] [instance: a4b0d948-d950-414a-b23f-faefa5ab038c] Received event network-vif-deleted-5dce79e8-b6a0-4077-a879-e3221a0a358c {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 780.374925] env[63028]: DEBUG oslo_concurrency.lockutils [None req-810d3462-17db-4731-b651-a3f2b28d92a4 tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Acquiring lock "d663c2df-ae54-4c50-a70f-e2180700c700" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 780.375849] env[63028]: DEBUG oslo_concurrency.lockutils [None req-810d3462-17db-4731-b651-a3f2b28d92a4 tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Lock "d663c2df-ae54-4c50-a70f-e2180700c700" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 780.377725] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735463, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.480638] env[63028]: DEBUG oslo_vmware.api [None req-be8438c3-6eec-42fd-9020-8898f3ea9278 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] Task: {'id': task-2735465, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.670134] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-33c2d3e2-8c5f-498e-bb96-3a2d78173ced tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Creating Snapshot of the VM instance {{(pid=63028) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 780.671140] env[63028]: DEBUG nova.scheduler.client.report [None req-3a4e4332-06ce-40fd-b127-c2bc84c3f268 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 780.674572] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-b6e48893-7133-4576-a180-224c5b34c682 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.687959] env[63028]: DEBUG oslo_vmware.api [None req-6118af02-15c9-41e2-bf63-dacf4033738f tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2735450, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.691663] env[63028]: DEBUG oslo_vmware.api [None req-33c2d3e2-8c5f-498e-bb96-3a2d78173ced tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Waiting for the task: (returnval){ [ 780.691663] env[63028]: value = "task-2735466" [ 780.691663] env[63028]: _type = "Task" [ 780.691663] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 780.704116] env[63028]: DEBUG oslo_vmware.api [None req-33c2d3e2-8c5f-498e-bb96-3a2d78173ced tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2735466, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.840824] env[63028]: DEBUG nova.compute.manager [None req-644d24eb-7e16-443d-ae0d-76156aa87551 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 5982cd5d-abf1-42d4-bb44-8d79de599f11] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 780.846907] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecf16a46-aa81-421d-8807-6fe9406781b1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.873982] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735463, 'name': CreateVM_Task, 'duration_secs': 1.641921} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 780.873982] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 15326f55-2db8-47c3-b1fd-ce8ba1174c79] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 780.873982] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d9ad4531-4478-4b68-8e6a-ce52da2b3c63 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 780.873982] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d9ad4531-4478-4b68-8e6a-ce52da2b3c63 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 780.873982] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d9ad4531-4478-4b68-8e6a-ce52da2b3c63 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 780.878022] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-617784c5-cf8f-4b27-a456-cd44554a0b15 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.878742] env[63028]: INFO nova.compute.manager [None req-810d3462-17db-4731-b651-a3f2b28d92a4 tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] [instance: d663c2df-ae54-4c50-a70f-e2180700c700] Detaching volume fdc60726-6ead-4fa7-9b0a-56a0c631789c [ 780.882506] env[63028]: DEBUG oslo_vmware.api [None req-d9ad4531-4478-4b68-8e6a-ce52da2b3c63 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Waiting for the task: (returnval){ [ 780.882506] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5242fc5a-0268-b04b-2ea3-9c73c42c5f36" [ 780.882506] env[63028]: _type = "Task" [ 780.882506] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 780.895364] env[63028]: DEBUG oslo_vmware.api [None req-d9ad4531-4478-4b68-8e6a-ce52da2b3c63 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5242fc5a-0268-b04b-2ea3-9c73c42c5f36, 'name': SearchDatastore_Task, 'duration_secs': 0.010934} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 780.895763] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d9ad4531-4478-4b68-8e6a-ce52da2b3c63 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 780.895869] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-d9ad4531-4478-4b68-8e6a-ce52da2b3c63 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] [instance: 15326f55-2db8-47c3-b1fd-ce8ba1174c79] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 780.896103] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d9ad4531-4478-4b68-8e6a-ce52da2b3c63 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 780.896251] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d9ad4531-4478-4b68-8e6a-ce52da2b3c63 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 780.896657] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-d9ad4531-4478-4b68-8e6a-ce52da2b3c63 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 780.896939] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bddb9254-aa80-4588-b55b-fbf228a8de91 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.908329] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-d9ad4531-4478-4b68-8e6a-ce52da2b3c63 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 780.908597] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-d9ad4531-4478-4b68-8e6a-ce52da2b3c63 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Folder [datastore2] 
devstack-image-cache_base created. {{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 780.909684] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fb9cedb3-b7d7-4fbd-b1c3-cbdd58054906 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.915658] env[63028]: DEBUG oslo_vmware.api [None req-d9ad4531-4478-4b68-8e6a-ce52da2b3c63 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Waiting for the task: (returnval){ [ 780.915658] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52cfbdf4-9692-3010-d737-3bafdcf91d99" [ 780.915658] env[63028]: _type = "Task" [ 780.915658] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 780.920084] env[63028]: INFO nova.virt.block_device [None req-810d3462-17db-4731-b651-a3f2b28d92a4 tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] [instance: d663c2df-ae54-4c50-a70f-e2180700c700] Attempting to driver detach volume fdc60726-6ead-4fa7-9b0a-56a0c631789c from mountpoint /dev/sdb [ 780.920303] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-810d3462-17db-4731-b651-a3f2b28d92a4 tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] [instance: d663c2df-ae54-4c50-a70f-e2180700c700] Volume detach. Driver type: vmdk {{(pid=63028) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 780.920485] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-810d3462-17db-4731-b651-a3f2b28d92a4 tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] [instance: d663c2df-ae54-4c50-a70f-e2180700c700] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-550729', 'volume_id': 'fdc60726-6ead-4fa7-9b0a-56a0c631789c', 'name': 'volume-fdc60726-6ead-4fa7-9b0a-56a0c631789c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'd663c2df-ae54-4c50-a70f-e2180700c700', 'attached_at': '', 'detached_at': '', 'volume_id': 'fdc60726-6ead-4fa7-9b0a-56a0c631789c', 'serial': 'fdc60726-6ead-4fa7-9b0a-56a0c631789c'} {{(pid=63028) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 780.921321] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6675034-9682-4d4c-80ea-5bee205e157d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.927090] env[63028]: DEBUG oslo_vmware.api [None req-d9ad4531-4478-4b68-8e6a-ce52da2b3c63 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52cfbdf4-9692-3010-d737-3bafdcf91d99, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.947157] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df2318b3-a775-447c-acac-303a37e3f7b4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.955422] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fff0070c-f743-4c9d-b397-74bba146c89b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.982748] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92990cc3-ad4a-4f1e-b396-106212c3754e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.990677] env[63028]: DEBUG oslo_vmware.api [None req-be8438c3-6eec-42fd-9020-8898f3ea9278 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] Task: {'id': task-2735465, 'name': ReconfigVM_Task, 'duration_secs': 1.233547} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.002727] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-be8438c3-6eec-42fd-9020-8898f3ea9278 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] [instance: 7e914e49-0d70-4024-940b-ad2a15e9dff7] Reconfigured VM instance instance-00000034 to attach disk [datastore1] 7e914e49-0d70-4024-940b-ad2a15e9dff7/7e914e49-0d70-4024-940b-ad2a15e9dff7.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 781.004300] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-810d3462-17db-4731-b651-a3f2b28d92a4 tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] The volume has not been displaced from its original location: [datastore1] volume-fdc60726-6ead-4fa7-9b0a-56a0c631789c/volume-fdc60726-6ead-4fa7-9b0a-56a0c631789c.vmdk. No consolidation needed. 
{{(pid=63028) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 781.009367] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-810d3462-17db-4731-b651-a3f2b28d92a4 tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] [instance: d663c2df-ae54-4c50-a70f-e2180700c700] Reconfiguring VM instance instance-0000000c to detach disk 2001 {{(pid=63028) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 781.009656] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fd55a296-9472-495e-9c42-0a69dd855d24 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.011289] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-da2978b6-cb5c-48cd-b870-75f6a684dd75 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.030941] env[63028]: DEBUG oslo_vmware.api [None req-810d3462-17db-4731-b651-a3f2b28d92a4 tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Waiting for the task: (returnval){ [ 781.030941] env[63028]: value = "task-2735468" [ 781.030941] env[63028]: _type = "Task" [ 781.030941] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.032429] env[63028]: DEBUG oslo_vmware.api [None req-be8438c3-6eec-42fd-9020-8898f3ea9278 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] Waiting for the task: (returnval){ [ 781.032429] env[63028]: value = "task-2735467" [ 781.032429] env[63028]: _type = "Task" [ 781.032429] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.044899] env[63028]: DEBUG oslo_vmware.api [None req-be8438c3-6eec-42fd-9020-8898f3ea9278 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] Task: {'id': task-2735467, 'name': Rename_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.049291] env[63028]: DEBUG oslo_vmware.api [None req-810d3462-17db-4731-b651-a3f2b28d92a4 tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Task: {'id': task-2735468, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.144620] env[63028]: DEBUG nova.network.neutron [req-060068fb-29b3-448d-a152-cbe02d0a9a3c req-95413f14-b389-4818-942c-87548a3e0c8c service nova] [instance: da88308f-ce62-40af-adae-e38aa506bdd9] Updated VIF entry in instance network info cache for port 63f3891b-02ff-4d0f-9fb9-6fff4d3b71b5. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 781.145008] env[63028]: DEBUG nova.network.neutron [req-060068fb-29b3-448d-a152-cbe02d0a9a3c req-95413f14-b389-4818-942c-87548a3e0c8c service nova] [instance: da88308f-ce62-40af-adae-e38aa506bdd9] Updating instance_info_cache with network_info: [{"id": "63f3891b-02ff-4d0f-9fb9-6fff4d3b71b5", "address": "fa:16:3e:4f:5f:59", "network": {"id": "e3a8845b-9fc6-46bd-8272-501135c875ad", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-2047431351-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.206", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91a3f78ba4514500bfd4ed81b74526e3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f68ebd2a-3c68-48db-8c32-8a01497fc2e7", "external-id": "nsx-vlan-transportzone-49", "segmentation_id": 49, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap63f3891b-02", "ovs_interfaceid": "63f3891b-02ff-4d0f-9fb9-6fff4d3b71b5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 781.181742] env[63028]: DEBUG oslo_concurrency.lockutils [None req-3a4e4332-06ce-40fd-b127-c2bc84c3f268 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.723s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 781.182268] env[63028]: DEBUG nova.compute.manager [None req-3a4e4332-06ce-40fd-b127-c2bc84c3f268 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Start building networks asynchronously for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 781.185336] env[63028]: DEBUG oslo_vmware.api [None req-6118af02-15c9-41e2-bf63-dacf4033738f tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2735450, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.186346] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f13d02a8-8d3e-4b78-b445-d8381137a85a tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.279s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 781.186346] env[63028]: DEBUG nova.objects.instance [None req-f13d02a8-8d3e-4b78-b445-d8381137a85a tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Lazy-loading 'resources' on Instance uuid e2d39c43-6666-4fda-b8e2-485399c59e46 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 781.200423] env[63028]: DEBUG oslo_vmware.api [None req-33c2d3e2-8c5f-498e-bb96-3a2d78173ced tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2735466, 'name': CreateSnapshot_Task, 'duration_secs': 0.468743} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.200719] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-33c2d3e2-8c5f-498e-bb96-3a2d78173ced tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Created Snapshot of the VM instance {{(pid=63028) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 781.201493] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9abb325-6b55-4ddb-aeb8-6d6a3d351d98 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.320427] env[63028]: DEBUG nova.compute.manager [None req-dc38d46c-678e-438a-bcbc-898ad4f285df tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: cd11b318-9158-4f1d-8aa8-1c9d565bb5d5] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 781.322413] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36e3a465-23b6-403e-a308-95e869b32fa7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.359515] env[63028]: INFO nova.compute.manager [None req-644d24eb-7e16-443d-ae0d-76156aa87551 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 5982cd5d-abf1-42d4-bb44-8d79de599f11] instance snapshotting [ 781.363908] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c97f5aa-af2f-430b-953a-96ef9a1f1eb7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.386626] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4be1838c-c688-4021-8bb7-5bf99c139366 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.428981] env[63028]: DEBUG oslo_vmware.api [None req-d9ad4531-4478-4b68-8e6a-ce52da2b3c63 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Task: {'id': 
session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52cfbdf4-9692-3010-d737-3bafdcf91d99, 'name': SearchDatastore_Task, 'duration_secs': 0.009971} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.428981] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7fa12967-088e-4e83-a056-c52558211056 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.432817] env[63028]: DEBUG oslo_vmware.api [None req-d9ad4531-4478-4b68-8e6a-ce52da2b3c63 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Waiting for the task: (returnval){ [ 781.432817] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]526166f1-0e55-2212-704a-b4d5ab3e65f2" [ 781.432817] env[63028]: _type = "Task" [ 781.432817] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.441178] env[63028]: DEBUG oslo_vmware.api [None req-d9ad4531-4478-4b68-8e6a-ce52da2b3c63 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]526166f1-0e55-2212-704a-b4d5ab3e65f2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.557515] env[63028]: DEBUG oslo_vmware.api [None req-810d3462-17db-4731-b651-a3f2b28d92a4 tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Task: {'id': task-2735468, 'name': ReconfigVM_Task, 'duration_secs': 0.353823} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.558162] env[63028]: DEBUG oslo_vmware.api [None req-be8438c3-6eec-42fd-9020-8898f3ea9278 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] Task: {'id': task-2735467, 'name': Rename_Task} progress is 99%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.558468] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-810d3462-17db-4731-b651-a3f2b28d92a4 tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] [instance: d663c2df-ae54-4c50-a70f-e2180700c700] Reconfigured VM instance instance-0000000c to detach disk 2001 {{(pid=63028) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 781.563515] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1950ba7d-b2e0-437f-8715-3c9648f50ade {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.580227] env[63028]: DEBUG oslo_vmware.api [None req-810d3462-17db-4731-b651-a3f2b28d92a4 tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Waiting for the task: (returnval){ [ 781.580227] env[63028]: value = "task-2735469" [ 781.580227] env[63028]: _type = "Task" [ 781.580227] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.592014] env[63028]: DEBUG oslo_vmware.api [None req-810d3462-17db-4731-b651-a3f2b28d92a4 tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Task: {'id': task-2735469, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.649094] env[63028]: DEBUG oslo_concurrency.lockutils [req-060068fb-29b3-448d-a152-cbe02d0a9a3c req-95413f14-b389-4818-942c-87548a3e0c8c service nova] Releasing lock "refresh_cache-da88308f-ce62-40af-adae-e38aa506bdd9" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 781.682804] env[63028]: DEBUG oslo_vmware.api [None req-6118af02-15c9-41e2-bf63-dacf4033738f tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2735450, 'name': ReconfigVM_Task} progress is 18%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.687445] env[63028]: DEBUG nova.compute.utils [None req-3a4e4332-06ce-40fd-b127-c2bc84c3f268 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 781.688866] env[63028]: DEBUG nova.compute.manager [None req-3a4e4332-06ce-40fd-b127-c2bc84c3f268 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Allocating IP information in the background. {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 781.689084] env[63028]: DEBUG nova.network.neutron [None req-3a4e4332-06ce-40fd-b127-c2bc84c3f268 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 781.720824] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-33c2d3e2-8c5f-498e-bb96-3a2d78173ced tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Creating linked-clone VM from snapshot {{(pid=63028) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 781.726982] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-9bcb2556-119c-4285-bf71-2e03cc59647d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.739969] env[63028]: DEBUG oslo_vmware.api [None req-33c2d3e2-8c5f-498e-bb96-3a2d78173ced tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Waiting for the task: (returnval){ [ 781.739969] env[63028]: value = "task-2735470" [ 781.739969] env[63028]: _type = "Task" [ 781.739969] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.751239] env[63028]: DEBUG oslo_vmware.api [None req-33c2d3e2-8c5f-498e-bb96-3a2d78173ced tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2735470, 'name': CloneVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.831138] env[63028]: DEBUG nova.policy [None req-3a4e4332-06ce-40fd-b127-c2bc84c3f268 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '743cd51155e0498bb9b381d243afb624', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '11332c2adbdc41928d4bf084435e2037', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 781.835801] env[63028]: INFO nova.compute.manager [None req-dc38d46c-678e-438a-bcbc-898ad4f285df tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: cd11b318-9158-4f1d-8aa8-1c9d565bb5d5] instance snapshotting [ 781.845821] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ae28c2b-be63-49c7-82b9-57d6b1e75298 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.874985] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c9dc3c4-9586-45ce-8939-56c2f99f12dd {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.899773] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-644d24eb-7e16-443d-ae0d-76156aa87551 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 5982cd5d-abf1-42d4-bb44-8d79de599f11] Creating Snapshot of the VM instance {{(pid=63028) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 781.900390] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-27eedefb-22f8-4f60-8bd0-746cdd92ffcd {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.909896] env[63028]: DEBUG oslo_vmware.api [None req-644d24eb-7e16-443d-ae0d-76156aa87551 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Waiting for the task: (returnval){ [ 781.909896] env[63028]: value = "task-2735471" [ 781.909896] env[63028]: _type = "Task" [ 781.909896] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.932833] env[63028]: DEBUG oslo_vmware.api [None req-644d24eb-7e16-443d-ae0d-76156aa87551 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Task: {'id': task-2735471, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.951674] env[63028]: DEBUG oslo_vmware.api [None req-d9ad4531-4478-4b68-8e6a-ce52da2b3c63 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]526166f1-0e55-2212-704a-b4d5ab3e65f2, 'name': SearchDatastore_Task, 'duration_secs': 0.010619} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.955044] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d9ad4531-4478-4b68-8e6a-ce52da2b3c63 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 781.955402] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9ad4531-4478-4b68-8e6a-ce52da2b3c63 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] 15326f55-2db8-47c3-b1fd-ce8ba1174c79/15326f55-2db8-47c3-b1fd-ce8ba1174c79.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 781.955877] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6831fc7b-d551-487e-aad2-6c5a1f738f14 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.963913] env[63028]: DEBUG oslo_vmware.api [None req-d9ad4531-4478-4b68-8e6a-ce52da2b3c63 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Waiting for the task: (returnval){ [ 781.963913] env[63028]: value = "task-2735472" [ 781.963913] env[63028]: _type = "Task" [ 781.963913] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.973881] env[63028]: DEBUG oslo_vmware.api [None req-d9ad4531-4478-4b68-8e6a-ce52da2b3c63 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Task: {'id': task-2735472, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.055630] env[63028]: DEBUG oslo_vmware.api [None req-be8438c3-6eec-42fd-9020-8898f3ea9278 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] Task: {'id': task-2735467, 'name': Rename_Task} progress is 99%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.097917] env[63028]: DEBUG oslo_vmware.api [None req-810d3462-17db-4731-b651-a3f2b28d92a4 tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Task: {'id': task-2735469, 'name': ReconfigVM_Task, 'duration_secs': 0.211785} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 782.097917] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-810d3462-17db-4731-b651-a3f2b28d92a4 tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] [instance: d663c2df-ae54-4c50-a70f-e2180700c700] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-550729', 'volume_id': 'fdc60726-6ead-4fa7-9b0a-56a0c631789c', 'name': 'volume-fdc60726-6ead-4fa7-9b0a-56a0c631789c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'd663c2df-ae54-4c50-a70f-e2180700c700', 'attached_at': '', 'detached_at': '', 'volume_id': 'fdc60726-6ead-4fa7-9b0a-56a0c631789c', 'serial': 'fdc60726-6ead-4fa7-9b0a-56a0c631789c'} {{(pid=63028) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 782.183500] env[63028]: DEBUG oslo_vmware.api [None req-6118af02-15c9-41e2-bf63-dacf4033738f tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2735450, 'name': ReconfigVM_Task, 'duration_secs': 5.813299} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 782.183787] env[63028]: DEBUG oslo_concurrency.lockutils [None req-6118af02-15c9-41e2-bf63-dacf4033738f tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Releasing lock "5a340e31-678c-437e-aa4e-07d5d9f4334f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 782.184014] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-6118af02-15c9-41e2-bf63-dacf4033738f tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: 5a340e31-678c-437e-aa4e-07d5d9f4334f] Reconfigured VM to detach interface {{(pid=63028) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 782.198054] env[63028]: DEBUG nova.compute.manager [None req-3a4e4332-06ce-40fd-b127-c2bc84c3f268 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Start building block device mappings for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 782.256515] env[63028]: DEBUG oslo_vmware.api [None req-33c2d3e2-8c5f-498e-bb96-3a2d78173ced tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2735470, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.364681] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-661a4a5a-4755-4869-b5b3-116b8a7a54da {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.375561] env[63028]: DEBUG nova.network.neutron [None req-3a4e4332-06ce-40fd-b127-c2bc84c3f268 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Successfully created port: 2e2d8403-826c-4e24-ba3c-123d444d1fdc {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 782.379271] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64e6925f-a762-4fc1-8749-0f82ec355755 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.422170] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-dc38d46c-678e-438a-bcbc-898ad4f285df tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: cd11b318-9158-4f1d-8aa8-1c9d565bb5d5] Creating Snapshot of the VM instance {{(pid=63028) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 782.423574] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-07c86d88-6cb9-409f-a117-b4476c4de176 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.431036] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26ee0ee0-e875-4826-96f3-855b5c301602 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.448126] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee197cc6-37f2-4793-a7b8-274808b9168d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.454263] env[63028]: DEBUG oslo_vmware.api [None req-644d24eb-7e16-443d-ae0d-76156aa87551 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Task: {'id': task-2735471, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.454883] env[63028]: DEBUG oslo_vmware.api [None req-dc38d46c-678e-438a-bcbc-898ad4f285df tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Waiting for the task: (returnval){ [ 782.454883] env[63028]: value = "task-2735473" [ 782.454883] env[63028]: _type = "Task" [ 782.454883] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 782.469709] env[63028]: DEBUG nova.compute.provider_tree [None req-f13d02a8-8d3e-4b78-b445-d8381137a85a tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 782.481471] env[63028]: DEBUG oslo_vmware.api [None req-dc38d46c-678e-438a-bcbc-898ad4f285df tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735473, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.488373] env[63028]: DEBUG oslo_vmware.api [None req-d9ad4531-4478-4b68-8e6a-ce52da2b3c63 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Task: {'id': task-2735472, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.504791} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 782.488693] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9ad4531-4478-4b68-8e6a-ce52da2b3c63 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] 15326f55-2db8-47c3-b1fd-ce8ba1174c79/15326f55-2db8-47c3-b1fd-ce8ba1174c79.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 782.489013] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-d9ad4531-4478-4b68-8e6a-ce52da2b3c63 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] [instance: 15326f55-2db8-47c3-b1fd-ce8ba1174c79] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 782.489269] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1b76b788-68ab-44ae-9972-e1fe667493d1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.498824] env[63028]: DEBUG oslo_vmware.api [None req-d9ad4531-4478-4b68-8e6a-ce52da2b3c63 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Waiting for the task: (returnval){ [ 782.498824] env[63028]: value = "task-2735474" [ 782.498824] env[63028]: _type = "Task" [ 782.498824] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 782.510681] env[63028]: DEBUG oslo_vmware.api [None req-d9ad4531-4478-4b68-8e6a-ce52da2b3c63 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Task: {'id': task-2735474, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.554681] env[63028]: DEBUG oslo_vmware.api [None req-be8438c3-6eec-42fd-9020-8898f3ea9278 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] Task: {'id': task-2735467, 'name': Rename_Task, 'duration_secs': 1.147693} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 782.555407] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-be8438c3-6eec-42fd-9020-8898f3ea9278 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] [instance: 7e914e49-0d70-4024-940b-ad2a15e9dff7] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 782.555546] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f7370728-c5a6-4175-b8d1-c42812071cf9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.565536] env[63028]: DEBUG oslo_vmware.api [None req-be8438c3-6eec-42fd-9020-8898f3ea9278 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] Waiting for the task: (returnval){ [ 782.565536] env[63028]: value = "task-2735475" [ 782.565536] env[63028]: _type = "Task" [ 782.565536] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 782.577014] env[63028]: DEBUG oslo_vmware.api [None req-be8438c3-6eec-42fd-9020-8898f3ea9278 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] Task: {'id': task-2735475, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.684377] env[63028]: DEBUG nova.objects.instance [None req-810d3462-17db-4731-b651-a3f2b28d92a4 tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Lazy-loading 'flavor' on Instance uuid d663c2df-ae54-4c50-a70f-e2180700c700 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 782.721496] env[63028]: DEBUG nova.compute.manager [req-22c3900c-e998-4c3d-a097-04bfc8008e2d req-e10eb3be-9f4a-447e-ac91-65f4c05c61b6 service nova] [instance: 5a340e31-678c-437e-aa4e-07d5d9f4334f] Received event network-vif-deleted-192a1d7c-32a1-4b52-944b-c6c3b6c52f93 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 782.721705] env[63028]: INFO nova.compute.manager [req-22c3900c-e998-4c3d-a097-04bfc8008e2d req-e10eb3be-9f4a-447e-ac91-65f4c05c61b6 service nova] [instance: 5a340e31-678c-437e-aa4e-07d5d9f4334f] Neutron deleted interface 192a1d7c-32a1-4b52-944b-c6c3b6c52f93; detaching it from the instance and deleting it from the info cache [ 782.721967] env[63028]: DEBUG nova.network.neutron [req-22c3900c-e998-4c3d-a097-04bfc8008e2d req-e10eb3be-9f4a-447e-ac91-65f4c05c61b6 service nova] [instance: 5a340e31-678c-437e-aa4e-07d5d9f4334f] Updating instance_info_cache with network_info: [{"id": "39f160e2-809e-4b2c-9424-70448b807385", "address": "fa:16:3e:f5:5d:b4", "network": {"id": "c2f1496c-e3fd-43db-a032-12cdacdb4e46", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1287620143-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.199", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "25a6457f62d149629c09589feb1a550c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap39f160e2-80", "ovs_interfaceid": "39f160e2-809e-4b2c-9424-70448b807385", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 782.758928] env[63028]: DEBUG oslo_vmware.api [None req-33c2d3e2-8c5f-498e-bb96-3a2d78173ced tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2735470, 'name': CloneVM_Task} progress is 94%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.937748] env[63028]: DEBUG oslo_vmware.api [None req-644d24eb-7e16-443d-ae0d-76156aa87551 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Task: {'id': task-2735471, 'name': CreateSnapshot_Task, 'duration_secs': 0.789522} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 782.938097] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-644d24eb-7e16-443d-ae0d-76156aa87551 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 5982cd5d-abf1-42d4-bb44-8d79de599f11] Created Snapshot of the VM instance {{(pid=63028) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 782.938977] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06763388-8527-4c4f-8487-d3227690a287 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.967255] env[63028]: DEBUG oslo_vmware.api [None req-dc38d46c-678e-438a-bcbc-898ad4f285df tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735473, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.977106] env[63028]: DEBUG nova.scheduler.client.report [None req-f13d02a8-8d3e-4b78-b445-d8381137a85a tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 783.012824] env[63028]: DEBUG oslo_vmware.api [None req-d9ad4531-4478-4b68-8e6a-ce52da2b3c63 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Task: {'id': task-2735474, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071772} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 783.012824] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-d9ad4531-4478-4b68-8e6a-ce52da2b3c63 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] [instance: 15326f55-2db8-47c3-b1fd-ce8ba1174c79] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 783.013627] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c1068f3-6d5f-40bb-85a8-c2d3716cdb4c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.046262] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-d9ad4531-4478-4b68-8e6a-ce52da2b3c63 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] [instance: 15326f55-2db8-47c3-b1fd-ce8ba1174c79] Reconfiguring VM instance instance-00000035 to attach disk [datastore2] 15326f55-2db8-47c3-b1fd-ce8ba1174c79/15326f55-2db8-47c3-b1fd-ce8ba1174c79.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 783.046519] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4a0fd849-031d-4980-b36e-568f6907b920 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.071391] env[63028]: DEBUG oslo_vmware.api [None req-d9ad4531-4478-4b68-8e6a-ce52da2b3c63 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Waiting for the task: (returnval){ [ 783.071391] env[63028]: value = "task-2735476" [ 783.071391] env[63028]: _type = "Task" [ 783.071391] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 783.080374] env[63028]: DEBUG oslo_vmware.api [None req-be8438c3-6eec-42fd-9020-8898f3ea9278 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] Task: {'id': task-2735475, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.100242] env[63028]: DEBUG oslo_vmware.api [None req-d9ad4531-4478-4b68-8e6a-ce52da2b3c63 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Task: {'id': task-2735476, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.212118] env[63028]: DEBUG nova.compute.manager [None req-3a4e4332-06ce-40fd-b127-c2bc84c3f268 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Start spawning the instance on the hypervisor. 
{{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 783.224507] env[63028]: DEBUG oslo_concurrency.lockutils [req-22c3900c-e998-4c3d-a097-04bfc8008e2d req-e10eb3be-9f4a-447e-ac91-65f4c05c61b6 service nova] Acquiring lock "5a340e31-678c-437e-aa4e-07d5d9f4334f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 783.224685] env[63028]: DEBUG oslo_concurrency.lockutils [req-22c3900c-e998-4c3d-a097-04bfc8008e2d req-e10eb3be-9f4a-447e-ac91-65f4c05c61b6 service nova] Acquired lock "5a340e31-678c-437e-aa4e-07d5d9f4334f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 783.225605] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c18b3075-1e27-46fb-89ca-9a5c7fe9f4b7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.252553] env[63028]: DEBUG oslo_concurrency.lockutils [req-22c3900c-e998-4c3d-a097-04bfc8008e2d req-e10eb3be-9f4a-447e-ac91-65f4c05c61b6 service nova] Releasing lock "5a340e31-678c-437e-aa4e-07d5d9f4334f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 783.252907] env[63028]: WARNING nova.compute.manager [req-22c3900c-e998-4c3d-a097-04bfc8008e2d req-e10eb3be-9f4a-447e-ac91-65f4c05c61b6 service nova] [instance: 5a340e31-678c-437e-aa4e-07d5d9f4334f] Detach interface failed, port_id=192a1d7c-32a1-4b52-944b-c6c3b6c52f93, reason: No device with interface-id 192a1d7c-32a1-4b52-944b-c6c3b6c52f93 exists on VM: nova.exception.NotFound: No device with interface-id 192a1d7c-32a1-4b52-944b-c6c3b6c52f93 exists on VM [ 783.270547] env[63028]: DEBUG nova.virt.hardware [None req-3a4e4332-06ce-40fd-b127-c2bc84c3f268 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 783.270547] env[63028]: DEBUG nova.virt.hardware [None req-3a4e4332-06ce-40fd-b127-c2bc84c3f268 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 783.270547] env[63028]: DEBUG nova.virt.hardware [None req-3a4e4332-06ce-40fd-b127-c2bc84c3f268 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 783.270547] env[63028]: DEBUG nova.virt.hardware [None 
req-3a4e4332-06ce-40fd-b127-c2bc84c3f268 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 783.270547] env[63028]: DEBUG nova.virt.hardware [None req-3a4e4332-06ce-40fd-b127-c2bc84c3f268 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 783.270547] env[63028]: DEBUG nova.virt.hardware [None req-3a4e4332-06ce-40fd-b127-c2bc84c3f268 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 783.270547] env[63028]: DEBUG nova.virt.hardware [None req-3a4e4332-06ce-40fd-b127-c2bc84c3f268 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 783.270547] env[63028]: DEBUG nova.virt.hardware [None req-3a4e4332-06ce-40fd-b127-c2bc84c3f268 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 783.270547] env[63028]: DEBUG nova.virt.hardware [None req-3a4e4332-06ce-40fd-b127-c2bc84c3f268 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 783.270547] env[63028]: DEBUG nova.virt.hardware [None req-3a4e4332-06ce-40fd-b127-c2bc84c3f268 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 783.270547] env[63028]: DEBUG nova.virt.hardware [None req-3a4e4332-06ce-40fd-b127-c2bc84c3f268 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 783.273451] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54054e98-f6b8-406f-85fd-9656ec7a5d5a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.281020] env[63028]: DEBUG oslo_vmware.api [None req-33c2d3e2-8c5f-498e-bb96-3a2d78173ced tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2735470, 'name': CloneVM_Task} progress is 100%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.284469] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eca0af96-57ec-4603-a706-f4126775753f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.458179] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-644d24eb-7e16-443d-ae0d-76156aa87551 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 5982cd5d-abf1-42d4-bb44-8d79de599f11] Creating linked-clone VM from snapshot {{(pid=63028) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 783.458629] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-4c9444b5-ba36-4e97-9d36-a663c62bf46f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.472693] env[63028]: DEBUG oslo_vmware.api [None req-dc38d46c-678e-438a-bcbc-898ad4f285df tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735473, 'name': CreateSnapshot_Task, 'duration_secs': 0.801254} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 783.474198] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-dc38d46c-678e-438a-bcbc-898ad4f285df tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: cd11b318-9158-4f1d-8aa8-1c9d565bb5d5] Created Snapshot of the VM instance {{(pid=63028) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 783.474684] env[63028]: DEBUG oslo_vmware.api [None req-644d24eb-7e16-443d-ae0d-76156aa87551 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Waiting for the task: (returnval){ [ 783.474684] env[63028]: value = "task-2735477" [ 783.474684] env[63028]: _type = "Task" [ 783.474684] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 783.475654] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-169da51c-b2cd-4894-a042-ae35c871391c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.483890] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f13d02a8-8d3e-4b78-b445-d8381137a85a tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.298s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 783.485935] env[63028]: DEBUG oslo_concurrency.lockutils [None req-efa009e1-31b4-4810-870c-cc093d5cbf71 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.652s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 783.487584] env[63028]: INFO nova.compute.claims [None req-efa009e1-31b4-4810-870c-cc093d5cbf71 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: 52b19182-a7e2-4461-b4eb-e6cd8a30024e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 783.501033] env[63028]: DEBUG oslo_vmware.api [None req-644d24eb-7e16-443d-ae0d-76156aa87551 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Task: {'id': task-2735477, 'name': CloneVM_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.512159] env[63028]: INFO nova.scheduler.client.report [None req-f13d02a8-8d3e-4b78-b445-d8381137a85a tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Deleted allocations for instance e2d39c43-6666-4fda-b8e2-485399c59e46 [ 783.580947] env[63028]: DEBUG oslo_vmware.api [None req-be8438c3-6eec-42fd-9020-8898f3ea9278 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] Task: {'id': task-2735475, 'name': PowerOnVM_Task, 'duration_secs': 0.701514} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 783.581343] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-be8438c3-6eec-42fd-9020-8898f3ea9278 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] [instance: 7e914e49-0d70-4024-940b-ad2a15e9dff7] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 783.581704] env[63028]: INFO nova.compute.manager [None req-be8438c3-6eec-42fd-9020-8898f3ea9278 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] [instance: 7e914e49-0d70-4024-940b-ad2a15e9dff7] Took 10.32 seconds to spawn the instance on the hypervisor. 
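The 'Acquiring lock ... / Lock ... acquired ... waited Ns / Lock ... released ... held Ns' DEBUG entries throughout this log (for example the "compute_resources" lock held 2.723s by ResourceTracker.instance_claim and waited on for 28.279s by ResourceTracker.update_usage above) are emitted by oslo.concurrency's lockutils wrapper, not by Nova's own code. The following is a minimal, self-contained sketch of that pattern, included for illustration only and not taken from the Nova source: the lock name "compute_resources" and the instance UUID are copied from the log, while claim_resources() is a hypothetical stand-in for the guarded work (assumes the oslo.concurrency package is installed).

    import logging
    from oslo_concurrency import lockutils

    # Surface lockutils' own DEBUG lines ("Acquiring lock ...",
    # "Lock ... acquired ... waited ...", "Lock ... released ... held ...").
    logging.basicConfig(level=logging.DEBUG)

    @lockutils.synchronized('compute_resources')
    def claim_resources(instance_uuid):
        # Hypothetical critical section: runs with the named in-process
        # semaphore held; the decorator's wrapper logs how long the caller
        # waited for the lock and how long it was held, like the entries above.
        return 'claimed resources for %s' % instance_uuid

    if __name__ == '__main__':
        print(claim_resources('85aafadb-81d6-4687-aed1-fbe829e5f95f'))

The same module also exposes lockutils.lock(name) as a context manager, which is what produces the explicit 'Acquiring lock ... / Releasing lock ...' pairs (lockutils.py:310/313/331) around the datastore image-cache and refresh_cache entries in this section.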
[ 783.582259] env[63028]: DEBUG nova.compute.manager [None req-be8438c3-6eec-42fd-9020-8898f3ea9278 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] [instance: 7e914e49-0d70-4024-940b-ad2a15e9dff7] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 783.582870] env[63028]: DEBUG oslo_concurrency.lockutils [None req-6118af02-15c9-41e2-bf63-dacf4033738f tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Acquiring lock "refresh_cache-5a340e31-678c-437e-aa4e-07d5d9f4334f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 783.583034] env[63028]: DEBUG oslo_concurrency.lockutils [None req-6118af02-15c9-41e2-bf63-dacf4033738f tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Acquired lock "refresh_cache-5a340e31-678c-437e-aa4e-07d5d9f4334f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 783.583588] env[63028]: DEBUG nova.network.neutron [None req-6118af02-15c9-41e2-bf63-dacf4033738f tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: 5a340e31-678c-437e-aa4e-07d5d9f4334f] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 783.586220] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a801db8d-8aa6-442d-9f23-41ab2e411528 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.593398] env[63028]: DEBUG oslo_vmware.api [None req-d9ad4531-4478-4b68-8e6a-ce52da2b3c63 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Task: {'id': task-2735476, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.642707] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7853024a-9db9-44e7-be13-cc5f0e4f4703 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Acquiring lock "5a340e31-678c-437e-aa4e-07d5d9f4334f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 783.643014] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7853024a-9db9-44e7-be13-cc5f0e4f4703 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Lock "5a340e31-678c-437e-aa4e-07d5d9f4334f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 783.643227] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7853024a-9db9-44e7-be13-cc5f0e4f4703 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Acquiring lock "5a340e31-678c-437e-aa4e-07d5d9f4334f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 783.643636] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7853024a-9db9-44e7-be13-cc5f0e4f4703 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Lock "5a340e31-678c-437e-aa4e-07d5d9f4334f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 783.643855] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7853024a-9db9-44e7-be13-cc5f0e4f4703 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Lock "5a340e31-678c-437e-aa4e-07d5d9f4334f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 783.646256] env[63028]: INFO nova.compute.manager [None req-7853024a-9db9-44e7-be13-cc5f0e4f4703 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: 5a340e31-678c-437e-aa4e-07d5d9f4334f] Terminating instance [ 783.693101] env[63028]: DEBUG oslo_concurrency.lockutils [None req-810d3462-17db-4731-b651-a3f2b28d92a4 tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Lock "d663c2df-ae54-4c50-a70f-e2180700c700" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.318s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 783.765619] env[63028]: DEBUG oslo_vmware.api [None req-33c2d3e2-8c5f-498e-bb96-3a2d78173ced tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2735470, 'name': CloneVM_Task, 'duration_secs': 1.690785} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 783.765923] env[63028]: INFO nova.virt.vmwareapi.vmops [None req-33c2d3e2-8c5f-498e-bb96-3a2d78173ced tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Created linked-clone VM from snapshot [ 783.766784] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9478fb9-0fdf-44a4-ade0-857d98e4308c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.779360] env[63028]: DEBUG nova.virt.vmwareapi.images [None req-33c2d3e2-8c5f-498e-bb96-3a2d78173ced tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Uploading image 40205a3b-c684-4015-938e-72089044b955 {{(pid=63028) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 783.807448] env[63028]: DEBUG oslo_vmware.rw_handles [None req-33c2d3e2-8c5f-498e-bb96-3a2d78173ced tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 783.807448] env[63028]: value = "vm-550738" [ 783.807448] env[63028]: _type = "VirtualMachine" [ 783.807448] env[63028]: }. {{(pid=63028) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 783.808855] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-09167191-c388-46bf-9c95-7ff0e34f9805 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.820257] env[63028]: DEBUG oslo_vmware.rw_handles [None req-33c2d3e2-8c5f-498e-bb96-3a2d78173ced tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Lease: (returnval){ [ 783.820257] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52a39b88-12e0-fc46-a6b0-276909b2ba32" [ 783.820257] env[63028]: _type = "HttpNfcLease" [ 783.820257] env[63028]: } obtained for exporting VM: (result){ [ 783.820257] env[63028]: value = "vm-550738" [ 783.820257] env[63028]: _type = "VirtualMachine" [ 783.820257] env[63028]: }. {{(pid=63028) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 783.820528] env[63028]: DEBUG oslo_vmware.api [None req-33c2d3e2-8c5f-498e-bb96-3a2d78173ced tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Waiting for the lease: (returnval){ [ 783.820528] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52a39b88-12e0-fc46-a6b0-276909b2ba32" [ 783.820528] env[63028]: _type = "HttpNfcLease" [ 783.820528] env[63028]: } to be ready. {{(pid=63028) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 783.828603] env[63028]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 783.828603] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52a39b88-12e0-fc46-a6b0-276909b2ba32" [ 783.828603] env[63028]: _type = "HttpNfcLease" [ 783.828603] env[63028]: } is initializing. 
{{(pid=63028) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 783.989579] env[63028]: DEBUG oslo_vmware.api [None req-644d24eb-7e16-443d-ae0d-76156aa87551 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Task: {'id': task-2735477, 'name': CloneVM_Task} progress is 94%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.009030] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-dc38d46c-678e-438a-bcbc-898ad4f285df tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: cd11b318-9158-4f1d-8aa8-1c9d565bb5d5] Creating linked-clone VM from snapshot {{(pid=63028) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 784.010065] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-6335f7b6-cd10-4c22-aa7f-0458c8ff3587 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.022392] env[63028]: DEBUG oslo_vmware.api [None req-dc38d46c-678e-438a-bcbc-898ad4f285df tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Waiting for the task: (returnval){ [ 784.022392] env[63028]: value = "task-2735479" [ 784.022392] env[63028]: _type = "Task" [ 784.022392] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 784.022909] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f13d02a8-8d3e-4b78-b445-d8381137a85a tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Lock "e2d39c43-6666-4fda-b8e2-485399c59e46" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.856s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 784.034062] env[63028]: DEBUG oslo_vmware.api [None req-dc38d46c-678e-438a-bcbc-898ad4f285df tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735479, 'name': CloneVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.084105] env[63028]: DEBUG oslo_vmware.api [None req-d9ad4531-4478-4b68-8e6a-ce52da2b3c63 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Task: {'id': task-2735476, 'name': ReconfigVM_Task, 'duration_secs': 0.857156} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 784.084426] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-d9ad4531-4478-4b68-8e6a-ce52da2b3c63 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] [instance: 15326f55-2db8-47c3-b1fd-ce8ba1174c79] Reconfigured VM instance instance-00000035 to attach disk [datastore2] 15326f55-2db8-47c3-b1fd-ce8ba1174c79/15326f55-2db8-47c3-b1fd-ce8ba1174c79.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 784.085171] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f8120a1e-0402-4a62-b324-1c173cbfe5d5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.095873] env[63028]: DEBUG oslo_vmware.api [None req-d9ad4531-4478-4b68-8e6a-ce52da2b3c63 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Waiting for the task: (returnval){ [ 784.095873] env[63028]: value = "task-2735480" [ 784.095873] env[63028]: _type = "Task" [ 784.095873] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 784.109310] env[63028]: DEBUG oslo_vmware.api [None req-d9ad4531-4478-4b68-8e6a-ce52da2b3c63 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Task: {'id': task-2735480, 'name': Rename_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.111507] env[63028]: INFO nova.compute.manager [None req-be8438c3-6eec-42fd-9020-8898f3ea9278 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] [instance: 7e914e49-0d70-4024-940b-ad2a15e9dff7] Took 37.04 seconds to build instance. [ 784.150425] env[63028]: DEBUG nova.compute.manager [None req-7853024a-9db9-44e7-be13-cc5f0e4f4703 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: 5a340e31-678c-437e-aa4e-07d5d9f4334f] Start destroying the instance on the hypervisor. 
{{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 784.150688] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-7853024a-9db9-44e7-be13-cc5f0e4f4703 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: 5a340e31-678c-437e-aa4e-07d5d9f4334f] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 784.151668] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1073a444-8840-485c-84f2-a8372b5a50e7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.162493] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-7853024a-9db9-44e7-be13-cc5f0e4f4703 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: 5a340e31-678c-437e-aa4e-07d5d9f4334f] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 784.162688] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-82e990bf-74ab-4713-b11a-e16655d6bac2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.170434] env[63028]: DEBUG oslo_vmware.api [None req-7853024a-9db9-44e7-be13-cc5f0e4f4703 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Waiting for the task: (returnval){ [ 784.170434] env[63028]: value = "task-2735481" [ 784.170434] env[63028]: _type = "Task" [ 784.170434] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 784.180981] env[63028]: DEBUG oslo_vmware.api [None req-7853024a-9db9-44e7-be13-cc5f0e4f4703 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2735481, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.329812] env[63028]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 784.329812] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52a39b88-12e0-fc46-a6b0-276909b2ba32" [ 784.329812] env[63028]: _type = "HttpNfcLease" [ 784.329812] env[63028]: } is ready. {{(pid=63028) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 784.330292] env[63028]: DEBUG oslo_vmware.rw_handles [None req-33c2d3e2-8c5f-498e-bb96-3a2d78173ced tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 784.330292] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52a39b88-12e0-fc46-a6b0-276909b2ba32" [ 784.330292] env[63028]: _type = "HttpNfcLease" [ 784.330292] env[63028]: }. 
{{(pid=63028) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 784.337960] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-075429cc-d3fa-468b-9c91-7d9d3b562e71 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.356898] env[63028]: DEBUG oslo_vmware.rw_handles [None req-33c2d3e2-8c5f-498e-bb96-3a2d78173ced tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/529aec82-3d3c-151b-5e3e-11fe98f94696/disk-0.vmdk from lease info. {{(pid=63028) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 784.357247] env[63028]: DEBUG oslo_vmware.rw_handles [None req-33c2d3e2-8c5f-498e-bb96-3a2d78173ced tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/529aec82-3d3c-151b-5e3e-11fe98f94696/disk-0.vmdk for reading. {{(pid=63028) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 784.466067] env[63028]: DEBUG nova.network.neutron [None req-6118af02-15c9-41e2-bf63-dacf4033738f tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: 5a340e31-678c-437e-aa4e-07d5d9f4334f] Updating instance_info_cache with network_info: [{"id": "39f160e2-809e-4b2c-9424-70448b807385", "address": "fa:16:3e:f5:5d:b4", "network": {"id": "c2f1496c-e3fd-43db-a032-12cdacdb4e46", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1287620143-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.199", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "25a6457f62d149629c09589feb1a550c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap39f160e2-80", "ovs_interfaceid": "39f160e2-809e-4b2c-9424-70448b807385", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 784.469221] env[63028]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-4b907600-14be-4e0e-84ac-7ee70c60a071 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.489553] env[63028]: DEBUG oslo_vmware.api [None req-644d24eb-7e16-443d-ae0d-76156aa87551 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Task: {'id': task-2735477, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.537510] env[63028]: DEBUG oslo_vmware.api [None req-dc38d46c-678e-438a-bcbc-898ad4f285df tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735479, 'name': CloneVM_Task} progress is 94%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.538278] env[63028]: DEBUG nova.network.neutron [None req-3a4e4332-06ce-40fd-b127-c2bc84c3f268 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Successfully updated port: 2e2d8403-826c-4e24-ba3c-123d444d1fdc {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 784.606197] env[63028]: DEBUG oslo_vmware.api [None req-d9ad4531-4478-4b68-8e6a-ce52da2b3c63 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Task: {'id': task-2735480, 'name': Rename_Task, 'duration_secs': 0.224245} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 784.606528] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9ad4531-4478-4b68-8e6a-ce52da2b3c63 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] [instance: 15326f55-2db8-47c3-b1fd-ce8ba1174c79] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 784.607599] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b3096f7e-cef7-408b-b353-2ab3d4a8a632 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.613710] env[63028]: DEBUG oslo_concurrency.lockutils [None req-be8438c3-6eec-42fd-9020-8898f3ea9278 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] Lock "7e914e49-0d70-4024-940b-ad2a15e9dff7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 77.448s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 784.616599] env[63028]: DEBUG oslo_vmware.api [None req-d9ad4531-4478-4b68-8e6a-ce52da2b3c63 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Waiting for the task: (returnval){ [ 784.616599] env[63028]: value = "task-2735482" [ 784.616599] env[63028]: _type = "Task" [ 784.616599] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 784.630262] env[63028]: DEBUG oslo_vmware.api [None req-d9ad4531-4478-4b68-8e6a-ce52da2b3c63 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Task: {'id': task-2735482, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.681839] env[63028]: DEBUG oslo_vmware.api [None req-7853024a-9db9-44e7-be13-cc5f0e4f4703 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2735481, 'name': PowerOffVM_Task, 'duration_secs': 0.220722} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 784.685307] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-7853024a-9db9-44e7-be13-cc5f0e4f4703 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: 5a340e31-678c-437e-aa4e-07d5d9f4334f] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 784.685612] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-7853024a-9db9-44e7-be13-cc5f0e4f4703 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: 5a340e31-678c-437e-aa4e-07d5d9f4334f] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 784.686197] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f52dc953-6676-40d0-803f-24a49d341335 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.764662] env[63028]: DEBUG nova.compute.manager [req-a5b2fca5-d4e2-466b-b950-ffb88f0f92f7 req-32bacc92-193c-4861-9036-2580ab694663 service nova] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Received event network-vif-plugged-2e2d8403-826c-4e24-ba3c-123d444d1fdc {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 784.764903] env[63028]: DEBUG oslo_concurrency.lockutils [req-a5b2fca5-d4e2-466b-b950-ffb88f0f92f7 req-32bacc92-193c-4861-9036-2580ab694663 service nova] Acquiring lock "85aafadb-81d6-4687-aed1-fbe829e5f95f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 784.765632] env[63028]: DEBUG oslo_concurrency.lockutils [req-a5b2fca5-d4e2-466b-b950-ffb88f0f92f7 req-32bacc92-193c-4861-9036-2580ab694663 service nova] Lock "85aafadb-81d6-4687-aed1-fbe829e5f95f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 784.765792] env[63028]: DEBUG oslo_concurrency.lockutils [req-a5b2fca5-d4e2-466b-b950-ffb88f0f92f7 req-32bacc92-193c-4861-9036-2580ab694663 service nova] Lock "85aafadb-81d6-4687-aed1-fbe829e5f95f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 784.766051] env[63028]: DEBUG nova.compute.manager [req-a5b2fca5-d4e2-466b-b950-ffb88f0f92f7 req-32bacc92-193c-4861-9036-2580ab694663 service nova] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] No waiting events found dispatching network-vif-plugged-2e2d8403-826c-4e24-ba3c-123d444d1fdc {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 784.766309] env[63028]: WARNING nova.compute.manager [req-a5b2fca5-d4e2-466b-b950-ffb88f0f92f7 req-32bacc92-193c-4861-9036-2580ab694663 service nova] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Received unexpected event network-vif-plugged-2e2d8403-826c-4e24-ba3c-123d444d1fdc for instance with vm_state building and task_state spawning. 
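Two oslo.vmware calling patterns recur through these records: task-returning vSphere calls polled by wait_for_task() ("progress is N%" ... "completed successfully"), and the image-export path that obtains an HttpNfcLease and waits for it to become ready before reading the VMDK URL. A rough sketch under the assumption of an existing oslo_vmware.api.VMwareAPISession (the function names below are illustrative, not Nova's):

    from oslo_vmware import vim_util

    def power_off(session, vm_ref):
        # "Invoking VirtualMachine.PowerOffVM_Task" returns a task moref,
        # which the session polls until completion.
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        session.wait_for_task(task)

    def export_lease_info(session, vm_ref):
        # The upload path asks for an HttpNfcLease, waits for it to move
        # from "initializing" to "ready", then reads the lease's 'info'
        # property, from which the disk-0.vmdk URL is taken.
        lease = session.invoke_api(session.vim, 'ExportVm', vm_ref)
        session.wait_for_lease_ready(lease)
        return session.invoke_api(vim_util, 'get_object_property',
                                  session.vim, lease, 'info')

A caller would pass a moref such as vim_util.get_moref('vm-550738', 'VirtualMachine'); the per-call opID values seen above are generated by the session for request tracing.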
[ 784.839859] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-7853024a-9db9-44e7-be13-cc5f0e4f4703 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: 5a340e31-678c-437e-aa4e-07d5d9f4334f] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 784.840243] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-7853024a-9db9-44e7-be13-cc5f0e4f4703 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: 5a340e31-678c-437e-aa4e-07d5d9f4334f] Deleting contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 784.840765] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-7853024a-9db9-44e7-be13-cc5f0e4f4703 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Deleting the datastore file [datastore2] 5a340e31-678c-437e-aa4e-07d5d9f4334f {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 784.841431] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4de93ffc-e67b-416b-9fd9-c1468aceb8be {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.854701] env[63028]: DEBUG oslo_vmware.api [None req-7853024a-9db9-44e7-be13-cc5f0e4f4703 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Waiting for the task: (returnval){ [ 784.854701] env[63028]: value = "task-2735484" [ 784.854701] env[63028]: _type = "Task" [ 784.854701] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 784.867299] env[63028]: DEBUG oslo_vmware.api [None req-7853024a-9db9-44e7-be13-cc5f0e4f4703 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2735484, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.971260] env[63028]: DEBUG oslo_concurrency.lockutils [None req-6118af02-15c9-41e2-bf63-dacf4033738f tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Releasing lock "refresh_cache-5a340e31-678c-437e-aa4e-07d5d9f4334f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 785.002616] env[63028]: DEBUG oslo_vmware.api [None req-644d24eb-7e16-443d-ae0d-76156aa87551 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Task: {'id': task-2735477, 'name': CloneVM_Task} progress is 100%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.045340] env[63028]: DEBUG oslo_concurrency.lockutils [None req-3a4e4332-06ce-40fd-b127-c2bc84c3f268 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Acquiring lock "refresh_cache-85aafadb-81d6-4687-aed1-fbe829e5f95f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 785.048871] env[63028]: DEBUG oslo_concurrency.lockutils [None req-3a4e4332-06ce-40fd-b127-c2bc84c3f268 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Acquired lock "refresh_cache-85aafadb-81d6-4687-aed1-fbe829e5f95f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 785.049229] env[63028]: DEBUG nova.network.neutron [None req-3a4e4332-06ce-40fd-b127-c2bc84c3f268 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 785.050807] env[63028]: DEBUG oslo_vmware.api [None req-dc38d46c-678e-438a-bcbc-898ad4f285df tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735479, 'name': CloneVM_Task} progress is 95%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.066920] env[63028]: DEBUG oslo_concurrency.lockutils [None req-dbd96db3-2cb6-4207-bf54-bf453d821f5b tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Acquiring lock "56d39801-f3e7-4cfe-a038-6a5e762bfda8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 785.067073] env[63028]: DEBUG oslo_concurrency.lockutils [None req-dbd96db3-2cb6-4207-bf54-bf453d821f5b tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Lock "56d39801-f3e7-4cfe-a038-6a5e762bfda8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 785.124195] env[63028]: DEBUG nova.compute.manager [None req-81ee569f-ddd8-4590-ac59-e5b6b9865946 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7] Starting instance... {{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 785.135054] env[63028]: DEBUG oslo_vmware.api [None req-d9ad4531-4478-4b68-8e6a-ce52da2b3c63 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Task: {'id': task-2735482, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.142518] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f41f50f-9caa-49b7-9cd6-2580adf9d7af {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.151974] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c1920fa-ff5a-456a-8061-80d45fc7944f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.187885] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-478aa539-a950-4de2-a3d5-83e6b264483a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.198400] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-055348f3-37db-4d97-9f33-c7828030092c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.219691] env[63028]: DEBUG nova.compute.provider_tree [None req-efa009e1-31b4-4810-870c-cc093d5cbf71 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 785.289025] env[63028]: DEBUG oslo_concurrency.lockutils [None req-b709110c-6e00-4eb2-85eb-129887ad428d tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Acquiring lock "13e0ca05-3ab3-43e2-8b0d-8045e26d6723" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 785.289308] env[63028]: DEBUG oslo_concurrency.lockutils [None req-b709110c-6e00-4eb2-85eb-129887ad428d tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Lock "13e0ca05-3ab3-43e2-8b0d-8045e26d6723" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 785.369588] env[63028]: DEBUG oslo_vmware.api [None req-7853024a-9db9-44e7-be13-cc5f0e4f4703 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2735484, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.227123} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 785.370764] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-7853024a-9db9-44e7-be13-cc5f0e4f4703 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 785.371072] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-7853024a-9db9-44e7-be13-cc5f0e4f4703 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: 5a340e31-678c-437e-aa4e-07d5d9f4334f] Deleted contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 785.371275] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-7853024a-9db9-44e7-be13-cc5f0e4f4703 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: 5a340e31-678c-437e-aa4e-07d5d9f4334f] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 785.372040] env[63028]: INFO nova.compute.manager [None req-7853024a-9db9-44e7-be13-cc5f0e4f4703 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: 5a340e31-678c-437e-aa4e-07d5d9f4334f] Took 1.22 seconds to destroy the instance on the hypervisor. [ 785.372040] env[63028]: DEBUG oslo.service.loopingcall [None req-7853024a-9db9-44e7-be13-cc5f0e4f4703 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 785.372673] env[63028]: DEBUG nova.compute.manager [-] [instance: 5a340e31-678c-437e-aa4e-07d5d9f4334f] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 785.372673] env[63028]: DEBUG nova.network.neutron [-] [instance: 5a340e31-678c-437e-aa4e-07d5d9f4334f] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 785.479107] env[63028]: DEBUG oslo_concurrency.lockutils [None req-6118af02-15c9-41e2-bf63-dacf4033738f tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Lock "interface-5a340e31-678c-437e-aa4e-07d5d9f4334f-192a1d7c-32a1-4b52-944b-c6c3b6c52f93" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 9.919s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 785.493363] env[63028]: DEBUG oslo_vmware.api [None req-644d24eb-7e16-443d-ae0d-76156aa87551 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Task: {'id': task-2735477, 'name': CloneVM_Task, 'duration_secs': 1.567306} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 785.493527] env[63028]: INFO nova.virt.vmwareapi.vmops [None req-644d24eb-7e16-443d-ae0d-76156aa87551 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 5982cd5d-abf1-42d4-bb44-8d79de599f11] Created linked-clone VM from snapshot [ 785.494318] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72952aa4-7be0-4918-be9c-c92182571021 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.503852] env[63028]: DEBUG nova.virt.vmwareapi.images [None req-644d24eb-7e16-443d-ae0d-76156aa87551 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 5982cd5d-abf1-42d4-bb44-8d79de599f11] Uploading image 6d115202-aa15-409e-8b5a-b19083ab5b5f {{(pid=63028) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 785.532415] env[63028]: DEBUG oslo_vmware.rw_handles [None req-644d24eb-7e16-443d-ae0d-76156aa87551 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 785.532415] env[63028]: value = "vm-550741" [ 785.532415] env[63028]: _type = "VirtualMachine" [ 785.532415] env[63028]: }. {{(pid=63028) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 785.532415] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-6690f8b0-9ca2-4f62-a2be-a7127143908d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.538394] env[63028]: DEBUG oslo_vmware.api [None req-dc38d46c-678e-438a-bcbc-898ad4f285df tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735479, 'name': CloneVM_Task} progress is 100%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.545243] env[63028]: DEBUG oslo_vmware.rw_handles [None req-644d24eb-7e16-443d-ae0d-76156aa87551 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Lease: (returnval){ [ 785.545243] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52087519-7115-1097-f006-ecdac2cb40b4" [ 785.545243] env[63028]: _type = "HttpNfcLease" [ 785.545243] env[63028]: } obtained for exporting VM: (result){ [ 785.545243] env[63028]: value = "vm-550741" [ 785.545243] env[63028]: _type = "VirtualMachine" [ 785.545243] env[63028]: }. {{(pid=63028) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 785.545559] env[63028]: DEBUG oslo_vmware.api [None req-644d24eb-7e16-443d-ae0d-76156aa87551 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Waiting for the lease: (returnval){ [ 785.545559] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52087519-7115-1097-f006-ecdac2cb40b4" [ 785.545559] env[63028]: _type = "HttpNfcLease" [ 785.545559] env[63028]: } to be ready. 
{{(pid=63028) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 785.553819] env[63028]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 785.553819] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52087519-7115-1097-f006-ecdac2cb40b4" [ 785.553819] env[63028]: _type = "HttpNfcLease" [ 785.553819] env[63028]: } is initializing. {{(pid=63028) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 785.629084] env[63028]: DEBUG oslo_vmware.api [None req-d9ad4531-4478-4b68-8e6a-ce52da2b3c63 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Task: {'id': task-2735482, 'name': PowerOnVM_Task, 'duration_secs': 0.624391} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 785.629508] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9ad4531-4478-4b68-8e6a-ce52da2b3c63 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] [instance: 15326f55-2db8-47c3-b1fd-ce8ba1174c79] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 785.629749] env[63028]: INFO nova.compute.manager [None req-d9ad4531-4478-4b68-8e6a-ce52da2b3c63 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] [instance: 15326f55-2db8-47c3-b1fd-ce8ba1174c79] Took 9.66 seconds to spawn the instance on the hypervisor. [ 785.630855] env[63028]: DEBUG nova.compute.manager [None req-d9ad4531-4478-4b68-8e6a-ce52da2b3c63 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] [instance: 15326f55-2db8-47c3-b1fd-ce8ba1174c79] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 785.632594] env[63028]: DEBUG nova.network.neutron [None req-3a4e4332-06ce-40fd-b127-c2bc84c3f268 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Instance cache missing network info. 
{{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 785.637932] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42fa0d84-26c4-48dc-b3d3-ce3d09e2d75f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.660777] env[63028]: DEBUG oslo_concurrency.lockutils [None req-81ee569f-ddd8-4590-ac59-e5b6b9865946 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 785.724342] env[63028]: DEBUG nova.scheduler.client.report [None req-efa009e1-31b4-4810-870c-cc093d5cbf71 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 786.039077] env[63028]: DEBUG oslo_vmware.api [None req-dc38d46c-678e-438a-bcbc-898ad4f285df tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735479, 'name': CloneVM_Task, 'duration_secs': 1.524459} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 786.042581] env[63028]: INFO nova.virt.vmwareapi.vmops [None req-dc38d46c-678e-438a-bcbc-898ad4f285df tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: cd11b318-9158-4f1d-8aa8-1c9d565bb5d5] Created linked-clone VM from snapshot [ 786.043421] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61534753-a2ea-45f2-9737-6a2eeed3d29c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.065488] env[63028]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 786.065488] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52087519-7115-1097-f006-ecdac2cb40b4" [ 786.065488] env[63028]: _type = "HttpNfcLease" [ 786.065488] env[63028]: } is ready. 
{{(pid=63028) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 786.069762] env[63028]: DEBUG nova.virt.vmwareapi.images [None req-dc38d46c-678e-438a-bcbc-898ad4f285df tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: cd11b318-9158-4f1d-8aa8-1c9d565bb5d5] Uploading image c9823cf3-c81e-4b18-855a-a01f46d8c790 {{(pid=63028) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 786.072560] env[63028]: DEBUG oslo_vmware.rw_handles [None req-644d24eb-7e16-443d-ae0d-76156aa87551 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 786.072560] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52087519-7115-1097-f006-ecdac2cb40b4" [ 786.072560] env[63028]: _type = "HttpNfcLease" [ 786.072560] env[63028]: }. {{(pid=63028) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 786.073524] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e47fd00-04bc-4d29-9051-04b84e1caf65 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.083836] env[63028]: DEBUG oslo_vmware.rw_handles [None req-644d24eb-7e16-443d-ae0d-76156aa87551 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5206934e-c105-f1f4-1693-3b63024e6dee/disk-0.vmdk from lease info. {{(pid=63028) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 786.086029] env[63028]: DEBUG oslo_vmware.rw_handles [None req-644d24eb-7e16-443d-ae0d-76156aa87551 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5206934e-c105-f1f4-1693-3b63024e6dee/disk-0.vmdk for reading. {{(pid=63028) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 786.163424] env[63028]: DEBUG oslo_vmware.rw_handles [None req-dc38d46c-678e-438a-bcbc-898ad4f285df tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 786.163424] env[63028]: value = "vm-550742" [ 786.163424] env[63028]: _type = "VirtualMachine" [ 786.163424] env[63028]: }. 
{{(pid=63028) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 786.164835] env[63028]: DEBUG nova.network.neutron [None req-3a4e4332-06ce-40fd-b127-c2bc84c3f268 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Updating instance_info_cache with network_info: [{"id": "2e2d8403-826c-4e24-ba3c-123d444d1fdc", "address": "fa:16:3e:09:d0:a2", "network": {"id": "2b335013-49f6-4f84-b6b6-33d71818b2b9", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-2106845332-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "11332c2adbdc41928d4bf084435e2037", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f01bbee7-8b9a-46be-891e-59d8142fb359", "external-id": "nsx-vlan-transportzone-145", "segmentation_id": 145, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2e2d8403-82", "ovs_interfaceid": "2e2d8403-826c-4e24-ba3c-123d444d1fdc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 786.166242] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-40f618ec-d4cb-4417-975d-52a802987973 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.173859] env[63028]: INFO nova.compute.manager [None req-d9ad4531-4478-4b68-8e6a-ce52da2b3c63 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] [instance: 15326f55-2db8-47c3-b1fd-ce8ba1174c79] Took 38.60 seconds to build instance. [ 786.178234] env[63028]: DEBUG oslo_vmware.rw_handles [None req-dc38d46c-678e-438a-bcbc-898ad4f285df tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Lease: (returnval){ [ 786.178234] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]524ea653-f992-828e-96bf-34481a8bd447" [ 786.178234] env[63028]: _type = "HttpNfcLease" [ 786.178234] env[63028]: } obtained for exporting VM: (result){ [ 786.178234] env[63028]: value = "vm-550742" [ 786.178234] env[63028]: _type = "VirtualMachine" [ 786.178234] env[63028]: }. {{(pid=63028) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 786.178611] env[63028]: DEBUG oslo_vmware.api [None req-dc38d46c-678e-438a-bcbc-898ad4f285df tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Waiting for the lease: (returnval){ [ 786.178611] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]524ea653-f992-828e-96bf-34481a8bd447" [ 786.178611] env[63028]: _type = "HttpNfcLease" [ 786.178611] env[63028]: } to be ready. 
{{(pid=63028) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 786.186607] env[63028]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 786.186607] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]524ea653-f992-828e-96bf-34481a8bd447" [ 786.186607] env[63028]: _type = "HttpNfcLease" [ 786.186607] env[63028]: } is initializing. {{(pid=63028) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 786.216278] env[63028]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-e3196b75-1f42-44fc-b5fc-330cfea70ace {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.229964] env[63028]: DEBUG oslo_concurrency.lockutils [None req-efa009e1-31b4-4810-870c-cc093d5cbf71 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.744s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 786.231237] env[63028]: DEBUG nova.compute.manager [None req-efa009e1-31b4-4810-870c-cc093d5cbf71 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: 52b19182-a7e2-4461-b4eb-e6cd8a30024e] Start building networks asynchronously for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 786.234834] env[63028]: DEBUG oslo_concurrency.lockutils [None req-42a4e41f-5fdd-4dd8-bb5e-bc5dbf266ec6 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.594s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 786.235234] env[63028]: DEBUG nova.objects.instance [None req-42a4e41f-5fdd-4dd8-bb5e-bc5dbf266ec6 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Lazy-loading 'resources' on Instance uuid 5a330ed9-c106-49f2-b524-a424e717b5ce {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 786.453837] env[63028]: DEBUG nova.network.neutron [-] [instance: 5a340e31-678c-437e-aa4e-07d5d9f4334f] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 786.671316] env[63028]: DEBUG oslo_concurrency.lockutils [None req-3a4e4332-06ce-40fd-b127-c2bc84c3f268 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Releasing lock "refresh_cache-85aafadb-81d6-4687-aed1-fbe829e5f95f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 786.671435] env[63028]: DEBUG nova.compute.manager [None req-3a4e4332-06ce-40fd-b127-c2bc84c3f268 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Instance network_info: |[{"id": "2e2d8403-826c-4e24-ba3c-123d444d1fdc", "address": "fa:16:3e:09:d0:a2", "network": {"id": "2b335013-49f6-4f84-b6b6-33d71818b2b9", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-2106845332-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": 
"192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "11332c2adbdc41928d4bf084435e2037", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f01bbee7-8b9a-46be-891e-59d8142fb359", "external-id": "nsx-vlan-transportzone-145", "segmentation_id": 145, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2e2d8403-82", "ovs_interfaceid": "2e2d8403-826c-4e24-ba3c-123d444d1fdc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 786.671945] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-3a4e4332-06ce-40fd-b127-c2bc84c3f268 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:09:d0:a2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f01bbee7-8b9a-46be-891e-59d8142fb359', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2e2d8403-826c-4e24-ba3c-123d444d1fdc', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 786.681485] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a4e4332-06ce-40fd-b127-c2bc84c3f268 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Creating folder: Project (11332c2adbdc41928d4bf084435e2037). Parent ref: group-v550570. {{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 786.682884] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d9ad4531-4478-4b68-8e6a-ce52da2b3c63 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Lock "15326f55-2db8-47c3-b1fd-ce8ba1174c79" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 75.654s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 786.683201] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8be84026-ecd3-47c4-ad3c-0eb7818c70ca {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.704661] env[63028]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 786.704661] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]524ea653-f992-828e-96bf-34481a8bd447" [ 786.704661] env[63028]: _type = "HttpNfcLease" [ 786.704661] env[63028]: } is ready. 
{{(pid=63028) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 786.708367] env[63028]: DEBUG oslo_vmware.rw_handles [None req-dc38d46c-678e-438a-bcbc-898ad4f285df tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 786.708367] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]524ea653-f992-828e-96bf-34481a8bd447" [ 786.708367] env[63028]: _type = "HttpNfcLease" [ 786.708367] env[63028]: }. {{(pid=63028) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 786.712179] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab3a9f7d-ebd2-4648-a5b4-dcd89c032f72 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.715251] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-3a4e4332-06ce-40fd-b127-c2bc84c3f268 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Created folder: Project (11332c2adbdc41928d4bf084435e2037) in parent group-v550570. [ 786.715944] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a4e4332-06ce-40fd-b127-c2bc84c3f268 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Creating folder: Instances. Parent ref: group-v550743. {{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 786.718231] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9634d7e5-949d-4a34-8884-d3f27e230dcd {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.726601] env[63028]: DEBUG nova.compute.manager [req-5990eab4-7961-456e-811e-b3b52dd1e6ec req-cef96d13-cedd-4835-bccc-c4df7cbe33d6 service nova] [instance: 7e914e49-0d70-4024-940b-ad2a15e9dff7] Received event network-changed-4bf1e126-9048-489d-9c61-18ebe4c2af31 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 786.727410] env[63028]: DEBUG nova.compute.manager [req-5990eab4-7961-456e-811e-b3b52dd1e6ec req-cef96d13-cedd-4835-bccc-c4df7cbe33d6 service nova] [instance: 7e914e49-0d70-4024-940b-ad2a15e9dff7] Refreshing instance network info cache due to event network-changed-4bf1e126-9048-489d-9c61-18ebe4c2af31. 
{{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 786.727587] env[63028]: DEBUG oslo_concurrency.lockutils [req-5990eab4-7961-456e-811e-b3b52dd1e6ec req-cef96d13-cedd-4835-bccc-c4df7cbe33d6 service nova] Acquiring lock "refresh_cache-7e914e49-0d70-4024-940b-ad2a15e9dff7" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 786.727731] env[63028]: DEBUG oslo_concurrency.lockutils [req-5990eab4-7961-456e-811e-b3b52dd1e6ec req-cef96d13-cedd-4835-bccc-c4df7cbe33d6 service nova] Acquired lock "refresh_cache-7e914e49-0d70-4024-940b-ad2a15e9dff7" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 786.727896] env[63028]: DEBUG nova.network.neutron [req-5990eab4-7961-456e-811e-b3b52dd1e6ec req-cef96d13-cedd-4835-bccc-c4df7cbe33d6 service nova] [instance: 7e914e49-0d70-4024-940b-ad2a15e9dff7] Refreshing network info cache for port 4bf1e126-9048-489d-9c61-18ebe4c2af31 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 786.735684] env[63028]: DEBUG oslo_vmware.rw_handles [None req-dc38d46c-678e-438a-bcbc-898ad4f285df tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ede4e1-188f-f9db-77ec-c6bf10e2a7d2/disk-0.vmdk from lease info. {{(pid=63028) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 786.737236] env[63028]: DEBUG oslo_vmware.rw_handles [None req-dc38d46c-678e-438a-bcbc-898ad4f285df tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ede4e1-188f-f9db-77ec-c6bf10e2a7d2/disk-0.vmdk for reading. {{(pid=63028) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 786.745786] env[63028]: DEBUG nova.compute.utils [None req-efa009e1-31b4-4810-870c-cc093d5cbf71 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 786.758370] env[63028]: DEBUG nova.compute.manager [None req-efa009e1-31b4-4810-870c-cc093d5cbf71 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: 52b19182-a7e2-4461-b4eb-e6cd8a30024e] Allocating IP information in the background. {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 786.758370] env[63028]: DEBUG nova.network.neutron [None req-efa009e1-31b4-4810-870c-cc093d5cbf71 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: 52b19182-a7e2-4461-b4eb-e6cd8a30024e] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 786.759754] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-3a4e4332-06ce-40fd-b127-c2bc84c3f268 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Created folder: Instances in parent group-v550743. [ 786.759999] env[63028]: DEBUG oslo.service.loopingcall [None req-3a4e4332-06ce-40fd-b127-c2bc84c3f268 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 786.818950] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 786.824082] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ffd52d40-2704-4660-97e0-bf6372e0734b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.852790] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 786.852790] env[63028]: value = "task-2735489" [ 786.852790] env[63028]: _type = "Task" [ 786.852790] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 786.868032] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735489, 'name': CreateVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.897070] env[63028]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-fa031903-b3db-4eae-b0d5-eeec780d6c12 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.911259] env[63028]: DEBUG nova.policy [None req-efa009e1-31b4-4810-870c-cc093d5cbf71 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1db2756e554d4fa2a66cff81ab6d4105', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e14d427c980c486cbbe8ff0982a30428', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 786.960616] env[63028]: INFO nova.compute.manager [-] [instance: 5a340e31-678c-437e-aa4e-07d5d9f4334f] Took 1.59 seconds to deallocate network for instance. [ 787.198875] env[63028]: DEBUG nova.compute.manager [None req-dbd96db3-2cb6-4207-bf54-bf453d821f5b tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] [instance: 56d39801-f3e7-4cfe-a038-6a5e762bfda8] Starting instance... {{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 787.237155] env[63028]: DEBUG nova.compute.manager [req-a46b9d80-bed8-40b2-9994-a08a2a14979a req-e8da96fe-0345-415f-9c88-1776fd747e6e service nova] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Received event network-changed-2e2d8403-826c-4e24-ba3c-123d444d1fdc {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 787.237308] env[63028]: DEBUG nova.compute.manager [req-a46b9d80-bed8-40b2-9994-a08a2a14979a req-e8da96fe-0345-415f-9c88-1776fd747e6e service nova] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Refreshing instance network info cache due to event network-changed-2e2d8403-826c-4e24-ba3c-123d444d1fdc. 
{{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 787.237558] env[63028]: DEBUG oslo_concurrency.lockutils [req-a46b9d80-bed8-40b2-9994-a08a2a14979a req-e8da96fe-0345-415f-9c88-1776fd747e6e service nova] Acquiring lock "refresh_cache-85aafadb-81d6-4687-aed1-fbe829e5f95f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 787.237662] env[63028]: DEBUG oslo_concurrency.lockutils [req-a46b9d80-bed8-40b2-9994-a08a2a14979a req-e8da96fe-0345-415f-9c88-1776fd747e6e service nova] Acquired lock "refresh_cache-85aafadb-81d6-4687-aed1-fbe829e5f95f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 787.237851] env[63028]: DEBUG nova.network.neutron [req-a46b9d80-bed8-40b2-9994-a08a2a14979a req-e8da96fe-0345-415f-9c88-1776fd747e6e service nova] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Refreshing network info cache for port 2e2d8403-826c-4e24-ba3c-123d444d1fdc {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 787.259032] env[63028]: DEBUG nova.compute.manager [None req-efa009e1-31b4-4810-870c-cc093d5cbf71 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: 52b19182-a7e2-4461-b4eb-e6cd8a30024e] Start building block device mappings for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 787.295956] env[63028]: DEBUG nova.network.neutron [req-5990eab4-7961-456e-811e-b3b52dd1e6ec req-cef96d13-cedd-4835-bccc-c4df7cbe33d6 service nova] [instance: 7e914e49-0d70-4024-940b-ad2a15e9dff7] Updated VIF entry in instance network info cache for port 4bf1e126-9048-489d-9c61-18ebe4c2af31. {{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 787.295956] env[63028]: DEBUG nova.network.neutron [req-5990eab4-7961-456e-811e-b3b52dd1e6ec req-cef96d13-cedd-4835-bccc-c4df7cbe33d6 service nova] [instance: 7e914e49-0d70-4024-940b-ad2a15e9dff7] Updating instance_info_cache with network_info: [{"id": "4bf1e126-9048-489d-9c61-18ebe4c2af31", "address": "fa:16:3e:86:dd:f7", "network": {"id": "b27ad7b3-fa4d-4831-9ec4-23010a1301bc", "bridge": "br-int", "label": "tempest-ServersTestJSON-276288407-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.249", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3008f14179f4461aab24b22ce9174433", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "298bb8ef-4765-494c-b157-7a349218bd1e", "external-id": "nsx-vlan-transportzone-905", "segmentation_id": 905, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4bf1e126-90", "ovs_interfaceid": "4bf1e126-9048-489d-9c61-18ebe4c2af31", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 787.326266] env[63028]: DEBUG nova.network.neutron [None req-efa009e1-31b4-4810-870c-cc093d5cbf71 
tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: 52b19182-a7e2-4461-b4eb-e6cd8a30024e] Successfully created port: 7dd5be5d-a88d-4dcd-a42d-7842895207f7 {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 787.367374] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735489, 'name': CreateVM_Task, 'duration_secs': 0.472471} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 787.367533] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 787.368271] env[63028]: DEBUG oslo_concurrency.lockutils [None req-3a4e4332-06ce-40fd-b127-c2bc84c3f268 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 787.368507] env[63028]: DEBUG oslo_concurrency.lockutils [None req-3a4e4332-06ce-40fd-b127-c2bc84c3f268 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 787.368756] env[63028]: DEBUG oslo_concurrency.lockutils [None req-3a4e4332-06ce-40fd-b127-c2bc84c3f268 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 787.369046] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-39607cd3-46b0-4e58-a9b1-fb31f4964a42 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.378110] env[63028]: DEBUG oslo_vmware.api [None req-3a4e4332-06ce-40fd-b127-c2bc84c3f268 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Waiting for the task: (returnval){ [ 787.378110] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52b78344-076f-210a-1166-f87039279efa" [ 787.378110] env[63028]: _type = "Task" [ 787.378110] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 787.395042] env[63028]: DEBUG oslo_vmware.api [None req-3a4e4332-06ce-40fd-b127-c2bc84c3f268 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52b78344-076f-210a-1166-f87039279efa, 'name': SearchDatastore_Task, 'duration_secs': 0.012511} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 787.395287] env[63028]: DEBUG oslo_concurrency.lockutils [None req-3a4e4332-06ce-40fd-b127-c2bc84c3f268 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 787.395515] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-3a4e4332-06ce-40fd-b127-c2bc84c3f268 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 787.395886] env[63028]: DEBUG oslo_concurrency.lockutils [None req-3a4e4332-06ce-40fd-b127-c2bc84c3f268 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 787.396224] env[63028]: DEBUG oslo_concurrency.lockutils [None req-3a4e4332-06ce-40fd-b127-c2bc84c3f268 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 787.396454] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-3a4e4332-06ce-40fd-b127-c2bc84c3f268 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 787.396786] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bbe1461f-baaa-425c-8685-ad11e89d452b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.411210] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-3a4e4332-06ce-40fd-b127-c2bc84c3f268 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 787.411718] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-3a4e4332-06ce-40fd-b127-c2bc84c3f268 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 787.412843] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1cee34fc-b976-4874-a645-b62816622b46 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.423077] env[63028]: DEBUG oslo_vmware.api [None req-3a4e4332-06ce-40fd-b127-c2bc84c3f268 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Waiting for the task: (returnval){ [ 787.423077] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5232d997-8c1f-03ab-16ba-83c99cc1d3b8" [ 787.423077] env[63028]: _type = "Task" [ 787.423077] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 787.438638] env[63028]: DEBUG oslo_vmware.api [None req-3a4e4332-06ce-40fd-b127-c2bc84c3f268 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5232d997-8c1f-03ab-16ba-83c99cc1d3b8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.469018] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7853024a-9db9-44e7-be13-cc5f0e4f4703 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 787.484170] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4296723-8add-4ed9-a9e0-70c8d2961e86 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.492763] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c28be328-46e8-4c9b-904e-d46ed583a88b tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Acquiring lock "022125c4-2b0c-4a2c-ae63-18968887316e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 787.493241] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c28be328-46e8-4c9b-904e-d46ed583a88b tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Lock "022125c4-2b0c-4a2c-ae63-18968887316e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 787.499550] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffd2c370-e5b4-44b1-be90-1f63b6dc2943 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.533521] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b462259c-a784-4ebf-b4f6-92f351aeb7cc {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} 
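[editor's note] The repeated "Acquiring lock … / Lock … acquired … waited N.NNNs / … released … held N.NNNs" triples in the records above are produced by oslo.concurrency's lockutils wrappers. A minimal sketch of that pattern follows; the function names and the bodies are illustrative only (the lock names "compute_resources" and "refresh_cache-<uuid>" do appear in the log, everything else is an assumption):

    from oslo_concurrency import lockutils

    # Decorator form: the wrapper logs the same "acquired by ... waited" /
    # "released by ... held" DEBUG lines seen above, attributed to the
    # decorated function.
    @lockutils.synchronized('compute_resources')
    def _claim_resources(instance_uuid):
        # Critical section: resource-tracker style bookkeeping would go here.
        return instance_uuid

    # Context-manager form, matching the per-instance
    # "refresh_cache-<uuid>" lock acquire/release pairs in the log.
    def refresh_network_cache(instance_uuid):
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            # Re-fetch and store the instance's network info while the
            # cache lock is held.
            pass
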
[ 787.543197] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7d0dd08-02e7-414e-89cc-93f4888e529a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.559829] env[63028]: DEBUG nova.compute.provider_tree [None req-42a4e41f-5fdd-4dd8-bb5e-bc5dbf266ec6 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 787.717887] env[63028]: DEBUG oslo_concurrency.lockutils [None req-dbd96db3-2cb6-4207-bf54-bf453d821f5b tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 787.797214] env[63028]: DEBUG oslo_concurrency.lockutils [req-5990eab4-7961-456e-811e-b3b52dd1e6ec req-cef96d13-cedd-4835-bccc-c4df7cbe33d6 service nova] Releasing lock "refresh_cache-7e914e49-0d70-4024-940b-ad2a15e9dff7" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 787.944020] env[63028]: DEBUG oslo_vmware.api [None req-3a4e4332-06ce-40fd-b127-c2bc84c3f268 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5232d997-8c1f-03ab-16ba-83c99cc1d3b8, 'name': SearchDatastore_Task, 'duration_secs': 0.013493} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 787.945492] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e33db89c-88a9-4d8f-8bc8-f29f240b2e9f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.953970] env[63028]: DEBUG oslo_vmware.api [None req-3a4e4332-06ce-40fd-b127-c2bc84c3f268 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Waiting for the task: (returnval){ [ 787.953970] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5278a485-dc35-8cd3-acd1-3463e2b5d7d5" [ 787.953970] env[63028]: _type = "Task" [ 787.953970] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 787.973402] env[63028]: DEBUG oslo_vmware.api [None req-3a4e4332-06ce-40fd-b127-c2bc84c3f268 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5278a485-dc35-8cd3-acd1-3463e2b5d7d5, 'name': SearchDatastore_Task, 'duration_secs': 0.015428} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 787.973897] env[63028]: DEBUG oslo_concurrency.lockutils [None req-3a4e4332-06ce-40fd-b127-c2bc84c3f268 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 787.974368] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a4e4332-06ce-40fd-b127-c2bc84c3f268 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] 85aafadb-81d6-4687-aed1-fbe829e5f95f/85aafadb-81d6-4687-aed1-fbe829e5f95f.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 787.974958] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-07026ece-0e75-4240-b889-36a8081ae6b1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.984203] env[63028]: DEBUG oslo_vmware.api [None req-3a4e4332-06ce-40fd-b127-c2bc84c3f268 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Waiting for the task: (returnval){ [ 787.984203] env[63028]: value = "task-2735490" [ 787.984203] env[63028]: _type = "Task" [ 787.984203] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 787.994672] env[63028]: DEBUG oslo_vmware.api [None req-3a4e4332-06ce-40fd-b127-c2bc84c3f268 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2735490, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.063364] env[63028]: DEBUG nova.scheduler.client.report [None req-42a4e41f-5fdd-4dd8-bb5e-bc5dbf266ec6 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 788.081127] env[63028]: DEBUG nova.network.neutron [req-a46b9d80-bed8-40b2-9994-a08a2a14979a req-e8da96fe-0345-415f-9c88-1776fd747e6e service nova] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Updated VIF entry in instance network info cache for port 2e2d8403-826c-4e24-ba3c-123d444d1fdc. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 788.081617] env[63028]: DEBUG nova.network.neutron [req-a46b9d80-bed8-40b2-9994-a08a2a14979a req-e8da96fe-0345-415f-9c88-1776fd747e6e service nova] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Updating instance_info_cache with network_info: [{"id": "2e2d8403-826c-4e24-ba3c-123d444d1fdc", "address": "fa:16:3e:09:d0:a2", "network": {"id": "2b335013-49f6-4f84-b6b6-33d71818b2b9", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-2106845332-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "11332c2adbdc41928d4bf084435e2037", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f01bbee7-8b9a-46be-891e-59d8142fb359", "external-id": "nsx-vlan-transportzone-145", "segmentation_id": 145, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2e2d8403-82", "ovs_interfaceid": "2e2d8403-826c-4e24-ba3c-123d444d1fdc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 788.274520] env[63028]: DEBUG nova.compute.manager [None req-efa009e1-31b4-4810-870c-cc093d5cbf71 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: 52b19182-a7e2-4461-b4eb-e6cd8a30024e] Start spawning the instance on the hypervisor. {{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 788.497625] env[63028]: DEBUG oslo_vmware.api [None req-3a4e4332-06ce-40fd-b127-c2bc84c3f268 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2735490, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.569580] env[63028]: DEBUG oslo_concurrency.lockutils [None req-42a4e41f-5fdd-4dd8-bb5e-bc5dbf266ec6 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.335s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 788.573069] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7375b29d-7ac6-4525-bc12-7fc987c16da6 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.628s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 788.573069] env[63028]: DEBUG nova.objects.instance [None req-7375b29d-7ac6-4525-bc12-7fc987c16da6 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Lazy-loading 'resources' on Instance uuid 0d96ba8e-b46b-48ae-957c-cdc49762c395 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 788.584374] env[63028]: DEBUG oslo_concurrency.lockutils [req-a46b9d80-bed8-40b2-9994-a08a2a14979a req-e8da96fe-0345-415f-9c88-1776fd747e6e service nova] Releasing lock "refresh_cache-85aafadb-81d6-4687-aed1-fbe829e5f95f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 788.584649] env[63028]: DEBUG nova.compute.manager [req-a46b9d80-bed8-40b2-9994-a08a2a14979a req-e8da96fe-0345-415f-9c88-1776fd747e6e service nova] [instance: 5a340e31-678c-437e-aa4e-07d5d9f4334f] Received event network-vif-deleted-39f160e2-809e-4b2c-9424-70448b807385 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 788.597825] env[63028]: INFO nova.scheduler.client.report [None req-42a4e41f-5fdd-4dd8-bb5e-bc5dbf266ec6 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Deleted allocations for instance 5a330ed9-c106-49f2-b524-a424e717b5ce [ 788.848626] env[63028]: DEBUG nova.network.neutron [None req-efa009e1-31b4-4810-870c-cc093d5cbf71 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: 52b19182-a7e2-4461-b4eb-e6cd8a30024e] Successfully updated port: 7dd5be5d-a88d-4dcd-a42d-7842895207f7 {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 788.999020] env[63028]: DEBUG oslo_vmware.api [None req-3a4e4332-06ce-40fd-b127-c2bc84c3f268 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2735490, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.676477} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 788.999416] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a4e4332-06ce-40fd-b127-c2bc84c3f268 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] 85aafadb-81d6-4687-aed1-fbe829e5f95f/85aafadb-81d6-4687-aed1-fbe829e5f95f.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 788.999657] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-3a4e4332-06ce-40fd-b127-c2bc84c3f268 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 788.999929] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d0246df7-adb9-43da-aea2-4676252737f8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.009220] env[63028]: DEBUG oslo_vmware.api [None req-3a4e4332-06ce-40fd-b127-c2bc84c3f268 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Waiting for the task: (returnval){ [ 789.009220] env[63028]: value = "task-2735491" [ 789.009220] env[63028]: _type = "Task" [ 789.009220] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.018724] env[63028]: DEBUG oslo_vmware.api [None req-3a4e4332-06ce-40fd-b127-c2bc84c3f268 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2735491, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.106271] env[63028]: DEBUG oslo_concurrency.lockutils [None req-42a4e41f-5fdd-4dd8-bb5e-bc5dbf266ec6 tempest-ServerRescueTestJSON-1583928659 tempest-ServerRescueTestJSON-1583928659-project-member] Lock "5a330ed9-c106-49f2-b524-a424e717b5ce" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.101s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 789.354806] env[63028]: DEBUG oslo_concurrency.lockutils [None req-efa009e1-31b4-4810-870c-cc093d5cbf71 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Acquiring lock "refresh_cache-52b19182-a7e2-4461-b4eb-e6cd8a30024e" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 789.355137] env[63028]: DEBUG oslo_concurrency.lockutils [None req-efa009e1-31b4-4810-870c-cc093d5cbf71 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Acquired lock "refresh_cache-52b19182-a7e2-4461-b4eb-e6cd8a30024e" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 789.355137] env[63028]: DEBUG nova.network.neutron [None req-efa009e1-31b4-4810-870c-cc093d5cbf71 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: 52b19182-a7e2-4461-b4eb-e6cd8a30024e] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 789.521464] env[63028]: DEBUG oslo_vmware.api [None req-3a4e4332-06ce-40fd-b127-c2bc84c3f268 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2735491, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.121699} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 789.521693] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-3a4e4332-06ce-40fd-b127-c2bc84c3f268 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 789.525090] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f71bd89-8694-4f60-a3cc-f2bc595bbbfe {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.551507] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-3a4e4332-06ce-40fd-b127-c2bc84c3f268 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Reconfiguring VM instance instance-00000036 to attach disk [datastore2] 85aafadb-81d6-4687-aed1-fbe829e5f95f/85aafadb-81d6-4687-aed1-fbe829e5f95f.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 789.554681] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d830489e-dc89-4d90-b988-9689a8df975f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.577782] env[63028]: DEBUG oslo_vmware.api [None req-3a4e4332-06ce-40fd-b127-c2bc84c3f268 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Waiting for the task: (returnval){ [ 789.577782] env[63028]: value = "task-2735492" [ 789.577782] env[63028]: _type = "Task" [ 789.577782] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.590581] env[63028]: DEBUG oslo_vmware.api [None req-3a4e4332-06ce-40fd-b127-c2bc84c3f268 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2735492, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.620900] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f72dabb9-0bb0-422e-8743-27529718e160 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.628978] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e47c1b41-967c-46be-9d9d-5972a695c5cf {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.662277] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-766e84a8-2eb1-4ad7-aae4-389f2bc289c2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.673482] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16c776a8-cb08-4fbe-903d-07c27ec8908d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.689680] env[63028]: DEBUG nova.compute.provider_tree [None req-7375b29d-7ac6-4525-bc12-7fc987c16da6 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 789.893987] env[63028]: DEBUG nova.network.neutron [None req-efa009e1-31b4-4810-870c-cc093d5cbf71 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: 52b19182-a7e2-4461-b4eb-e6cd8a30024e] Instance cache missing network info. 
{{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 790.045454] env[63028]: DEBUG nova.network.neutron [None req-efa009e1-31b4-4810-870c-cc093d5cbf71 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: 52b19182-a7e2-4461-b4eb-e6cd8a30024e] Updating instance_info_cache with network_info: [{"id": "7dd5be5d-a88d-4dcd-a42d-7842895207f7", "address": "fa:16:3e:45:d9:fe", "network": {"id": "47c482bc-2ff1-431d-8910-0bf36def79a2", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.36", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "96661ac8d4f04d6e97eea4809b444133", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56136ef6-99d7-4562-9a9f-d66fec951c5c", "external-id": "nsx-vlan-transportzone-32", "segmentation_id": 32, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7dd5be5d-a8", "ovs_interfaceid": "7dd5be5d-a88d-4dcd-a42d-7842895207f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 790.089269] env[63028]: DEBUG oslo_vmware.api [None req-3a4e4332-06ce-40fd-b127-c2bc84c3f268 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2735492, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.193498] env[63028]: DEBUG nova.scheduler.client.report [None req-7375b29d-7ac6-4525-bc12-7fc987c16da6 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 790.548742] env[63028]: DEBUG oslo_concurrency.lockutils [None req-efa009e1-31b4-4810-870c-cc093d5cbf71 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Releasing lock "refresh_cache-52b19182-a7e2-4461-b4eb-e6cd8a30024e" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 790.549218] env[63028]: DEBUG nova.compute.manager [None req-efa009e1-31b4-4810-870c-cc093d5cbf71 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: 52b19182-a7e2-4461-b4eb-e6cd8a30024e] Instance network_info: |[{"id": "7dd5be5d-a88d-4dcd-a42d-7842895207f7", "address": "fa:16:3e:45:d9:fe", "network": {"id": "47c482bc-2ff1-431d-8910-0bf36def79a2", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.36", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "96661ac8d4f04d6e97eea4809b444133", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56136ef6-99d7-4562-9a9f-d66fec951c5c", "external-id": "nsx-vlan-transportzone-32", "segmentation_id": 32, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7dd5be5d-a8", "ovs_interfaceid": "7dd5be5d-a88d-4dcd-a42d-7842895207f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 790.589523] env[63028]: DEBUG oslo_vmware.api [None req-3a4e4332-06ce-40fd-b127-c2bc84c3f268 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2735492, 'name': ReconfigVM_Task, 'duration_secs': 0.696861} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.589971] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-3a4e4332-06ce-40fd-b127-c2bc84c3f268 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Reconfigured VM instance instance-00000036 to attach disk [datastore2] 85aafadb-81d6-4687-aed1-fbe829e5f95f/85aafadb-81d6-4687-aed1-fbe829e5f95f.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 790.590508] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cf1e60ed-5ddf-4335-8e02-49a4a19bee17 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.598387] env[63028]: DEBUG oslo_vmware.api [None req-3a4e4332-06ce-40fd-b127-c2bc84c3f268 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Waiting for the task: (returnval){ [ 790.598387] env[63028]: value = "task-2735493" [ 790.598387] env[63028]: _type = "Task" [ 790.598387] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.608576] env[63028]: DEBUG oslo_vmware.api [None req-3a4e4332-06ce-40fd-b127-c2bc84c3f268 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2735493, 'name': Rename_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.699990] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7375b29d-7ac6-4525-bc12-7fc987c16da6 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.128s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 790.703057] env[63028]: DEBUG oslo_concurrency.lockutils [None req-b094771b-ac22-43cb-a63f-5342863501ae tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.593s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 790.703057] env[63028]: DEBUG nova.objects.instance [None req-b094771b-ac22-43cb-a63f-5342863501ae tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] Lazy-loading 'resources' on Instance uuid 0e07a6cd-8c99-408d-95ba-63f7839c327f {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 790.718042] env[63028]: INFO nova.scheduler.client.report [None req-7375b29d-7ac6-4525-bc12-7fc987c16da6 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Deleted allocations for instance 0d96ba8e-b46b-48ae-957c-cdc49762c395 [ 791.110462] env[63028]: DEBUG oslo_vmware.api [None req-3a4e4332-06ce-40fd-b127-c2bc84c3f268 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2735493, 'name': Rename_Task, 'duration_secs': 0.212124} completed 
successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.110801] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a4e4332-06ce-40fd-b127-c2bc84c3f268 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 791.111127] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4d8904fa-4c9f-4536-ae4b-8c9b03afc605 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.119183] env[63028]: DEBUG oslo_vmware.api [None req-3a4e4332-06ce-40fd-b127-c2bc84c3f268 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Waiting for the task: (returnval){ [ 791.119183] env[63028]: value = "task-2735494" [ 791.119183] env[63028]: _type = "Task" [ 791.119183] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 791.128378] env[63028]: DEBUG oslo_vmware.api [None req-3a4e4332-06ce-40fd-b127-c2bc84c3f268 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2735494, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.226093] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7375b29d-7ac6-4525-bc12-7fc987c16da6 tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Lock "0d96ba8e-b46b-48ae-957c-cdc49762c395" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.923s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 791.630796] env[63028]: DEBUG oslo_vmware.api [None req-3a4e4332-06ce-40fd-b127-c2bc84c3f268 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2735494, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.730441] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a60e190-80ca-4ca2-92e7-624104b88cb4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.740365] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70417a9b-e9a7-48ec-a22b-8c548b39d569 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.777503] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6534964e-4429-43fa-8e16-ba898d25ad9c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.787096] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b9f380b-4164-4a27-9312-8b2a879327dd {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.806445] env[63028]: DEBUG nova.compute.provider_tree [None req-b094771b-ac22-43cb-a63f-5342863501ae tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 792.131075] env[63028]: DEBUG oslo_vmware.api [None req-3a4e4332-06ce-40fd-b127-c2bc84c3f268 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2735494, 'name': PowerOnVM_Task, 'duration_secs': 0.576921} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 792.131467] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a4e4332-06ce-40fd-b127-c2bc84c3f268 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 792.131703] env[63028]: INFO nova.compute.manager [None req-3a4e4332-06ce-40fd-b127-c2bc84c3f268 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Took 8.92 seconds to spawn the instance on the hypervisor. 
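[editor's note] The PowerOnVM_Task record above, with its "Waiting for the task … to complete", "progress is N%", and "completed successfully" lines, is oslo.vmware's task-polling loop. A minimal sketch of that pattern, assuming a VM managed-object reference (`vm_ref`) has already been looked up; the vCenter host name and credentials below are placeholders, not values from this deployment:

    from oslo_vmware import api as vmware_api

    # Placeholder connection details; in a real deployment these come from
    # the [vmware] section of nova.conf.
    session = vmware_api.VMwareAPISession(
        'vc1.example.test', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    def power_on(vm_ref):
        # PowerOnVM_Task returns a Task managed object; wait_for_task()
        # polls it (logging the "Task: {...} progress is N%" lines) until
        # it reaches the 'success' state, then returns the task info.
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        return session.wait_for_task(task)
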
[ 792.131966] env[63028]: DEBUG nova.compute.manager [None req-3a4e4332-06ce-40fd-b127-c2bc84c3f268 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 792.132864] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5c2750d-4937-41ec-95d3-53cf595d7b75 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.310158] env[63028]: DEBUG nova.scheduler.client.report [None req-b094771b-ac22-43cb-a63f-5342863501ae tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 792.654082] env[63028]: INFO nova.compute.manager [None req-3a4e4332-06ce-40fd-b127-c2bc84c3f268 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Took 40.79 seconds to build instance. [ 792.816063] env[63028]: DEBUG oslo_concurrency.lockutils [None req-b094771b-ac22-43cb-a63f-5342863501ae tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.113s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 792.818805] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ded586eb-1e36-4d00-b6ab-5bb55325d953 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 35.656s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 792.819096] env[63028]: DEBUG nova.objects.instance [None req-ded586eb-1e36-4d00-b6ab-5bb55325d953 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] Lazy-loading 'resources' on Instance uuid c0db2b2a-9c06-409c-b48b-a0d5c127f2dc {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 792.837101] env[63028]: INFO nova.scheduler.client.report [None req-b094771b-ac22-43cb-a63f-5342863501ae tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] Deleted allocations for instance 0e07a6cd-8c99-408d-95ba-63f7839c327f [ 793.156291] env[63028]: DEBUG oslo_concurrency.lockutils [None req-3a4e4332-06ce-40fd-b127-c2bc84c3f268 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Lock "85aafadb-81d6-4687-aed1-fbe829e5f95f" "released" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 64.274s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 793.347710] env[63028]: DEBUG oslo_concurrency.lockutils [None req-b094771b-ac22-43cb-a63f-5342863501ae tempest-AttachInterfacesUnderV243Test-174505349 tempest-AttachInterfacesUnderV243Test-174505349-project-member] Lock "0e07a6cd-8c99-408d-95ba-63f7839c327f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 40.236s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 793.660411] env[63028]: DEBUG nova.compute.manager [None req-b709110c-6e00-4eb2-85eb-129887ad428d tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] [instance: 13e0ca05-3ab3-43e2-8b0d-8045e26d6723] Starting instance... {{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 793.942217] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f305e4a8-909c-4c93-ab77-a41476ba0d38 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.951238] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ac302be-5ef7-4774-8e5e-edae5045cf1d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.985445] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d9e76fc-021c-4684-8860-fe686624c2d4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.994602] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e5531a0-98bc-4414-aa0a-e016c3d9a89a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.009884] env[63028]: DEBUG nova.compute.provider_tree [None req-ded586eb-1e36-4d00-b6ab-5bb55325d953 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 794.106768] env[63028]: DEBUG nova.virt.hardware [None req-efa009e1-31b4-4810-870c-cc093d5cbf71 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:57:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='bd41cb6c-4f59-47c8-83b3-1b15a3032d86',id=38,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-149043815',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), 
allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 794.107419] env[63028]: DEBUG nova.virt.hardware [None req-efa009e1-31b4-4810-870c-cc093d5cbf71 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 794.107905] env[63028]: DEBUG nova.virt.hardware [None req-efa009e1-31b4-4810-870c-cc093d5cbf71 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 794.107905] env[63028]: DEBUG nova.virt.hardware [None req-efa009e1-31b4-4810-870c-cc093d5cbf71 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 794.108028] env[63028]: DEBUG nova.virt.hardware [None req-efa009e1-31b4-4810-870c-cc093d5cbf71 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 794.108104] env[63028]: DEBUG nova.virt.hardware [None req-efa009e1-31b4-4810-870c-cc093d5cbf71 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 794.108307] env[63028]: DEBUG nova.virt.hardware [None req-efa009e1-31b4-4810-870c-cc093d5cbf71 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 794.108458] env[63028]: DEBUG nova.virt.hardware [None req-efa009e1-31b4-4810-870c-cc093d5cbf71 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 794.108706] env[63028]: DEBUG nova.virt.hardware [None req-efa009e1-31b4-4810-870c-cc093d5cbf71 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 794.108906] env[63028]: DEBUG nova.virt.hardware [None req-efa009e1-31b4-4810-870c-cc093d5cbf71 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 794.109110] env[63028]: DEBUG nova.virt.hardware [None req-efa009e1-31b4-4810-870c-cc093d5cbf71 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 794.111301] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-5644db70-8c3f-4cc7-9e3e-178fdeb8c0a0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.122514] env[63028]: DEBUG oslo_vmware.rw_handles [None req-33c2d3e2-8c5f-498e-bb96-3a2d78173ced tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/529aec82-3d3c-151b-5e3e-11fe98f94696/disk-0.vmdk. {{(pid=63028) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 794.123839] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03c40902-d44d-49cf-ac73-c51d6bbd0c94 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.129441] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9081b33d-4363-464f-901c-65f3d4d73d37 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.142980] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-efa009e1-31b4-4810-870c-cc093d5cbf71 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: 52b19182-a7e2-4461-b4eb-e6cd8a30024e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:45:d9:fe', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '56136ef6-99d7-4562-9a9f-d66fec951c5c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7dd5be5d-a88d-4dcd-a42d-7842895207f7', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 794.150484] env[63028]: DEBUG oslo.service.loopingcall [None req-efa009e1-31b4-4810-870c-cc093d5cbf71 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 794.157889] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 52b19182-a7e2-4461-b4eb-e6cd8a30024e] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 794.157889] env[63028]: DEBUG oslo_vmware.rw_handles [None req-33c2d3e2-8c5f-498e-bb96-3a2d78173ced tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/529aec82-3d3c-151b-5e3e-11fe98f94696/disk-0.vmdk is in state: ready. {{(pid=63028) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 794.157889] env[63028]: ERROR oslo_vmware.rw_handles [None req-33c2d3e2-8c5f-498e-bb96-3a2d78173ced tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/529aec82-3d3c-151b-5e3e-11fe98f94696/disk-0.vmdk due to incomplete transfer. 
[ 794.159691] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e3512646-e1f1-4909-b5e6-c4d0083efe1f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.173278] env[63028]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-cad89e44-c712-4dfb-b1b9-376d6e3dadbc {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.176415] env[63028]: DEBUG oslo_vmware.rw_handles [None req-644d24eb-7e16-443d-ae0d-76156aa87551 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5206934e-c105-f1f4-1693-3b63024e6dee/disk-0.vmdk. {{(pid=63028) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 794.179266] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c7450c1-2b52-41cc-8ed0-825e5b13df7d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.188038] env[63028]: DEBUG oslo_vmware.rw_handles [None req-644d24eb-7e16-443d-ae0d-76156aa87551 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5206934e-c105-f1f4-1693-3b63024e6dee/disk-0.vmdk is in state: ready. {{(pid=63028) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 794.188357] env[63028]: ERROR oslo_vmware.rw_handles [None req-644d24eb-7e16-443d-ae0d-76156aa87551 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5206934e-c105-f1f4-1693-3b63024e6dee/disk-0.vmdk due to incomplete transfer. [ 794.188544] env[63028]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-e1bd3ce3-cabf-443a-889b-94026fc23920 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.193028] env[63028]: DEBUG oslo_vmware.rw_handles [None req-33c2d3e2-8c5f-498e-bb96-3a2d78173ced tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/529aec82-3d3c-151b-5e3e-11fe98f94696/disk-0.vmdk. 
{{(pid=63028) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 794.193028] env[63028]: DEBUG nova.virt.vmwareapi.images [None req-33c2d3e2-8c5f-498e-bb96-3a2d78173ced tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Uploaded image 40205a3b-c684-4015-938e-72089044b955 to the Glance image server {{(pid=63028) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 794.195131] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-33c2d3e2-8c5f-498e-bb96-3a2d78173ced tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Destroying the VM {{(pid=63028) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 794.195297] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 794.195297] env[63028]: value = "task-2735495" [ 794.195297] env[63028]: _type = "Task" [ 794.195297] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.196165] env[63028]: DEBUG oslo_concurrency.lockutils [None req-b709110c-6e00-4eb2-85eb-129887ad428d tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 794.196747] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-13d5d9c6-a8f0-442b-ae2e-ed4af9ba2aa5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.204599] env[63028]: DEBUG oslo_vmware.rw_handles [None req-644d24eb-7e16-443d-ae0d-76156aa87551 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5206934e-c105-f1f4-1693-3b63024e6dee/disk-0.vmdk. {{(pid=63028) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 794.204858] env[63028]: DEBUG nova.virt.vmwareapi.images [None req-644d24eb-7e16-443d-ae0d-76156aa87551 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 5982cd5d-abf1-42d4-bb44-8d79de599f11] Uploaded image 6d115202-aa15-409e-8b5a-b19083ab5b5f to the Glance image server {{(pid=63028) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 794.206452] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-644d24eb-7e16-443d-ae0d-76156aa87551 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 5982cd5d-abf1-42d4-bb44-8d79de599f11] Destroying the VM {{(pid=63028) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 794.211085] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-dadce894-2092-446d-a5f2-b6ad1306eeb5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.212637] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735495, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.212969] env[63028]: DEBUG oslo_vmware.api [None req-33c2d3e2-8c5f-498e-bb96-3a2d78173ced tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Waiting for the task: (returnval){ [ 794.212969] env[63028]: value = "task-2735496" [ 794.212969] env[63028]: _type = "Task" [ 794.212969] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.218996] env[63028]: DEBUG oslo_vmware.api [None req-644d24eb-7e16-443d-ae0d-76156aa87551 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Waiting for the task: (returnval){ [ 794.218996] env[63028]: value = "task-2735497" [ 794.218996] env[63028]: _type = "Task" [ 794.218996] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.222062] env[63028]: DEBUG oslo_vmware.api [None req-33c2d3e2-8c5f-498e-bb96-3a2d78173ced tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2735496, 'name': Destroy_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.232859] env[63028]: DEBUG oslo_vmware.api [None req-644d24eb-7e16-443d-ae0d-76156aa87551 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Task: {'id': task-2735497, 'name': Destroy_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.513995] env[63028]: DEBUG nova.scheduler.client.report [None req-ded586eb-1e36-4d00-b6ab-5bb55325d953 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 794.713677] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735495, 'name': CreateVM_Task, 'duration_secs': 0.491111} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 794.714523] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 52b19182-a7e2-4461-b4eb-e6cd8a30024e] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 794.718432] env[63028]: DEBUG oslo_concurrency.lockutils [None req-efa009e1-31b4-4810-870c-cc093d5cbf71 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 794.718863] env[63028]: DEBUG oslo_concurrency.lockutils [None req-efa009e1-31b4-4810-870c-cc093d5cbf71 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 794.719689] env[63028]: DEBUG oslo_concurrency.lockutils [None req-efa009e1-31b4-4810-870c-cc093d5cbf71 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 794.719689] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3568d4f7-eff1-4c0d-a89b-58ab9ec5904b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.730827] env[63028]: DEBUG oslo_vmware.api [None req-33c2d3e2-8c5f-498e-bb96-3a2d78173ced tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2735496, 'name': Destroy_Task, 'duration_secs': 0.384374} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 794.736243] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-33c2d3e2-8c5f-498e-bb96-3a2d78173ced tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Destroyed the VM [ 794.736243] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-33c2d3e2-8c5f-498e-bb96-3a2d78173ced tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Deleting Snapshot of the VM instance {{(pid=63028) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 794.736243] env[63028]: DEBUG oslo_vmware.api [None req-efa009e1-31b4-4810-870c-cc093d5cbf71 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Waiting for the task: (returnval){ [ 794.736243] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52723e72-b277-4205-c24f-25ad3a2a7bad" [ 794.736243] env[63028]: _type = "Task" [ 794.736243] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.736243] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-4236f64c-40e6-4507-a332-2a4de6dcb009 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.742347] env[63028]: DEBUG oslo_vmware.api [None req-644d24eb-7e16-443d-ae0d-76156aa87551 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Task: {'id': task-2735497, 'name': Destroy_Task, 'duration_secs': 0.449559} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 794.743601] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-644d24eb-7e16-443d-ae0d-76156aa87551 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 5982cd5d-abf1-42d4-bb44-8d79de599f11] Destroyed the VM [ 794.745297] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-644d24eb-7e16-443d-ae0d-76156aa87551 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 5982cd5d-abf1-42d4-bb44-8d79de599f11] Deleting Snapshot of the VM instance {{(pid=63028) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 794.746907] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-05ad78d4-0d02-4590-8617-b896dbd65c61 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.754101] env[63028]: DEBUG oslo_vmware.api [None req-efa009e1-31b4-4810-870c-cc093d5cbf71 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52723e72-b277-4205-c24f-25ad3a2a7bad, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.754761] env[63028]: DEBUG oslo_vmware.api [None req-33c2d3e2-8c5f-498e-bb96-3a2d78173ced tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Waiting for the task: (returnval){ [ 794.754761] env[63028]: value = "task-2735498" [ 794.754761] env[63028]: _type = "Task" [ 794.754761] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.760773] env[63028]: DEBUG oslo_vmware.api [None req-644d24eb-7e16-443d-ae0d-76156aa87551 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Waiting for the task: (returnval){ [ 794.760773] env[63028]: value = "task-2735499" [ 794.760773] env[63028]: _type = "Task" [ 794.760773] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.765109] env[63028]: DEBUG oslo_vmware.api [None req-33c2d3e2-8c5f-498e-bb96-3a2d78173ced tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2735498, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.778745] env[63028]: DEBUG oslo_vmware.api [None req-644d24eb-7e16-443d-ae0d-76156aa87551 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Task: {'id': task-2735499, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.972220] env[63028]: DEBUG nova.compute.manager [req-832afd9d-a12c-42f6-9d38-7c877aa66894 req-56257bb0-b17b-4087-976e-724eff740e3a service nova] [instance: 52b19182-a7e2-4461-b4eb-e6cd8a30024e] Received event network-vif-plugged-7dd5be5d-a88d-4dcd-a42d-7842895207f7 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 794.972220] env[63028]: DEBUG oslo_concurrency.lockutils [req-832afd9d-a12c-42f6-9d38-7c877aa66894 req-56257bb0-b17b-4087-976e-724eff740e3a service nova] Acquiring lock "52b19182-a7e2-4461-b4eb-e6cd8a30024e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 794.972220] env[63028]: DEBUG oslo_concurrency.lockutils [req-832afd9d-a12c-42f6-9d38-7c877aa66894 req-56257bb0-b17b-4087-976e-724eff740e3a service nova] Lock "52b19182-a7e2-4461-b4eb-e6cd8a30024e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 794.973196] env[63028]: DEBUG oslo_concurrency.lockutils [req-832afd9d-a12c-42f6-9d38-7c877aa66894 req-56257bb0-b17b-4087-976e-724eff740e3a service nova] Lock "52b19182-a7e2-4461-b4eb-e6cd8a30024e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 794.973196] env[63028]: DEBUG nova.compute.manager [req-832afd9d-a12c-42f6-9d38-7c877aa66894 req-56257bb0-b17b-4087-976e-724eff740e3a service nova] [instance: 52b19182-a7e2-4461-b4eb-e6cd8a30024e] No waiting events found dispatching network-vif-plugged-7dd5be5d-a88d-4dcd-a42d-7842895207f7 {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 794.973196] env[63028]: WARNING nova.compute.manager [req-832afd9d-a12c-42f6-9d38-7c877aa66894 req-56257bb0-b17b-4087-976e-724eff740e3a service nova] [instance: 52b19182-a7e2-4461-b4eb-e6cd8a30024e] Received unexpected event network-vif-plugged-7dd5be5d-a88d-4dcd-a42d-7842895207f7 for instance with vm_state building and task_state spawning. [ 794.973738] env[63028]: DEBUG nova.compute.manager [req-832afd9d-a12c-42f6-9d38-7c877aa66894 req-56257bb0-b17b-4087-976e-724eff740e3a service nova] [instance: 52b19182-a7e2-4461-b4eb-e6cd8a30024e] Received event network-changed-7dd5be5d-a88d-4dcd-a42d-7842895207f7 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 794.973852] env[63028]: DEBUG nova.compute.manager [req-832afd9d-a12c-42f6-9d38-7c877aa66894 req-56257bb0-b17b-4087-976e-724eff740e3a service nova] [instance: 52b19182-a7e2-4461-b4eb-e6cd8a30024e] Refreshing instance network info cache due to event network-changed-7dd5be5d-a88d-4dcd-a42d-7842895207f7. 
{{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 794.974302] env[63028]: DEBUG oslo_concurrency.lockutils [req-832afd9d-a12c-42f6-9d38-7c877aa66894 req-56257bb0-b17b-4087-976e-724eff740e3a service nova] Acquiring lock "refresh_cache-52b19182-a7e2-4461-b4eb-e6cd8a30024e" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 794.974404] env[63028]: DEBUG oslo_concurrency.lockutils [req-832afd9d-a12c-42f6-9d38-7c877aa66894 req-56257bb0-b17b-4087-976e-724eff740e3a service nova] Acquired lock "refresh_cache-52b19182-a7e2-4461-b4eb-e6cd8a30024e" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 794.975089] env[63028]: DEBUG nova.network.neutron [req-832afd9d-a12c-42f6-9d38-7c877aa66894 req-56257bb0-b17b-4087-976e-724eff740e3a service nova] [instance: 52b19182-a7e2-4461-b4eb-e6cd8a30024e] Refreshing network info cache for port 7dd5be5d-a88d-4dcd-a42d-7842895207f7 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 795.021312] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ded586eb-1e36-4d00-b6ab-5bb55325d953 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.201s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 795.024645] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8e94b42e-cf5b-4db5-8cf9-7f06a8d06718 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 37.603s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 795.026062] env[63028]: DEBUG nova.objects.instance [None req-8e94b42e-cf5b-4db5-8cf9-7f06a8d06718 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 50e4934b-b9b1-4887-b5d1-95a37fbf4c41] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63028) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 795.053611] env[63028]: INFO nova.scheduler.client.report [None req-ded586eb-1e36-4d00-b6ab-5bb55325d953 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] Deleted allocations for instance c0db2b2a-9c06-409c-b48b-a0d5c127f2dc [ 795.229969] env[63028]: DEBUG oslo_concurrency.lockutils [None req-81b1346f-eb21-48c7-8ffd-fb557f6b5405 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Acquiring lock "4e859327-ccd3-440e-b884-67f6cdadf97f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 795.230261] env[63028]: DEBUG oslo_concurrency.lockutils [None req-81b1346f-eb21-48c7-8ffd-fb557f6b5405 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Lock "4e859327-ccd3-440e-b884-67f6cdadf97f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63028) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 795.248786] env[63028]: DEBUG oslo_vmware.api [None req-efa009e1-31b4-4810-870c-cc093d5cbf71 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52723e72-b277-4205-c24f-25ad3a2a7bad, 'name': SearchDatastore_Task, 'duration_secs': 0.021808} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.249232] env[63028]: DEBUG oslo_concurrency.lockutils [None req-efa009e1-31b4-4810-870c-cc093d5cbf71 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 795.249555] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-efa009e1-31b4-4810-870c-cc093d5cbf71 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: 52b19182-a7e2-4461-b4eb-e6cd8a30024e] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 795.250014] env[63028]: DEBUG oslo_concurrency.lockutils [None req-efa009e1-31b4-4810-870c-cc093d5cbf71 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 795.250195] env[63028]: DEBUG oslo_concurrency.lockutils [None req-efa009e1-31b4-4810-870c-cc093d5cbf71 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 795.252020] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-efa009e1-31b4-4810-870c-cc093d5cbf71 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 795.252020] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a64e25b7-4012-4c9b-affa-2afddc786b4d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.265026] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-efa009e1-31b4-4810-870c-cc093d5cbf71 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 795.265026] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-efa009e1-31b4-4810-870c-cc093d5cbf71 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 795.266668] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-94cc9ec3-3268-461f-b745-1421eede5722 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.270250] env[63028]: DEBUG oslo_vmware.api [None req-33c2d3e2-8c5f-498e-bb96-3a2d78173ced tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2735498, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.280755] env[63028]: DEBUG oslo_vmware.api [None req-efa009e1-31b4-4810-870c-cc093d5cbf71 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Waiting for the task: (returnval){ [ 795.280755] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5227f46f-1439-ad5e-8831-ad0bce8cd044" [ 795.280755] env[63028]: _type = "Task" [ 795.280755] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 795.288222] env[63028]: DEBUG oslo_vmware.api [None req-644d24eb-7e16-443d-ae0d-76156aa87551 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Task: {'id': task-2735499, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.297800] env[63028]: DEBUG oslo_vmware.api [None req-efa009e1-31b4-4810-870c-cc093d5cbf71 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5227f46f-1439-ad5e-8831-ad0bce8cd044, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.564411] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ded586eb-1e36-4d00-b6ab-5bb55325d953 tempest-ServersTestFqdnHostnames-804115136 tempest-ServersTestFqdnHostnames-804115136-project-member] Lock "c0db2b2a-9c06-409c-b48b-a0d5c127f2dc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 42.174s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 795.775101] env[63028]: DEBUG oslo_vmware.api [None req-33c2d3e2-8c5f-498e-bb96-3a2d78173ced tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2735498, 'name': RemoveSnapshot_Task, 'duration_secs': 0.933828} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.780709] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-33c2d3e2-8c5f-498e-bb96-3a2d78173ced tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Deleted Snapshot of the VM instance {{(pid=63028) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 795.781054] env[63028]: INFO nova.compute.manager [None req-33c2d3e2-8c5f-498e-bb96-3a2d78173ced tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Took 15.70 seconds to snapshot the instance on the hypervisor. [ 795.784625] env[63028]: DEBUG oslo_concurrency.lockutils [None req-e7a62a25-939b-4556-8d49-f66e9a7f16ee tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Acquiring lock "1af19279-e75b-4ec5-91f1-a0a101b229b2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 795.784754] env[63028]: DEBUG oslo_concurrency.lockutils [None req-e7a62a25-939b-4556-8d49-f66e9a7f16ee tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Lock "1af19279-e75b-4ec5-91f1-a0a101b229b2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 795.784892] env[63028]: DEBUG oslo_concurrency.lockutils [None req-e7a62a25-939b-4556-8d49-f66e9a7f16ee tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Acquiring lock "1af19279-e75b-4ec5-91f1-a0a101b229b2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 795.785062] env[63028]: DEBUG oslo_concurrency.lockutils [None req-e7a62a25-939b-4556-8d49-f66e9a7f16ee tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Lock "1af19279-e75b-4ec5-91f1-a0a101b229b2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 795.785335] env[63028]: DEBUG oslo_concurrency.lockutils [None req-e7a62a25-939b-4556-8d49-f66e9a7f16ee tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Lock "1af19279-e75b-4ec5-91f1-a0a101b229b2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 795.794043] env[63028]: INFO nova.compute.manager [None req-e7a62a25-939b-4556-8d49-f66e9a7f16ee tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 1af19279-e75b-4ec5-91f1-a0a101b229b2] Terminating instance [ 795.801822] env[63028]: DEBUG oslo_vmware.api [None req-644d24eb-7e16-443d-ae0d-76156aa87551 tempest-ListImageFiltersTestJSON-852699088 
tempest-ListImageFiltersTestJSON-852699088-project-member] Task: {'id': task-2735499, 'name': RemoveSnapshot_Task, 'duration_secs': 0.933179} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.803243] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-644d24eb-7e16-443d-ae0d-76156aa87551 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 5982cd5d-abf1-42d4-bb44-8d79de599f11] Deleted Snapshot of the VM instance {{(pid=63028) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 795.803734] env[63028]: INFO nova.compute.manager [None req-644d24eb-7e16-443d-ae0d-76156aa87551 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 5982cd5d-abf1-42d4-bb44-8d79de599f11] Took 14.44 seconds to snapshot the instance on the hypervisor. [ 795.811019] env[63028]: DEBUG oslo_vmware.api [None req-efa009e1-31b4-4810-870c-cc093d5cbf71 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5227f46f-1439-ad5e-8831-ad0bce8cd044, 'name': SearchDatastore_Task, 'duration_secs': 0.022168} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.811178] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-472ab966-120c-4a0e-9287-4f6f1da0e997 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.819130] env[63028]: DEBUG oslo_vmware.api [None req-efa009e1-31b4-4810-870c-cc093d5cbf71 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Waiting for the task: (returnval){ [ 795.819130] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52b26a14-16bf-0889-516a-4482cb020d18" [ 795.819130] env[63028]: _type = "Task" [ 795.819130] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 795.831058] env[63028]: DEBUG oslo_vmware.api [None req-efa009e1-31b4-4810-870c-cc093d5cbf71 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52b26a14-16bf-0889-516a-4482cb020d18, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.939385] env[63028]: DEBUG nova.network.neutron [req-832afd9d-a12c-42f6-9d38-7c877aa66894 req-56257bb0-b17b-4087-976e-724eff740e3a service nova] [instance: 52b19182-a7e2-4461-b4eb-e6cd8a30024e] Updated VIF entry in instance network info cache for port 7dd5be5d-a88d-4dcd-a42d-7842895207f7. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 795.939385] env[63028]: DEBUG nova.network.neutron [req-832afd9d-a12c-42f6-9d38-7c877aa66894 req-56257bb0-b17b-4087-976e-724eff740e3a service nova] [instance: 52b19182-a7e2-4461-b4eb-e6cd8a30024e] Updating instance_info_cache with network_info: [{"id": "7dd5be5d-a88d-4dcd-a42d-7842895207f7", "address": "fa:16:3e:45:d9:fe", "network": {"id": "47c482bc-2ff1-431d-8910-0bf36def79a2", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.36", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "96661ac8d4f04d6e97eea4809b444133", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56136ef6-99d7-4562-9a9f-d66fec951c5c", "external-id": "nsx-vlan-transportzone-32", "segmentation_id": 32, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7dd5be5d-a8", "ovs_interfaceid": "7dd5be5d-a88d-4dcd-a42d-7842895207f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 796.040282] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8e94b42e-cf5b-4db5-8cf9-7f06a8d06718 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.014s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 796.040282] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d9433184-003c-41fe-b52d-bd01162a28cf tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 37.891s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 796.040282] env[63028]: DEBUG nova.objects.instance [None req-d9433184-003c-41fe-b52d-bd01162a28cf tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] Lazy-loading 'resources' on Instance uuid 8f6beda6-0fc6-4d85-9f27-f4248adda8f3 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 796.309097] env[63028]: DEBUG nova.compute.manager [None req-e7a62a25-939b-4556-8d49-f66e9a7f16ee tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 1af19279-e75b-4ec5-91f1-a0a101b229b2] Start destroying the instance on the hypervisor. 
{{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 796.309097] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-e7a62a25-939b-4556-8d49-f66e9a7f16ee tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 1af19279-e75b-4ec5-91f1-a0a101b229b2] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 796.309097] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a40eb39-e8ec-483c-89a0-7b3412c003e2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.326077] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-e7a62a25-939b-4556-8d49-f66e9a7f16ee tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 1af19279-e75b-4ec5-91f1-a0a101b229b2] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 796.332493] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d3fa49cc-112a-4e46-badb-2b184d14cac1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.347891] env[63028]: DEBUG oslo_vmware.api [None req-efa009e1-31b4-4810-870c-cc093d5cbf71 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52b26a14-16bf-0889-516a-4482cb020d18, 'name': SearchDatastore_Task, 'duration_secs': 0.017019} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 796.347891] env[63028]: DEBUG oslo_concurrency.lockutils [None req-efa009e1-31b4-4810-870c-cc093d5cbf71 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 796.347891] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-efa009e1-31b4-4810-870c-cc093d5cbf71 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] 52b19182-a7e2-4461-b4eb-e6cd8a30024e/52b19182-a7e2-4461-b4eb-e6cd8a30024e.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 796.348595] env[63028]: DEBUG oslo_vmware.api [None req-e7a62a25-939b-4556-8d49-f66e9a7f16ee tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Waiting for the task: (returnval){ [ 796.348595] env[63028]: value = "task-2735500" [ 796.348595] env[63028]: _type = "Task" [ 796.348595] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.348595] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e77a7f40-4c2c-45ee-89bf-4e27796463bc {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.378245] env[63028]: DEBUG oslo_vmware.api [None req-e7a62a25-939b-4556-8d49-f66e9a7f16ee tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Task: {'id': task-2735500, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.378245] env[63028]: DEBUG oslo_vmware.api [None req-efa009e1-31b4-4810-870c-cc093d5cbf71 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Waiting for the task: (returnval){ [ 796.378245] env[63028]: value = "task-2735501" [ 796.378245] env[63028]: _type = "Task" [ 796.378245] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.382587] env[63028]: DEBUG oslo_vmware.api [None req-efa009e1-31b4-4810-870c-cc093d5cbf71 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Task: {'id': task-2735501, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.409468] env[63028]: DEBUG nova.compute.manager [None req-33c2d3e2-8c5f-498e-bb96-3a2d78173ced tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Found 1 images (rotation: 2) {{(pid=63028) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 796.441263] env[63028]: DEBUG oslo_concurrency.lockutils [req-832afd9d-a12c-42f6-9d38-7c877aa66894 req-56257bb0-b17b-4087-976e-724eff740e3a service nova] Releasing lock "refresh_cache-52b19182-a7e2-4461-b4eb-e6cd8a30024e" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 796.864546] env[63028]: DEBUG oslo_vmware.api [None req-e7a62a25-939b-4556-8d49-f66e9a7f16ee tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Task: {'id': task-2735500, 'name': PowerOffVM_Task, 'duration_secs': 0.237965} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 796.868028] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-e7a62a25-939b-4556-8d49-f66e9a7f16ee tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 1af19279-e75b-4ec5-91f1-a0a101b229b2] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 796.868028] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-e7a62a25-939b-4556-8d49-f66e9a7f16ee tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 1af19279-e75b-4ec5-91f1-a0a101b229b2] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 796.869497] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cee1b774-5c7a-4907-94ac-b8265ee25cdc {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.883788] env[63028]: DEBUG oslo_vmware.api [None req-efa009e1-31b4-4810-870c-cc093d5cbf71 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Task: {'id': task-2735501, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.959318] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-e7a62a25-939b-4556-8d49-f66e9a7f16ee tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 1af19279-e75b-4ec5-91f1-a0a101b229b2] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 796.960296] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-e7a62a25-939b-4556-8d49-f66e9a7f16ee tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 1af19279-e75b-4ec5-91f1-a0a101b229b2] Deleting contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 796.960296] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-e7a62a25-939b-4556-8d49-f66e9a7f16ee tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Deleting the datastore file [datastore1] 1af19279-e75b-4ec5-91f1-a0a101b229b2 {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 796.960296] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5664affa-7187-411c-b5a5-09d104356506 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.975449] env[63028]: DEBUG oslo_vmware.api [None req-e7a62a25-939b-4556-8d49-f66e9a7f16ee tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Waiting for the task: (returnval){ [ 796.975449] env[63028]: value = "task-2735503" [ 796.975449] env[63028]: _type = "Task" [ 796.975449] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.988394] env[63028]: DEBUG oslo_vmware.api [None req-e7a62a25-939b-4556-8d49-f66e9a7f16ee tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Task: {'id': task-2735503, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.204498] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24ee2d87-aa3c-4108-8621-89ed56993549 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.215410] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe8e6c62-f856-4607-ad91-6ef4f4d3715d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.252050] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d110366-e904-4d0f-93d9-e6ce75a31d0a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.261033] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2244ce74-8645-4e23-a5e5-5fb11b928f56 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.278200] env[63028]: DEBUG nova.compute.provider_tree [None req-d9433184-003c-41fe-b52d-bd01162a28cf tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 797.378769] env[63028]: DEBUG oslo_vmware.api [None req-efa009e1-31b4-4810-870c-cc093d5cbf71 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Task: {'id': task-2735501, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.641569} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.379065] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-efa009e1-31b4-4810-870c-cc093d5cbf71 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] 52b19182-a7e2-4461-b4eb-e6cd8a30024e/52b19182-a7e2-4461-b4eb-e6cd8a30024e.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 797.379278] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-efa009e1-31b4-4810-870c-cc093d5cbf71 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: 52b19182-a7e2-4461-b4eb-e6cd8a30024e] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 797.379573] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4ee3f348-a731-4b9a-9039-ffdb5e359e6e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.392023] env[63028]: DEBUG oslo_vmware.api [None req-efa009e1-31b4-4810-870c-cc093d5cbf71 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Waiting for the task: (returnval){ [ 797.392023] env[63028]: value = "task-2735504" [ 797.392023] env[63028]: _type = "Task" [ 797.392023] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 797.400842] env[63028]: DEBUG oslo_vmware.api [None req-efa009e1-31b4-4810-870c-cc093d5cbf71 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Task: {'id': task-2735504, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.486910] env[63028]: DEBUG oslo_vmware.api [None req-e7a62a25-939b-4556-8d49-f66e9a7f16ee tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Task: {'id': task-2735503, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.306004} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.488623] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-e7a62a25-939b-4556-8d49-f66e9a7f16ee tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 797.488623] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-e7a62a25-939b-4556-8d49-f66e9a7f16ee tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 1af19279-e75b-4ec5-91f1-a0a101b229b2] Deleted contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 797.488623] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-e7a62a25-939b-4556-8d49-f66e9a7f16ee tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 1af19279-e75b-4ec5-91f1-a0a101b229b2] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 797.488623] env[63028]: INFO nova.compute.manager [None req-e7a62a25-939b-4556-8d49-f66e9a7f16ee tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 1af19279-e75b-4ec5-91f1-a0a101b229b2] Took 1.18 seconds to destroy the instance on the hypervisor. [ 797.488623] env[63028]: DEBUG oslo.service.loopingcall [None req-e7a62a25-939b-4556-8d49-f66e9a7f16ee tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 797.488623] env[63028]: DEBUG nova.compute.manager [-] [instance: 1af19279-e75b-4ec5-91f1-a0a101b229b2] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 797.488623] env[63028]: DEBUG nova.network.neutron [-] [instance: 1af19279-e75b-4ec5-91f1-a0a101b229b2] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 797.614989] env[63028]: DEBUG nova.compute.manager [req-1ee80696-d504-4cd3-b652-8cd133523e1c req-0e717f39-e741-4d74-bd31-87e52b8f7db9 service nova] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Received event network-changed-2e2d8403-826c-4e24-ba3c-123d444d1fdc {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 797.615196] env[63028]: DEBUG nova.compute.manager [req-1ee80696-d504-4cd3-b652-8cd133523e1c req-0e717f39-e741-4d74-bd31-87e52b8f7db9 service nova] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Refreshing instance network info cache due to event network-changed-2e2d8403-826c-4e24-ba3c-123d444d1fdc. 
{{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 797.615411] env[63028]: DEBUG oslo_concurrency.lockutils [req-1ee80696-d504-4cd3-b652-8cd133523e1c req-0e717f39-e741-4d74-bd31-87e52b8f7db9 service nova] Acquiring lock "refresh_cache-85aafadb-81d6-4687-aed1-fbe829e5f95f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 797.615558] env[63028]: DEBUG oslo_concurrency.lockutils [req-1ee80696-d504-4cd3-b652-8cd133523e1c req-0e717f39-e741-4d74-bd31-87e52b8f7db9 service nova] Acquired lock "refresh_cache-85aafadb-81d6-4687-aed1-fbe829e5f95f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 797.615721] env[63028]: DEBUG nova.network.neutron [req-1ee80696-d504-4cd3-b652-8cd133523e1c req-0e717f39-e741-4d74-bd31-87e52b8f7db9 service nova] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Refreshing network info cache for port 2e2d8403-826c-4e24-ba3c-123d444d1fdc {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 797.688167] env[63028]: DEBUG nova.compute.manager [None req-bd0f3b82-bcc2-443e-a883-684f0a6398bb tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 797.689143] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bf13994-cabd-4297-8eb9-261967a8ef65 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.781910] env[63028]: DEBUG nova.scheduler.client.report [None req-d9433184-003c-41fe-b52d-bd01162a28cf tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 797.905117] env[63028]: DEBUG oslo_vmware.api [None req-efa009e1-31b4-4810-870c-cc093d5cbf71 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Task: {'id': task-2735504, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076597} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.905904] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-efa009e1-31b4-4810-870c-cc093d5cbf71 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: 52b19182-a7e2-4461-b4eb-e6cd8a30024e] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 797.907084] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d70ce81c-c2c2-4df4-8f8e-0f7860fa9e42 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.938095] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-efa009e1-31b4-4810-870c-cc093d5cbf71 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: 52b19182-a7e2-4461-b4eb-e6cd8a30024e] Reconfiguring VM instance instance-00000037 to attach disk [datastore2] 52b19182-a7e2-4461-b4eb-e6cd8a30024e/52b19182-a7e2-4461-b4eb-e6cd8a30024e.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 797.939104] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e3feeb93-5110-460a-b74b-93d46c538c3a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.963892] env[63028]: DEBUG oslo_vmware.api [None req-efa009e1-31b4-4810-870c-cc093d5cbf71 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Waiting for the task: (returnval){ [ 797.963892] env[63028]: value = "task-2735505" [ 797.963892] env[63028]: _type = "Task" [ 797.963892] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 797.973764] env[63028]: DEBUG oslo_vmware.api [None req-efa009e1-31b4-4810-870c-cc093d5cbf71 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Task: {'id': task-2735505, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.208244] env[63028]: INFO nova.compute.manager [None req-bd0f3b82-bcc2-443e-a883-684f0a6398bb tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] instance snapshotting [ 798.208877] env[63028]: DEBUG nova.objects.instance [None req-bd0f3b82-bcc2-443e-a883-684f0a6398bb tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Lazy-loading 'flavor' on Instance uuid c06813c4-472d-4bf9-84ec-0d01306bcd48 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 798.290315] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d9433184-003c-41fe-b52d-bd01162a28cf tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.250s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 798.293976] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a647d5f8-656b-4f8b-a625-07443c814584 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 36.980s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 798.296076] env[63028]: INFO nova.compute.claims [None req-a647d5f8-656b-4f8b-a625-07443c814584 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] [instance: a1d00736-1a8d-46e0-9358-46e848b94797] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 798.328205] env[63028]: INFO nova.scheduler.client.report [None req-d9433184-003c-41fe-b52d-bd01162a28cf tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] Deleted allocations for instance 8f6beda6-0fc6-4d85-9f27-f4248adda8f3 [ 798.475619] env[63028]: DEBUG oslo_vmware.api [None req-efa009e1-31b4-4810-870c-cc093d5cbf71 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Task: {'id': task-2735505, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.508199] env[63028]: DEBUG nova.network.neutron [-] [instance: 1af19279-e75b-4ec5-91f1-a0a101b229b2] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 798.717404] env[63028]: DEBUG nova.network.neutron [req-1ee80696-d504-4cd3-b652-8cd133523e1c req-0e717f39-e741-4d74-bd31-87e52b8f7db9 service nova] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Updated VIF entry in instance network info cache for port 2e2d8403-826c-4e24-ba3c-123d444d1fdc. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 798.717823] env[63028]: DEBUG nova.network.neutron [req-1ee80696-d504-4cd3-b652-8cd133523e1c req-0e717f39-e741-4d74-bd31-87e52b8f7db9 service nova] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Updating instance_info_cache with network_info: [{"id": "2e2d8403-826c-4e24-ba3c-123d444d1fdc", "address": "fa:16:3e:09:d0:a2", "network": {"id": "2b335013-49f6-4f84-b6b6-33d71818b2b9", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-2106845332-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.232", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "11332c2adbdc41928d4bf084435e2037", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f01bbee7-8b9a-46be-891e-59d8142fb359", "external-id": "nsx-vlan-transportzone-145", "segmentation_id": 145, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2e2d8403-82", "ovs_interfaceid": "2e2d8403-826c-4e24-ba3c-123d444d1fdc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 798.719986] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9db93a7-832d-4ec0-80aa-de3638d7d0d4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.756999] env[63028]: DEBUG nova.compute.manager [None req-323de9da-0151-4e02-98a3-478fcd44af97 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 1316318e-8dcf-4ac2-b40a-6a3ab6964997] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 798.758119] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86a726e0-6ca2-485c-a5b6-633a93a8bdb1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.764508] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbdd4822-357c-4455-b09e-dafb4d300e0d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.841047] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d9433184-003c-41fe-b52d-bd01162a28cf tempest-ServersTestManualDisk-1597241850 tempest-ServersTestManualDisk-1597241850-project-member] Lock "8f6beda6-0fc6-4d85-9f27-f4248adda8f3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 45.326s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 798.976792] env[63028]: DEBUG oslo_vmware.api [None req-efa009e1-31b4-4810-870c-cc093d5cbf71 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Task: {'id': task-2735505, 'name': ReconfigVM_Task, 
'duration_secs': 0.568363} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 798.977653] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-efa009e1-31b4-4810-870c-cc093d5cbf71 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: 52b19182-a7e2-4461-b4eb-e6cd8a30024e] Reconfigured VM instance instance-00000037 to attach disk [datastore2] 52b19182-a7e2-4461-b4eb-e6cd8a30024e/52b19182-a7e2-4461-b4eb-e6cd8a30024e.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 798.978415] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-57d775c8-3d85-400d-b311-f4d8200952df {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.987091] env[63028]: DEBUG oslo_vmware.api [None req-efa009e1-31b4-4810-870c-cc093d5cbf71 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Waiting for the task: (returnval){ [ 798.987091] env[63028]: value = "task-2735506" [ 798.987091] env[63028]: _type = "Task" [ 798.987091] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.002760] env[63028]: DEBUG oslo_vmware.api [None req-efa009e1-31b4-4810-870c-cc093d5cbf71 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Task: {'id': task-2735506, 'name': Rename_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.017617] env[63028]: INFO nova.compute.manager [-] [instance: 1af19279-e75b-4ec5-91f1-a0a101b229b2] Took 1.53 seconds to deallocate network for instance. 
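The entries for task-2735500 and task-2735503 above trace the usual vmwareapi destroy sequence: power off the VM, unregister it, then delete its datastore directory, polling each vCenter task until it finishes (the "progress is N%" lines). Below is a minimal sketch of that call pattern using oslo.vmware directly; it is not the Nova driver code itself, and the connection parameters, vm_ref, datacenter_ref and ds_path are placeholders.

    # Sketch only: destroy-style sequence with oslo.vmware, mirroring the
    # PowerOffVM_Task / UnregisterVM / DeleteDatastoreFile_Task calls logged above.
    from oslo_vmware import api as vmware_api

    session = vmware_api.VMwareAPISession(
        'vc.example.org', 'user', 'password',
        api_retry_count=10, task_poll_interval=0.5)

    def destroy_instance(vm_ref, datacenter_ref, ds_path):
        # PowerOffVM_Task returns a task reference that wait_for_task polls,
        # producing the periodic "progress is N%" entries seen in the log.
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        session.wait_for_task(task)

        # UnregisterVM is synchronous; there is no task to wait on.
        session.invoke_api(session.vim, 'UnregisterVM', vm_ref)

        # Delete the instance directory from the datastore, e.g.
        # "[datastore1] <instance-uuid>" (placeholder path).
        file_manager = session.vim.service_content.fileManager
        task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                                  file_manager, name=ds_path,
                                  datacenter=datacenter_ref)
        session.wait_for_task(task)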
[ 799.232607] env[63028]: DEBUG oslo_concurrency.lockutils [req-1ee80696-d504-4cd3-b652-8cd133523e1c req-0e717f39-e741-4d74-bd31-87e52b8f7db9 service nova] Releasing lock "refresh_cache-85aafadb-81d6-4687-aed1-fbe829e5f95f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 799.276372] env[63028]: INFO nova.compute.manager [None req-323de9da-0151-4e02-98a3-478fcd44af97 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 1316318e-8dcf-4ac2-b40a-6a3ab6964997] instance snapshotting [ 799.280080] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f2decd2-16a9-4d2f-8246-d90921742e59 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.286888] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-bd0f3b82-bcc2-443e-a883-684f0a6398bb tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Creating Snapshot of the VM instance {{(pid=63028) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 799.290046] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-564074d4-6b07-4f88-a1ac-56c77336e173 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.330710] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bd952c3-dfa5-4f46-aa5a-9e1597dd0d6a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.334691] env[63028]: DEBUG oslo_vmware.api [None req-bd0f3b82-bcc2-443e-a883-684f0a6398bb tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Waiting for the task: (returnval){ [ 799.334691] env[63028]: value = "task-2735507" [ 799.334691] env[63028]: _type = "Task" [ 799.334691] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.357230] env[63028]: DEBUG oslo_vmware.api [None req-bd0f3b82-bcc2-443e-a883-684f0a6398bb tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2735507, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.501717] env[63028]: DEBUG oslo_vmware.api [None req-efa009e1-31b4-4810-870c-cc093d5cbf71 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Task: {'id': task-2735506, 'name': Rename_Task, 'duration_secs': 0.209463} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 799.502129] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-efa009e1-31b4-4810-870c-cc093d5cbf71 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: 52b19182-a7e2-4461-b4eb-e6cd8a30024e] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 799.503026] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-37ff5852-1609-460f-ab51-f2a2f3632e03 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.513172] env[63028]: DEBUG oslo_vmware.api [None req-efa009e1-31b4-4810-870c-cc093d5cbf71 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Waiting for the task: (returnval){ [ 799.513172] env[63028]: value = "task-2735508" [ 799.513172] env[63028]: _type = "Task" [ 799.513172] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.522825] env[63028]: DEBUG oslo_vmware.api [None req-efa009e1-31b4-4810-870c-cc093d5cbf71 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Task: {'id': task-2735508, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.526244] env[63028]: DEBUG oslo_concurrency.lockutils [None req-e7a62a25-939b-4556-8d49-f66e9a7f16ee tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 799.656256] env[63028]: DEBUG nova.compute.manager [req-e8c7b91c-2047-4b76-a6b1-030a3711c93b req-6bd6d6a0-c9db-4621-b537-cd373102bbac service nova] [instance: 1af19279-e75b-4ec5-91f1-a0a101b229b2] Received event network-vif-deleted-1dbc74bf-5582-4e9b-a07e-8ba016f027e5 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 799.859030] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-323de9da-0151-4e02-98a3-478fcd44af97 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 1316318e-8dcf-4ac2-b40a-6a3ab6964997] Creating Snapshot of the VM instance {{(pid=63028) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 799.859347] env[63028]: DEBUG oslo_vmware.api [None req-bd0f3b82-bcc2-443e-a883-684f0a6398bb tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2735507, 'name': CreateSnapshot_Task, 'duration_secs': 0.534943} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 799.862901] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-166f485f-018b-43dc-bf23-88e290d1d067 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.867568] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-bd0f3b82-bcc2-443e-a883-684f0a6398bb tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Created Snapshot of the VM instance {{(pid=63028) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 799.868730] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85e3d376-2a73-4262-86e0-ba74ce501dc8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.890157] env[63028]: DEBUG oslo_vmware.api [None req-323de9da-0151-4e02-98a3-478fcd44af97 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Waiting for the task: (returnval){ [ 799.890157] env[63028]: value = "task-2735509" [ 799.890157] env[63028]: _type = "Task" [ 799.890157] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.914645] env[63028]: DEBUG oslo_vmware.api [None req-323de9da-0151-4e02-98a3-478fcd44af97 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Task: {'id': task-2735509, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.920407] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-346aa0ba-0b4a-4e4f-a08f-9a61dfbb8f70 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.930408] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6eca8786-9979-4280-bf37-ae12e8014354 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.969305] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fba5fb1-d08e-4d50-bcc7-25c3f4a1f936 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.979778] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48b114b5-a731-40fa-be61-e0839bdaf5ac {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.999192] env[63028]: DEBUG nova.compute.provider_tree [None req-a647d5f8-656b-4f8b-a625-07443c814584 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 800.032308] env[63028]: DEBUG oslo_vmware.api [None req-efa009e1-31b4-4810-870c-cc093d5cbf71 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Task: {'id': 
task-2735508, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.395462] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-bd0f3b82-bcc2-443e-a883-684f0a6398bb tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Creating linked-clone VM from snapshot {{(pid=63028) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 800.395938] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-c58ae6fd-5f67-4a61-8e13-98589696e03e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.408356] env[63028]: DEBUG oslo_vmware.api [None req-323de9da-0151-4e02-98a3-478fcd44af97 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Task: {'id': task-2735509, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.409874] env[63028]: DEBUG oslo_vmware.api [None req-bd0f3b82-bcc2-443e-a883-684f0a6398bb tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Waiting for the task: (returnval){ [ 800.409874] env[63028]: value = "task-2735510" [ 800.409874] env[63028]: _type = "Task" [ 800.409874] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 800.418628] env[63028]: DEBUG oslo_vmware.api [None req-bd0f3b82-bcc2-443e-a883-684f0a6398bb tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2735510, 'name': CloneVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.503288] env[63028]: DEBUG nova.scheduler.client.report [None req-a647d5f8-656b-4f8b-a625-07443c814584 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 800.529389] env[63028]: DEBUG oslo_vmware.api [None req-efa009e1-31b4-4810-870c-cc093d5cbf71 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Task: {'id': task-2735508, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.908910] env[63028]: DEBUG oslo_vmware.api [None req-323de9da-0151-4e02-98a3-478fcd44af97 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Task: {'id': task-2735509, 'name': CreateSnapshot_Task, 'duration_secs': 0.823035} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 800.909230] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-323de9da-0151-4e02-98a3-478fcd44af97 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 1316318e-8dcf-4ac2-b40a-6a3ab6964997] Created Snapshot of the VM instance {{(pid=63028) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 800.910119] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6d2766c-7c50-40e6-9f67-8516e0789a98 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.965118] env[63028]: DEBUG oslo_vmware.api [None req-bd0f3b82-bcc2-443e-a883-684f0a6398bb tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2735510, 'name': CloneVM_Task} progress is 94%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.009550] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a647d5f8-656b-4f8b-a625-07443c814584 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.715s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 801.011075] env[63028]: DEBUG nova.compute.manager [None req-a647d5f8-656b-4f8b-a625-07443c814584 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] [instance: a1d00736-1a8d-46e0-9358-46e848b94797] Start building networks asynchronously for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 801.016245] env[63028]: DEBUG oslo_concurrency.lockutils [None req-72a36364-d2e4-49f2-bc10-dbe3424d39a6 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 39.399s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 801.017870] env[63028]: INFO nova.compute.claims [None req-72a36364-d2e4-49f2-bc10-dbe3424d39a6 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 801.037507] env[63028]: DEBUG oslo_vmware.api [None req-efa009e1-31b4-4810-870c-cc093d5cbf71 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Task: {'id': task-2735508, 'name': PowerOnVM_Task, 'duration_secs': 1.110391} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.038240] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-efa009e1-31b4-4810-870c-cc093d5cbf71 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: 52b19182-a7e2-4461-b4eb-e6cd8a30024e] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 801.038322] env[63028]: INFO nova.compute.manager [None req-efa009e1-31b4-4810-870c-cc093d5cbf71 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: 52b19182-a7e2-4461-b4eb-e6cd8a30024e] Took 12.76 seconds to spawn the instance on the hypervisor. [ 801.038541] env[63028]: DEBUG nova.compute.manager [None req-efa009e1-31b4-4810-870c-cc093d5cbf71 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: 52b19182-a7e2-4461-b4eb-e6cd8a30024e] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 801.039460] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1363e0c3-25c2-4987-888e-bb2171fd9040 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.446498] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-323de9da-0151-4e02-98a3-478fcd44af97 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 1316318e-8dcf-4ac2-b40a-6a3ab6964997] Creating linked-clone VM from snapshot {{(pid=63028) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 801.446498] env[63028]: DEBUG oslo_vmware.api [None req-bd0f3b82-bcc2-443e-a883-684f0a6398bb tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2735510, 'name': CloneVM_Task} progress is 94%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.446498] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-ec5abe0e-dc48-4337-be2c-0ef3444bd7e7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.457116] env[63028]: DEBUG oslo_vmware.api [None req-323de9da-0151-4e02-98a3-478fcd44af97 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Waiting for the task: (returnval){ [ 801.457116] env[63028]: value = "task-2735511" [ 801.457116] env[63028]: _type = "Task" [ 801.457116] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.469213] env[63028]: DEBUG oslo_vmware.api [None req-323de9da-0151-4e02-98a3-478fcd44af97 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Task: {'id': task-2735511, 'name': CloneVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.526158] env[63028]: DEBUG nova.compute.utils [None req-a647d5f8-656b-4f8b-a625-07443c814584 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 801.526158] env[63028]: DEBUG nova.compute.manager [None req-a647d5f8-656b-4f8b-a625-07443c814584 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] [instance: a1d00736-1a8d-46e0-9358-46e848b94797] Allocating IP information in the background. {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 801.526158] env[63028]: DEBUG nova.network.neutron [None req-a647d5f8-656b-4f8b-a625-07443c814584 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] [instance: a1d00736-1a8d-46e0-9358-46e848b94797] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 801.568941] env[63028]: INFO nova.compute.manager [None req-efa009e1-31b4-4810-870c-cc093d5cbf71 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: 52b19182-a7e2-4461-b4eb-e6cd8a30024e] Took 47.75 seconds to build instance. [ 801.599917] env[63028]: DEBUG nova.policy [None req-a647d5f8-656b-4f8b-a625-07443c814584 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bc93056b710a46e2b2f3485780719323', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '178b95ba550d453db2b9868e72a8c93f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 801.659886] env[63028]: DEBUG oslo_vmware.rw_handles [None req-dc38d46c-678e-438a-bcbc-898ad4f285df tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ede4e1-188f-f9db-77ec-c6bf10e2a7d2/disk-0.vmdk. {{(pid=63028) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 801.661087] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad34ad99-e938-482a-812f-5f07104bca0d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.670158] env[63028]: DEBUG oslo_vmware.rw_handles [None req-dc38d46c-678e-438a-bcbc-898ad4f285df tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ede4e1-188f-f9db-77ec-c6bf10e2a7d2/disk-0.vmdk is in state: ready. 
{{(pid=63028) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 801.670364] env[63028]: ERROR oslo_vmware.rw_handles [None req-dc38d46c-678e-438a-bcbc-898ad4f285df tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ede4e1-188f-f9db-77ec-c6bf10e2a7d2/disk-0.vmdk due to incomplete transfer. [ 801.670620] env[63028]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-db413f33-273a-4009-8f17-767aa5a79b1d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.681540] env[63028]: DEBUG oslo_vmware.rw_handles [None req-dc38d46c-678e-438a-bcbc-898ad4f285df tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ede4e1-188f-f9db-77ec-c6bf10e2a7d2/disk-0.vmdk. {{(pid=63028) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 801.681781] env[63028]: DEBUG nova.virt.vmwareapi.images [None req-dc38d46c-678e-438a-bcbc-898ad4f285df tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: cd11b318-9158-4f1d-8aa8-1c9d565bb5d5] Uploaded image c9823cf3-c81e-4b18-855a-a01f46d8c790 to the Glance image server {{(pid=63028) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 801.683726] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc38d46c-678e-438a-bcbc-898ad4f285df tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: cd11b318-9158-4f1d-8aa8-1c9d565bb5d5] Destroying the VM {{(pid=63028) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 801.683726] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-c0b80e19-6226-45c7-8474-4e1c7f305032 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.691297] env[63028]: DEBUG oslo_vmware.api [None req-dc38d46c-678e-438a-bcbc-898ad4f285df tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Waiting for the task: (returnval){ [ 801.691297] env[63028]: value = "task-2735512" [ 801.691297] env[63028]: _type = "Task" [ 801.691297] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.702355] env[63028]: DEBUG oslo_vmware.api [None req-dc38d46c-678e-438a-bcbc-898ad4f285df tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735512, 'name': Destroy_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.934059] env[63028]: DEBUG oslo_vmware.api [None req-bd0f3b82-bcc2-443e-a883-684f0a6398bb tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2735510, 'name': CloneVM_Task, 'duration_secs': 1.430213} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.934059] env[63028]: INFO nova.virt.vmwareapi.vmops [None req-bd0f3b82-bcc2-443e-a883-684f0a6398bb tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Created linked-clone VM from snapshot [ 801.934390] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78e26c03-e7b6-4e9b-bf06-029395dba5f5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.943787] env[63028]: DEBUG nova.virt.vmwareapi.images [None req-bd0f3b82-bcc2-443e-a883-684f0a6398bb tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Uploading image 6c9341f0-34a7-4319-9418-6e0f4692185e {{(pid=63028) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 801.968528] env[63028]: DEBUG oslo_vmware.api [None req-323de9da-0151-4e02-98a3-478fcd44af97 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Task: {'id': task-2735511, 'name': CloneVM_Task} progress is 94%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.986269] env[63028]: DEBUG oslo_vmware.rw_handles [None req-bd0f3b82-bcc2-443e-a883-684f0a6398bb tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 801.986269] env[63028]: value = "vm-550748" [ 801.986269] env[63028]: _type = "VirtualMachine" [ 801.986269] env[63028]: }. {{(pid=63028) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 801.986648] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-01b96655-2cfe-403c-9e9d-cda4e47440ce {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.995948] env[63028]: DEBUG oslo_vmware.rw_handles [None req-bd0f3b82-bcc2-443e-a883-684f0a6398bb tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Lease: (returnval){ [ 801.995948] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5225656e-efbb-c139-fd55-229ecf616277" [ 801.995948] env[63028]: _type = "HttpNfcLease" [ 801.995948] env[63028]: } obtained for exporting VM: (result){ [ 801.995948] env[63028]: value = "vm-550748" [ 801.995948] env[63028]: _type = "VirtualMachine" [ 801.995948] env[63028]: }. {{(pid=63028) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 801.996276] env[63028]: DEBUG oslo_vmware.api [None req-bd0f3b82-bcc2-443e-a883-684f0a6398bb tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Waiting for the lease: (returnval){ [ 801.996276] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5225656e-efbb-c139-fd55-229ecf616277" [ 801.996276] env[63028]: _type = "HttpNfcLease" [ 801.996276] env[63028]: } to be ready. 
{{(pid=63028) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 802.005328] env[63028]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 802.005328] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5225656e-efbb-c139-fd55-229ecf616277" [ 802.005328] env[63028]: _type = "HttpNfcLease" [ 802.005328] env[63028]: } is initializing. {{(pid=63028) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 802.031552] env[63028]: DEBUG nova.compute.utils [None req-a647d5f8-656b-4f8b-a625-07443c814584 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 802.070559] env[63028]: DEBUG oslo_concurrency.lockutils [None req-efa009e1-31b4-4810-870c-cc093d5cbf71 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Lock "52b19182-a7e2-4461-b4eb-e6cd8a30024e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 71.667s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 802.211557] env[63028]: DEBUG oslo_vmware.api [None req-dc38d46c-678e-438a-bcbc-898ad4f285df tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735512, 'name': Destroy_Task} progress is 33%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.223943] env[63028]: DEBUG nova.network.neutron [None req-a647d5f8-656b-4f8b-a625-07443c814584 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] [instance: a1d00736-1a8d-46e0-9358-46e848b94797] Successfully created port: 66cd0102-9651-45e1-8a38-f65e2f7dd800 {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 802.475148] env[63028]: DEBUG oslo_vmware.api [None req-323de9da-0151-4e02-98a3-478fcd44af97 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Task: {'id': task-2735511, 'name': CloneVM_Task} progress is 94%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.511033] env[63028]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 802.511033] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5225656e-efbb-c139-fd55-229ecf616277" [ 802.511033] env[63028]: _type = "HttpNfcLease" [ 802.511033] env[63028]: } is ready. {{(pid=63028) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 802.511991] env[63028]: DEBUG oslo_vmware.rw_handles [None req-bd0f3b82-bcc2-443e-a883-684f0a6398bb tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 802.511991] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5225656e-efbb-c139-fd55-229ecf616277" [ 802.511991] env[63028]: _type = "HttpNfcLease" [ 802.511991] env[63028]: }. 
{{(pid=63028) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 802.513918] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-157394d9-a62f-4bd5-a777-989f70cd4bde {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.528176] env[63028]: DEBUG oslo_vmware.rw_handles [None req-bd0f3b82-bcc2-443e-a883-684f0a6398bb tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/527fed8f-1a0d-0cd0-9a39-5e111dd049eb/disk-0.vmdk from lease info. {{(pid=63028) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 802.528442] env[63028]: DEBUG oslo_vmware.rw_handles [None req-bd0f3b82-bcc2-443e-a883-684f0a6398bb tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/527fed8f-1a0d-0cd0-9a39-5e111dd049eb/disk-0.vmdk for reading. {{(pid=63028) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 802.592785] env[63028]: DEBUG nova.compute.manager [None req-a647d5f8-656b-4f8b-a625-07443c814584 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] [instance: a1d00736-1a8d-46e0-9358-46e848b94797] Start building block device mappings for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 802.595808] env[63028]: DEBUG nova.compute.manager [None req-c28be328-46e8-4c9b-904e-d46ed583a88b tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] [instance: 022125c4-2b0c-4a2c-ae63-18968887316e] Starting instance... {{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 802.647199] env[63028]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-453634e6-6ce0-4d9f-9411-51af7798f4ec {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.708099] env[63028]: DEBUG oslo_vmware.api [None req-dc38d46c-678e-438a-bcbc-898ad4f285df tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735512, 'name': Destroy_Task, 'duration_secs': 0.803839} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 802.708628] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-dc38d46c-678e-438a-bcbc-898ad4f285df tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: cd11b318-9158-4f1d-8aa8-1c9d565bb5d5] Destroyed the VM [ 802.708628] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-dc38d46c-678e-438a-bcbc-898ad4f285df tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: cd11b318-9158-4f1d-8aa8-1c9d565bb5d5] Deleting Snapshot of the VM instance {{(pid=63028) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 802.708834] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-0028c236-7e69-437f-9904-f20b3acb9e7b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.713267] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-269befbe-3aee-4d64-9183-b31238ea6f59 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.718114] env[63028]: DEBUG oslo_vmware.api [None req-dc38d46c-678e-438a-bcbc-898ad4f285df tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Waiting for the task: (returnval){ [ 802.718114] env[63028]: value = "task-2735514" [ 802.718114] env[63028]: _type = "Task" [ 802.718114] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 802.725225] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58020d99-9b76-4ed9-9ef8-9c43f55307d9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.734943] env[63028]: DEBUG oslo_vmware.api [None req-dc38d46c-678e-438a-bcbc-898ad4f285df tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735514, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.767398] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9adf72cf-8d8f-4710-9ed3-2124d9133536 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.779066] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2af4463c-a0c5-4ca1-bd5f-6be654697669 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.798948] env[63028]: DEBUG nova.compute.provider_tree [None req-72a36364-d2e4-49f2-bc10-dbe3424d39a6 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 802.972731] env[63028]: DEBUG oslo_vmware.api [None req-323de9da-0151-4e02-98a3-478fcd44af97 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Task: {'id': task-2735511, 'name': CloneVM_Task} progress is 95%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.130363] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c28be328-46e8-4c9b-904e-d46ed583a88b tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 803.235108] env[63028]: DEBUG oslo_vmware.api [None req-dc38d46c-678e-438a-bcbc-898ad4f285df tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735514, 'name': RemoveSnapshot_Task} progress is 97%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.304136] env[63028]: DEBUG nova.scheduler.client.report [None req-72a36364-d2e4-49f2-bc10-dbe3424d39a6 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 803.472912] env[63028]: DEBUG oslo_vmware.api [None req-323de9da-0151-4e02-98a3-478fcd44af97 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Task: {'id': task-2735511, 'name': CloneVM_Task, 'duration_secs': 1.822073} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.473353] env[63028]: INFO nova.virt.vmwareapi.vmops [None req-323de9da-0151-4e02-98a3-478fcd44af97 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 1316318e-8dcf-4ac2-b40a-6a3ab6964997] Created linked-clone VM from snapshot [ 803.474534] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62a4e5f1-9c56-45e7-975b-ac4d9ed04aab {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.487722] env[63028]: DEBUG nova.virt.vmwareapi.images [None req-323de9da-0151-4e02-98a3-478fcd44af97 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 1316318e-8dcf-4ac2-b40a-6a3ab6964997] Uploading image 411d94f7-582d-4070-b857-b6924605fda8 {{(pid=63028) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 803.525217] env[63028]: DEBUG oslo_vmware.rw_handles [None req-323de9da-0151-4e02-98a3-478fcd44af97 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 803.525217] env[63028]: value = "vm-550750" [ 803.525217] env[63028]: _type = "VirtualMachine" [ 803.525217] env[63028]: }. 
{{(pid=63028) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 803.532758] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-ae4e1188-b779-430b-bb1b-816b60f4ba60 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.550081] env[63028]: DEBUG oslo_vmware.rw_handles [None req-323de9da-0151-4e02-98a3-478fcd44af97 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Lease: (returnval){ [ 803.550081] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5213c50a-f577-edf1-8438-92461be0aa4c" [ 803.550081] env[63028]: _type = "HttpNfcLease" [ 803.550081] env[63028]: } obtained for exporting VM: (result){ [ 803.550081] env[63028]: value = "vm-550750" [ 803.550081] env[63028]: _type = "VirtualMachine" [ 803.550081] env[63028]: }. {{(pid=63028) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 803.553878] env[63028]: DEBUG oslo_vmware.api [None req-323de9da-0151-4e02-98a3-478fcd44af97 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Waiting for the lease: (returnval){ [ 803.553878] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5213c50a-f577-edf1-8438-92461be0aa4c" [ 803.553878] env[63028]: _type = "HttpNfcLease" [ 803.553878] env[63028]: } to be ready. {{(pid=63028) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 803.562917] env[63028]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 803.562917] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5213c50a-f577-edf1-8438-92461be0aa4c" [ 803.562917] env[63028]: _type = "HttpNfcLease" [ 803.562917] env[63028]: } is initializing. {{(pid=63028) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 803.605588] env[63028]: DEBUG nova.compute.manager [None req-a647d5f8-656b-4f8b-a625-07443c814584 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] [instance: a1d00736-1a8d-46e0-9358-46e848b94797] Start spawning the instance on the hypervisor. 
{{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 803.645126] env[63028]: DEBUG nova.virt.hardware [None req-a647d5f8-656b-4f8b-a625-07443c814584 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:54:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=1,extra_specs={hw_rng:allowed='True'},flavorid='1994684944',id=32,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_1-1250738434',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 803.646394] env[63028]: DEBUG nova.virt.hardware [None req-a647d5f8-656b-4f8b-a625-07443c814584 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 803.646565] env[63028]: DEBUG nova.virt.hardware [None req-a647d5f8-656b-4f8b-a625-07443c814584 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 803.646753] env[63028]: DEBUG nova.virt.hardware [None req-a647d5f8-656b-4f8b-a625-07443c814584 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 803.646900] env[63028]: DEBUG nova.virt.hardware [None req-a647d5f8-656b-4f8b-a625-07443c814584 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 803.647072] env[63028]: DEBUG nova.virt.hardware [None req-a647d5f8-656b-4f8b-a625-07443c814584 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 803.647312] env[63028]: DEBUG nova.virt.hardware [None req-a647d5f8-656b-4f8b-a625-07443c814584 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 803.647435] env[63028]: DEBUG nova.virt.hardware [None req-a647d5f8-656b-4f8b-a625-07443c814584 tempest-ServersWithSpecificFlavorTestJSON-243894015 
tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 803.647606] env[63028]: DEBUG nova.virt.hardware [None req-a647d5f8-656b-4f8b-a625-07443c814584 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 803.648013] env[63028]: DEBUG nova.virt.hardware [None req-a647d5f8-656b-4f8b-a625-07443c814584 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 803.648013] env[63028]: DEBUG nova.virt.hardware [None req-a647d5f8-656b-4f8b-a625-07443c814584 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 803.649191] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-751e019f-8dde-4bbd-a792-661cd97d2bbe {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.668575] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b7dca5f-eda8-40fb-a87e-cb120a4f79e5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.732031] env[63028]: DEBUG oslo_vmware.api [None req-dc38d46c-678e-438a-bcbc-898ad4f285df tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735514, 'name': RemoveSnapshot_Task, 'duration_secs': 0.782167} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.732210] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-dc38d46c-678e-438a-bcbc-898ad4f285df tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: cd11b318-9158-4f1d-8aa8-1c9d565bb5d5] Deleted Snapshot of the VM instance {{(pid=63028) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 803.736192] env[63028]: INFO nova.compute.manager [None req-dc38d46c-678e-438a-bcbc-898ad4f285df tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: cd11b318-9158-4f1d-8aa8-1c9d565bb5d5] Took 21.89 seconds to snapshot the instance on the hypervisor. 
[ 803.810482] env[63028]: DEBUG oslo_concurrency.lockutils [None req-72a36364-d2e4-49f2-bc10-dbe3424d39a6 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.795s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 803.810984] env[63028]: DEBUG nova.compute.manager [None req-72a36364-d2e4-49f2-bc10-dbe3424d39a6 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Start building networks asynchronously for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 803.817496] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2a4fe11b-9e59-4c76-ac53-0f5c85045de9 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 41.174s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 803.817496] env[63028]: DEBUG nova.objects.instance [None req-2a4fe11b-9e59-4c76-ac53-0f5c85045de9 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Lazy-loading 'resources' on Instance uuid 50e4934b-b9b1-4887-b5d1-95a37fbf4c41 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 803.955030] env[63028]: DEBUG oslo_concurrency.lockutils [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Acquiring lock "9773ad95-1894-471d-8020-c7952eac4be4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 803.955339] env[63028]: DEBUG oslo_concurrency.lockutils [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Lock "9773ad95-1894-471d-8020-c7952eac4be4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 804.051308] env[63028]: DEBUG nova.compute.manager [None req-e1bd3373-3038-4133-a676-f1792a1f8e56 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: 52b19182-a7e2-4461-b4eb-e6cd8a30024e] Stashing vm_state: active {{(pid=63028) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 804.066019] env[63028]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 804.066019] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5213c50a-f577-edf1-8438-92461be0aa4c" [ 804.066019] env[63028]: _type = "HttpNfcLease" [ 804.066019] env[63028]: } is ready. 
{{(pid=63028) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 804.066019] env[63028]: DEBUG oslo_vmware.rw_handles [None req-323de9da-0151-4e02-98a3-478fcd44af97 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 804.066019] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5213c50a-f577-edf1-8438-92461be0aa4c" [ 804.066019] env[63028]: _type = "HttpNfcLease" [ 804.066019] env[63028]: }. {{(pid=63028) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 804.066019] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d75e8f5d-61f4-4951-bda4-180fe7b32011 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.076110] env[63028]: DEBUG oslo_vmware.rw_handles [None req-323de9da-0151-4e02-98a3-478fcd44af97 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52642f47-c460-ed0e-2fba-b20d88b188ea/disk-0.vmdk from lease info. {{(pid=63028) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 804.076754] env[63028]: DEBUG oslo_vmware.rw_handles [None req-323de9da-0151-4e02-98a3-478fcd44af97 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52642f47-c460-ed0e-2fba-b20d88b188ea/disk-0.vmdk for reading. {{(pid=63028) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 804.167748] env[63028]: DEBUG nova.compute.manager [req-32908c96-937b-4af1-b03f-f6ab2f9f1efd req-a1b4b30f-db38-4eee-b123-2652dd017eee service nova] [instance: a1d00736-1a8d-46e0-9358-46e848b94797] Received event network-vif-plugged-66cd0102-9651-45e1-8a38-f65e2f7dd800 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 804.168507] env[63028]: DEBUG oslo_concurrency.lockutils [req-32908c96-937b-4af1-b03f-f6ab2f9f1efd req-a1b4b30f-db38-4eee-b123-2652dd017eee service nova] Acquiring lock "a1d00736-1a8d-46e0-9358-46e848b94797-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 804.168811] env[63028]: DEBUG oslo_concurrency.lockutils [req-32908c96-937b-4af1-b03f-f6ab2f9f1efd req-a1b4b30f-db38-4eee-b123-2652dd017eee service nova] Lock "a1d00736-1a8d-46e0-9358-46e848b94797-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 804.169516] env[63028]: DEBUG oslo_concurrency.lockutils [req-32908c96-937b-4af1-b03f-f6ab2f9f1efd req-a1b4b30f-db38-4eee-b123-2652dd017eee service nova] Lock "a1d00736-1a8d-46e0-9358-46e848b94797-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 804.169903] env[63028]: DEBUG nova.compute.manager [req-32908c96-937b-4af1-b03f-f6ab2f9f1efd req-a1b4b30f-db38-4eee-b123-2652dd017eee 
service nova] [instance: a1d00736-1a8d-46e0-9358-46e848b94797] No waiting events found dispatching network-vif-plugged-66cd0102-9651-45e1-8a38-f65e2f7dd800 {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 804.170195] env[63028]: WARNING nova.compute.manager [req-32908c96-937b-4af1-b03f-f6ab2f9f1efd req-a1b4b30f-db38-4eee-b123-2652dd017eee service nova] [instance: a1d00736-1a8d-46e0-9358-46e848b94797] Received unexpected event network-vif-plugged-66cd0102-9651-45e1-8a38-f65e2f7dd800 for instance with vm_state building and task_state spawning. [ 804.183035] env[63028]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-199ef07a-8d44-41c4-9e63-8afc6f2017c7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.318084] env[63028]: DEBUG nova.compute.utils [None req-72a36364-d2e4-49f2-bc10-dbe3424d39a6 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 804.323842] env[63028]: DEBUG nova.compute.manager [None req-72a36364-d2e4-49f2-bc10-dbe3424d39a6 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Allocating IP information in the background. {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 804.323842] env[63028]: DEBUG nova.network.neutron [None req-72a36364-d2e4-49f2-bc10-dbe3424d39a6 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 804.399522] env[63028]: DEBUG nova.policy [None req-72a36364-d2e4-49f2-bc10-dbe3424d39a6 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '88001cd873b841918c7849408e98ac7a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '98d3fdfda1694b2f9f5985831ea77a21', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 804.596243] env[63028]: DEBUG oslo_concurrency.lockutils [None req-e1bd3373-3038-4133-a676-f1792a1f8e56 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 804.748690] env[63028]: DEBUG nova.network.neutron [None req-a647d5f8-656b-4f8b-a625-07443c814584 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] [instance: a1d00736-1a8d-46e0-9358-46e848b94797] Successfully updated port: 66cd0102-9651-45e1-8a38-f65e2f7dd800 {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 804.824188] env[63028]: DEBUG nova.compute.manager [None req-72a36364-d2e4-49f2-bc10-dbe3424d39a6 
tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Start building block device mappings for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 805.009617] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a319c006-434e-47bf-aeff-8d474ae80e66 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.014440] env[63028]: DEBUG nova.network.neutron [None req-72a36364-d2e4-49f2-bc10-dbe3424d39a6 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Successfully created port: 296dfd9e-84e1-4ea8-bd17-28920a6a048b {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 805.022143] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24ddf804-fb1b-47d2-95b8-d42b75c5c62e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.061488] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29b74fe0-0e4e-44c5-9f67-0cf29a773235 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.070982] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06588468-8a64-4bf3-a50b-3d83bcf207ff {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.106451] env[63028]: DEBUG nova.compute.provider_tree [None req-2a4fe11b-9e59-4c76-ac53-0f5c85045de9 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 805.252855] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a647d5f8-656b-4f8b-a625-07443c814584 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Acquiring lock "refresh_cache-a1d00736-1a8d-46e0-9358-46e848b94797" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 805.252855] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a647d5f8-656b-4f8b-a625-07443c814584 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Acquired lock "refresh_cache-a1d00736-1a8d-46e0-9358-46e848b94797" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 805.252855] env[63028]: DEBUG nova.network.neutron [None req-a647d5f8-656b-4f8b-a625-07443c814584 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] [instance: a1d00736-1a8d-46e0-9358-46e848b94797] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 805.610492] env[63028]: DEBUG nova.scheduler.client.report [None req-2a4fe11b-9e59-4c76-ac53-0f5c85045de9 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] 
Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 805.811629] env[63028]: DEBUG nova.network.neutron [None req-a647d5f8-656b-4f8b-a625-07443c814584 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] [instance: a1d00736-1a8d-46e0-9358-46e848b94797] Instance cache missing network info. {{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 805.841466] env[63028]: DEBUG nova.compute.manager [None req-72a36364-d2e4-49f2-bc10-dbe3424d39a6 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Start spawning the instance on the hypervisor. {{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 806.050507] env[63028]: DEBUG nova.network.neutron [None req-a647d5f8-656b-4f8b-a625-07443c814584 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] [instance: a1d00736-1a8d-46e0-9358-46e848b94797] Updating instance_info_cache with network_info: [{"id": "66cd0102-9651-45e1-8a38-f65e2f7dd800", "address": "fa:16:3e:ff:ab:ba", "network": {"id": "bd3f74f8-d12b-4d2e-9aa9-ca1c9e766bf4", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1073484567-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "178b95ba550d453db2b9868e72a8c93f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc1e16db-ad3b-4b7f-ab64-4609c87abac0", "external-id": "nsx-vlan-transportzone-500", "segmentation_id": 500, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap66cd0102-96", "ovs_interfaceid": "66cd0102-9651-45e1-8a38-f65e2f7dd800", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 806.115978] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2a4fe11b-9e59-4c76-ac53-0f5c85045de9 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.302s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 806.121331] env[63028]: DEBUG oslo_concurrency.lockutils [None req-114b3219-d7c5-4835-8ed6-9b7d0ce0f081 tempest-InstanceActionsV221TestJSON-26958138 
tempest-InstanceActionsV221TestJSON-26958138-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 39.407s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 806.122930] env[63028]: INFO nova.compute.claims [None req-114b3219-d7c5-4835-8ed6-9b7d0ce0f081 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] [instance: e346c31b-ef1b-4f75-8564-cefe26bd672f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 806.155665] env[63028]: INFO nova.scheduler.client.report [None req-2a4fe11b-9e59-4c76-ac53-0f5c85045de9 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Deleted allocations for instance 50e4934b-b9b1-4887-b5d1-95a37fbf4c41 [ 806.558547] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a647d5f8-656b-4f8b-a625-07443c814584 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Releasing lock "refresh_cache-a1d00736-1a8d-46e0-9358-46e848b94797" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 806.558854] env[63028]: DEBUG nova.compute.manager [None req-a647d5f8-656b-4f8b-a625-07443c814584 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] [instance: a1d00736-1a8d-46e0-9358-46e848b94797] Instance network_info: |[{"id": "66cd0102-9651-45e1-8a38-f65e2f7dd800", "address": "fa:16:3e:ff:ab:ba", "network": {"id": "bd3f74f8-d12b-4d2e-9aa9-ca1c9e766bf4", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1073484567-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "178b95ba550d453db2b9868e72a8c93f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc1e16db-ad3b-4b7f-ab64-4609c87abac0", "external-id": "nsx-vlan-transportzone-500", "segmentation_id": 500, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap66cd0102-96", "ovs_interfaceid": "66cd0102-9651-45e1-8a38-f65e2f7dd800", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 806.561380] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-a647d5f8-656b-4f8b-a625-07443c814584 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] [instance: a1d00736-1a8d-46e0-9358-46e848b94797] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ff:ab:ba', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'bc1e16db-ad3b-4b7f-ab64-4609c87abac0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '66cd0102-9651-45e1-8a38-f65e2f7dd800', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 806.574026] 
env[63028]: DEBUG oslo.service.loopingcall [None req-a647d5f8-656b-4f8b-a625-07443c814584 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 806.574026] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a1d00736-1a8d-46e0-9358-46e848b94797] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 806.574026] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c7ce6151-b088-402e-a063-d5895da42258 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.609717] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 806.609717] env[63028]: value = "task-2735516" [ 806.609717] env[63028]: _type = "Task" [ 806.609717] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 806.619497] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735516, 'name': CreateVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.667668] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2a4fe11b-9e59-4c76-ac53-0f5c85045de9 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Lock "50e4934b-b9b1-4887-b5d1-95a37fbf4c41" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 47.533s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 806.772224] env[63028]: DEBUG nova.network.neutron [None req-72a36364-d2e4-49f2-bc10-dbe3424d39a6 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Successfully updated port: 296dfd9e-84e1-4ea8-bd17-28920a6a048b {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 807.122529] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735516, 'name': CreateVM_Task, 'duration_secs': 0.439657} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 807.122769] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a1d00736-1a8d-46e0-9358-46e848b94797] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 807.123561] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a647d5f8-656b-4f8b-a625-07443c814584 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 807.123765] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a647d5f8-656b-4f8b-a625-07443c814584 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 807.124139] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a647d5f8-656b-4f8b-a625-07443c814584 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 807.124461] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ecd6f3fd-62e4-4921-bbb1-9c80e930facf {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.135835] env[63028]: DEBUG oslo_vmware.api [None req-a647d5f8-656b-4f8b-a625-07443c814584 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Waiting for the task: (returnval){ [ 807.135835] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52d8cc50-b249-70ba-733f-34b462e8c21e" [ 807.135835] env[63028]: _type = "Task" [ 807.135835] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 807.145020] env[63028]: DEBUG oslo_vmware.api [None req-a647d5f8-656b-4f8b-a625-07443c814584 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52d8cc50-b249-70ba-733f-34b462e8c21e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.279165] env[63028]: DEBUG oslo_concurrency.lockutils [None req-72a36364-d2e4-49f2-bc10-dbe3424d39a6 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Acquiring lock "refresh_cache-63524cd8-81de-419f-bb07-0326f3cb393f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 807.279319] env[63028]: DEBUG oslo_concurrency.lockutils [None req-72a36364-d2e4-49f2-bc10-dbe3424d39a6 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Acquired lock "refresh_cache-63524cd8-81de-419f-bb07-0326f3cb393f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 807.279488] env[63028]: DEBUG nova.network.neutron [None req-72a36364-d2e4-49f2-bc10-dbe3424d39a6 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 807.623893] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e435eac-e780-4724-af33-e1ab432fc6f5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.632637] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed253511-7b33-4c45-9baa-94937b4a858c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.645955] env[63028]: DEBUG oslo_vmware.api [None req-a647d5f8-656b-4f8b-a625-07443c814584 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52d8cc50-b249-70ba-733f-34b462e8c21e, 'name': SearchDatastore_Task, 'duration_secs': 0.014717} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 807.674397] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a647d5f8-656b-4f8b-a625-07443c814584 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 807.674612] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-a647d5f8-656b-4f8b-a625-07443c814584 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] [instance: a1d00736-1a8d-46e0-9358-46e848b94797] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 807.674916] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a647d5f8-656b-4f8b-a625-07443c814584 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 807.675084] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a647d5f8-656b-4f8b-a625-07443c814584 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 807.675333] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-a647d5f8-656b-4f8b-a625-07443c814584 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 807.676094] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-92c1bc21-2705-4a5a-bd57-aa79323fd1e9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.679013] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60746c2d-bb1c-45ca-9b8c-ed35cf90d5a2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.689426] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08381414-1883-456c-8fcc-3553d6799754 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.694568] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-a647d5f8-656b-4f8b-a625-07443c814584 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 807.694764] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-a647d5f8-656b-4f8b-a625-07443c814584 
tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 807.695880] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-60e74fce-ad94-4c11-bca9-8313586e1f8a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.705971] env[63028]: DEBUG nova.compute.provider_tree [None req-114b3219-d7c5-4835-8ed6-9b7d0ce0f081 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 807.711231] env[63028]: DEBUG oslo_vmware.api [None req-a647d5f8-656b-4f8b-a625-07443c814584 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Waiting for the task: (returnval){ [ 807.711231] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52f10318-c100-5a9c-d4e8-15c8ad4910d9" [ 807.711231] env[63028]: _type = "Task" [ 807.711231] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 807.720478] env[63028]: DEBUG oslo_vmware.api [None req-a647d5f8-656b-4f8b-a625-07443c814584 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52f10318-c100-5a9c-d4e8-15c8ad4910d9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.821895] env[63028]: DEBUG nova.network.neutron [None req-72a36364-d2e4-49f2-bc10-dbe3424d39a6 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Instance cache missing network info. 
{{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 807.966419] env[63028]: DEBUG nova.network.neutron [None req-72a36364-d2e4-49f2-bc10-dbe3424d39a6 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Updating instance_info_cache with network_info: [{"id": "296dfd9e-84e1-4ea8-bd17-28920a6a048b", "address": "fa:16:3e:bc:87:07", "network": {"id": "37013470-5bda-41a6-826c-03ac0d423c85", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1214578902-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "98d3fdfda1694b2f9f5985831ea77a21", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8868dc2-7767-49c0-a2ed-e611fcbf8414", "external-id": "nsx-vlan-transportzone-158", "segmentation_id": 158, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap296dfd9e-84", "ovs_interfaceid": "296dfd9e-84e1-4ea8-bd17-28920a6a048b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 808.212209] env[63028]: DEBUG nova.scheduler.client.report [None req-114b3219-d7c5-4835-8ed6-9b7d0ce0f081 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 808.230316] env[63028]: DEBUG oslo_vmware.api [None req-a647d5f8-656b-4f8b-a625-07443c814584 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52f10318-c100-5a9c-d4e8-15c8ad4910d9, 'name': SearchDatastore_Task, 'duration_secs': 0.018005} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 808.231819] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4f9601e4-08ae-4d09-b011-70727910a575 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.238879] env[63028]: DEBUG oslo_vmware.api [None req-a647d5f8-656b-4f8b-a625-07443c814584 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Waiting for the task: (returnval){ [ 808.238879] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52c82c48-aeb7-a692-f3eb-168703704ab6" [ 808.238879] env[63028]: _type = "Task" [ 808.238879] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 808.248435] env[63028]: DEBUG oslo_vmware.api [None req-a647d5f8-656b-4f8b-a625-07443c814584 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52c82c48-aeb7-a692-f3eb-168703704ab6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.469385] env[63028]: DEBUG oslo_concurrency.lockutils [None req-72a36364-d2e4-49f2-bc10-dbe3424d39a6 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Releasing lock "refresh_cache-63524cd8-81de-419f-bb07-0326f3cb393f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 808.469906] env[63028]: DEBUG nova.compute.manager [None req-72a36364-d2e4-49f2-bc10-dbe3424d39a6 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Instance network_info: |[{"id": "296dfd9e-84e1-4ea8-bd17-28920a6a048b", "address": "fa:16:3e:bc:87:07", "network": {"id": "37013470-5bda-41a6-826c-03ac0d423c85", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1214578902-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "98d3fdfda1694b2f9f5985831ea77a21", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8868dc2-7767-49c0-a2ed-e611fcbf8414", "external-id": "nsx-vlan-transportzone-158", "segmentation_id": 158, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap296dfd9e-84", "ovs_interfaceid": "296dfd9e-84e1-4ea8-bd17-28920a6a048b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 808.721241] env[63028]: DEBUG oslo_concurrency.lockutils [None req-114b3219-d7c5-4835-8ed6-9b7d0ce0f081 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] Lock "compute_resources" "released" 
by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.600s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 808.722042] env[63028]: DEBUG nova.compute.manager [None req-114b3219-d7c5-4835-8ed6-9b7d0ce0f081 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] [instance: e346c31b-ef1b-4f75-8564-cefe26bd672f] Start building networks asynchronously for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 808.724918] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9f9f608a-dd59-4deb-8ecf-94d85c2fa869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 36.769s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 808.726496] env[63028]: INFO nova.compute.claims [None req-9f9f608a-dd59-4deb-8ecf-94d85c2fa869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 70888889-4965-47ab-ad47-59f1c1286bd8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 808.750959] env[63028]: DEBUG oslo_vmware.api [None req-a647d5f8-656b-4f8b-a625-07443c814584 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52c82c48-aeb7-a692-f3eb-168703704ab6, 'name': SearchDatastore_Task, 'duration_secs': 0.017916} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 808.751758] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a647d5f8-656b-4f8b-a625-07443c814584 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 808.752052] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-a647d5f8-656b-4f8b-a625-07443c814584 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] a1d00736-1a8d-46e0-9358-46e848b94797/a1d00736-1a8d-46e0-9358-46e848b94797.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 808.752390] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6053f04b-0fa4-465a-b580-e2e1fe7f3916 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.761875] env[63028]: DEBUG oslo_vmware.api [None req-a647d5f8-656b-4f8b-a625-07443c814584 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Waiting for the task: (returnval){ [ 808.761875] env[63028]: value = "task-2735517" [ 808.761875] env[63028]: _type = "Task" [ 808.761875] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 808.770843] env[63028]: DEBUG oslo_vmware.api [None req-a647d5f8-656b-4f8b-a625-07443c814584 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Task: {'id': task-2735517, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.236397] env[63028]: DEBUG nova.compute.utils [None req-114b3219-d7c5-4835-8ed6-9b7d0ce0f081 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 809.238104] env[63028]: DEBUG nova.compute.manager [None req-114b3219-d7c5-4835-8ed6-9b7d0ce0f081 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] [instance: e346c31b-ef1b-4f75-8564-cefe26bd672f] Allocating IP information in the background. {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 809.238104] env[63028]: DEBUG nova.network.neutron [None req-114b3219-d7c5-4835-8ed6-9b7d0ce0f081 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] [instance: e346c31b-ef1b-4f75-8564-cefe26bd672f] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 809.273922] env[63028]: DEBUG oslo_vmware.api [None req-a647d5f8-656b-4f8b-a625-07443c814584 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Task: {'id': task-2735517, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.284473] env[63028]: DEBUG nova.policy [None req-114b3219-d7c5-4835-8ed6-9b7d0ce0f081 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b3cd3e750e524c4d99eaa604eeb6856c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8fcbd7d1edc4403691c5e41ba033311d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 809.572487] env[63028]: DEBUG nova.network.neutron [None req-114b3219-d7c5-4835-8ed6-9b7d0ce0f081 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] [instance: e346c31b-ef1b-4f75-8564-cefe26bd672f] Successfully created port: a24d4572-d230-46e3-82e9-72efb20f6178 {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 809.742996] env[63028]: DEBUG nova.compute.manager [None req-114b3219-d7c5-4835-8ed6-9b7d0ce0f081 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] [instance: e346c31b-ef1b-4f75-8564-cefe26bd672f] Start building block device mappings for instance. 
{{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 809.779175] env[63028]: DEBUG oslo_vmware.api [None req-a647d5f8-656b-4f8b-a625-07443c814584 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Task: {'id': task-2735517, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.236036] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af28f02b-352d-4931-a927-07d522155671 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.245407] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6eeb8ad-ae99-4b3b-94a8-6c78d0b715a1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.289148] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5348f9c5-8d90-40cb-b676-0a22f14d6637 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.302624] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cec95a5-b909-4848-84b7-1742f3000fa2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.306857] env[63028]: DEBUG oslo_vmware.api [None req-a647d5f8-656b-4f8b-a625-07443c814584 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Task: {'id': task-2735517, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.529533} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 810.307500] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-a647d5f8-656b-4f8b-a625-07443c814584 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] a1d00736-1a8d-46e0-9358-46e848b94797/a1d00736-1a8d-46e0-9358-46e848b94797.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 810.307732] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-a647d5f8-656b-4f8b-a625-07443c814584 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] [instance: a1d00736-1a8d-46e0-9358-46e848b94797] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 810.308380] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-68575597-3c3e-4e5c-8d3e-065155c32918 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.319776] env[63028]: DEBUG nova.compute.provider_tree [None req-9f9f608a-dd59-4deb-8ecf-94d85c2fa869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 810.323234] env[63028]: DEBUG oslo_vmware.api [None req-a647d5f8-656b-4f8b-a625-07443c814584 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Waiting for the task: (returnval){ [ 810.323234] env[63028]: value = "task-2735518" [ 810.323234] env[63028]: _type = "Task" [ 810.323234] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 810.333670] env[63028]: DEBUG oslo_vmware.api [None req-a647d5f8-656b-4f8b-a625-07443c814584 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Task: {'id': task-2735518, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.637051] env[63028]: DEBUG nova.virt.hardware [None req-72a36364-d2e4-49f2-bc10-dbe3424d39a6 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 810.637412] env[63028]: DEBUG nova.virt.hardware [None req-72a36364-d2e4-49f2-bc10-dbe3424d39a6 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 810.637612] env[63028]: DEBUG nova.virt.hardware [None req-72a36364-d2e4-49f2-bc10-dbe3424d39a6 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 810.637827] env[63028]: DEBUG nova.virt.hardware [None req-72a36364-d2e4-49f2-bc10-dbe3424d39a6 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 810.638020] env[63028]: DEBUG nova.virt.hardware [None req-72a36364-d2e4-49f2-bc10-dbe3424d39a6 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 810.638208] env[63028]: DEBUG nova.virt.hardware [None req-72a36364-d2e4-49f2-bc10-dbe3424d39a6 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 810.638456] env[63028]: DEBUG nova.virt.hardware [None req-72a36364-d2e4-49f2-bc10-dbe3424d39a6 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 810.638647] env[63028]: DEBUG nova.virt.hardware [None req-72a36364-d2e4-49f2-bc10-dbe3424d39a6 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 810.638868] env[63028]: DEBUG 
nova.virt.hardware [None req-72a36364-d2e4-49f2-bc10-dbe3424d39a6 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 810.639121] env[63028]: DEBUG nova.virt.hardware [None req-72a36364-d2e4-49f2-bc10-dbe3424d39a6 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 810.639375] env[63028]: DEBUG nova.virt.hardware [None req-72a36364-d2e4-49f2-bc10-dbe3424d39a6 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 810.641771] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9821e68b-901f-4e20-a5dd-9413c7241679 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.651683] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-908c4fd9-8125-49dd-82f3-0a056020b54a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.666598] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-72a36364-d2e4-49f2-bc10-dbe3424d39a6 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bc:87:07', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c8868dc2-7767-49c0-a2ed-e611fcbf8414', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '296dfd9e-84e1-4ea8-bd17-28920a6a048b', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 810.674016] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-72a36364-d2e4-49f2-bc10-dbe3424d39a6 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Creating folder: Project (98d3fdfda1694b2f9f5985831ea77a21). Parent ref: group-v550570. {{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 810.674373] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f826ec80-5a43-45b1-bad7-198dfea305b6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.689044] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-72a36364-d2e4-49f2-bc10-dbe3424d39a6 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Created folder: Project (98d3fdfda1694b2f9f5985831ea77a21) in parent group-v550570. [ 810.689044] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-72a36364-d2e4-49f2-bc10-dbe3424d39a6 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Creating folder: Instances. Parent ref: group-v550752. 
{{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 810.693017] env[63028]: DEBUG oslo_vmware.rw_handles [None req-bd0f3b82-bcc2-443e-a883-684f0a6398bb tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/527fed8f-1a0d-0cd0-9a39-5e111dd049eb/disk-0.vmdk. {{(pid=63028) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 810.693017] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7234758b-0056-4a50-8b0a-c4d896f0e25e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.693938] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-668f3f4c-1b94-49f1-a9e0-fc85a201414b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.700207] env[63028]: DEBUG oslo_vmware.rw_handles [None req-bd0f3b82-bcc2-443e-a883-684f0a6398bb tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/527fed8f-1a0d-0cd0-9a39-5e111dd049eb/disk-0.vmdk is in state: ready. {{(pid=63028) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 810.700362] env[63028]: ERROR oslo_vmware.rw_handles [None req-bd0f3b82-bcc2-443e-a883-684f0a6398bb tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/527fed8f-1a0d-0cd0-9a39-5e111dd049eb/disk-0.vmdk due to incomplete transfer. [ 810.700581] env[63028]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-d6e52153-0db5-48f4-90b0-fe562657de07 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.705266] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-72a36364-d2e4-49f2-bc10-dbe3424d39a6 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Created folder: Instances in parent group-v550752. [ 810.705538] env[63028]: DEBUG oslo.service.loopingcall [None req-72a36364-d2e4-49f2-bc10-dbe3424d39a6 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 810.705756] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 810.706088] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bb5afc43-7855-4c7c-9239-42f0b59fba7c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.722594] env[63028]: DEBUG oslo_vmware.rw_handles [None req-bd0f3b82-bcc2-443e-a883-684f0a6398bb tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/527fed8f-1a0d-0cd0-9a39-5e111dd049eb/disk-0.vmdk. 
{{(pid=63028) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 810.722780] env[63028]: DEBUG nova.virt.vmwareapi.images [None req-bd0f3b82-bcc2-443e-a883-684f0a6398bb tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Uploaded image 6c9341f0-34a7-4319-9418-6e0f4692185e to the Glance image server {{(pid=63028) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 810.724991] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd0f3b82-bcc2-443e-a883-684f0a6398bb tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Destroying the VM {{(pid=63028) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 810.725703] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-8e487a61-0ccb-46e1-a153-a8a88e7f5cb2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.731494] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 810.731494] env[63028]: value = "task-2735521" [ 810.731494] env[63028]: _type = "Task" [ 810.731494] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 810.732865] env[63028]: DEBUG oslo_vmware.api [None req-bd0f3b82-bcc2-443e-a883-684f0a6398bb tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Waiting for the task: (returnval){ [ 810.732865] env[63028]: value = "task-2735522" [ 810.732865] env[63028]: _type = "Task" [ 810.732865] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 810.744779] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735521, 'name': CreateVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.748242] env[63028]: DEBUG oslo_vmware.api [None req-bd0f3b82-bcc2-443e-a883-684f0a6398bb tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2735522, 'name': Destroy_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.787220] env[63028]: DEBUG nova.compute.manager [None req-114b3219-d7c5-4835-8ed6-9b7d0ce0f081 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] [instance: e346c31b-ef1b-4f75-8564-cefe26bd672f] Start spawning the instance on the hypervisor. 
{{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 810.809592] env[63028]: DEBUG nova.virt.hardware [None req-114b3219-d7c5-4835-8ed6-9b7d0ce0f081 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 810.809905] env[63028]: DEBUG nova.virt.hardware [None req-114b3219-d7c5-4835-8ed6-9b7d0ce0f081 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 810.810128] env[63028]: DEBUG nova.virt.hardware [None req-114b3219-d7c5-4835-8ed6-9b7d0ce0f081 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 810.810374] env[63028]: DEBUG nova.virt.hardware [None req-114b3219-d7c5-4835-8ed6-9b7d0ce0f081 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 810.810586] env[63028]: DEBUG nova.virt.hardware [None req-114b3219-d7c5-4835-8ed6-9b7d0ce0f081 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 810.810749] env[63028]: DEBUG nova.virt.hardware [None req-114b3219-d7c5-4835-8ed6-9b7d0ce0f081 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 810.810975] env[63028]: DEBUG nova.virt.hardware [None req-114b3219-d7c5-4835-8ed6-9b7d0ce0f081 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 810.811156] env[63028]: DEBUG nova.virt.hardware [None req-114b3219-d7c5-4835-8ed6-9b7d0ce0f081 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 
810.811329] env[63028]: DEBUG nova.virt.hardware [None req-114b3219-d7c5-4835-8ed6-9b7d0ce0f081 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 810.811494] env[63028]: DEBUG nova.virt.hardware [None req-114b3219-d7c5-4835-8ed6-9b7d0ce0f081 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 810.811691] env[63028]: DEBUG nova.virt.hardware [None req-114b3219-d7c5-4835-8ed6-9b7d0ce0f081 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 810.812745] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06c9ff2a-8b7f-41f8-bb75-f833d7acbd81 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.824205] env[63028]: DEBUG nova.scheduler.client.report [None req-9f9f608a-dd59-4deb-8ecf-94d85c2fa869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 810.829210] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e733e39d-73a5-4750-b021-db9e9adf8802 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.842974] env[63028]: DEBUG oslo_vmware.api [None req-a647d5f8-656b-4f8b-a625-07443c814584 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Task: {'id': task-2735518, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.095537} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 810.853064] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-a647d5f8-656b-4f8b-a625-07443c814584 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] [instance: a1d00736-1a8d-46e0-9358-46e848b94797] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 810.854225] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-518e12da-e186-4f34-b1cb-d9cc3b56c90f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.880310] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-a647d5f8-656b-4f8b-a625-07443c814584 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] [instance: a1d00736-1a8d-46e0-9358-46e848b94797] Reconfiguring VM instance instance-00000038 to attach disk [datastore1] a1d00736-1a8d-46e0-9358-46e848b94797/a1d00736-1a8d-46e0-9358-46e848b94797.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 810.881347] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-17e32cba-0c6c-403e-a619-48d4832af17b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.903254] env[63028]: DEBUG oslo_vmware.api [None req-a647d5f8-656b-4f8b-a625-07443c814584 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Waiting for the task: (returnval){ [ 810.903254] env[63028]: value = "task-2735523" [ 810.903254] env[63028]: _type = "Task" [ 810.903254] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 810.913716] env[63028]: DEBUG oslo_vmware.api [None req-a647d5f8-656b-4f8b-a625-07443c814584 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Task: {'id': task-2735523, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.231383] env[63028]: DEBUG nova.network.neutron [None req-114b3219-d7c5-4835-8ed6-9b7d0ce0f081 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] [instance: e346c31b-ef1b-4f75-8564-cefe26bd672f] Successfully updated port: a24d4572-d230-46e3-82e9-72efb20f6178 {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 811.246249] env[63028]: DEBUG oslo_vmware.api [None req-bd0f3b82-bcc2-443e-a883-684f0a6398bb tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2735522, 'name': Destroy_Task} progress is 100%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.250085] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735521, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.327097] env[63028]: DEBUG nova.compute.manager [req-c23de81d-fad9-4b7a-b37b-c64b324372f3 req-16a70dcf-df18-4250-a58c-66c3dc85efeb service nova] [instance: a1d00736-1a8d-46e0-9358-46e848b94797] Received event network-changed-66cd0102-9651-45e1-8a38-f65e2f7dd800 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 811.327304] env[63028]: DEBUG nova.compute.manager [req-c23de81d-fad9-4b7a-b37b-c64b324372f3 req-16a70dcf-df18-4250-a58c-66c3dc85efeb service nova] [instance: a1d00736-1a8d-46e0-9358-46e848b94797] Refreshing instance network info cache due to event network-changed-66cd0102-9651-45e1-8a38-f65e2f7dd800. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 811.327524] env[63028]: DEBUG oslo_concurrency.lockutils [req-c23de81d-fad9-4b7a-b37b-c64b324372f3 req-16a70dcf-df18-4250-a58c-66c3dc85efeb service nova] Acquiring lock "refresh_cache-a1d00736-1a8d-46e0-9358-46e848b94797" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 811.327668] env[63028]: DEBUG oslo_concurrency.lockutils [req-c23de81d-fad9-4b7a-b37b-c64b324372f3 req-16a70dcf-df18-4250-a58c-66c3dc85efeb service nova] Acquired lock "refresh_cache-a1d00736-1a8d-46e0-9358-46e848b94797" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 811.327829] env[63028]: DEBUG nova.network.neutron [req-c23de81d-fad9-4b7a-b37b-c64b324372f3 req-16a70dcf-df18-4250-a58c-66c3dc85efeb service nova] [instance: a1d00736-1a8d-46e0-9358-46e848b94797] Refreshing network info cache for port 66cd0102-9651-45e1-8a38-f65e2f7dd800 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 811.335069] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9f9f608a-dd59-4deb-8ecf-94d85c2fa869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.610s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 811.335640] env[63028]: DEBUG nova.compute.manager [None req-9f9f608a-dd59-4deb-8ecf-94d85c2fa869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 70888889-4965-47ab-ad47-59f1c1286bd8] Start building networks asynchronously for instance. 
{{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 811.340037] env[63028]: DEBUG oslo_concurrency.lockutils [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 34.459s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 811.340126] env[63028]: DEBUG oslo_concurrency.lockutils [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 811.340448] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63028) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 811.340798] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d53450d6-dbb3-40d0-b918-b0930d6cbfee tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.042s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 811.341043] env[63028]: DEBUG nova.objects.instance [None req-d53450d6-dbb3-40d0-b918-b0930d6cbfee tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Lazy-loading 'resources' on Instance uuid af87f1a5-b413-4b26-be91-474ad1f73df8 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 811.343593] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a904d38b-544c-4678-947a-25e66c29062f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.354426] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edd5ad33-cba6-48f7-a4b5-815aa6856787 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.372018] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e30dc9c5-a61c-4834-ab58-50056cf3b14b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.379158] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdaf74bb-7f4f-4504-9dfa-63df521eea22 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.412925] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=178721MB free_disk=110GB free_vcpus=48 pci_devices=None {{(pid=63028) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 811.413097] env[63028]: DEBUG oslo_concurrency.lockutils [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 811.422633] env[63028]: DEBUG oslo_vmware.api [None req-a647d5f8-656b-4f8b-a625-07443c814584 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Task: {'id': task-2735523, 'name': ReconfigVM_Task, 'duration_secs': 0.458494} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 811.422957] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-a647d5f8-656b-4f8b-a625-07443c814584 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] [instance: a1d00736-1a8d-46e0-9358-46e848b94797] Reconfigured VM instance instance-00000038 to attach disk [datastore1] a1d00736-1a8d-46e0-9358-46e848b94797/a1d00736-1a8d-46e0-9358-46e848b94797.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 811.423395] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-a647d5f8-656b-4f8b-a625-07443c814584 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Creating Virtual Disk of size 1048576 KB and adapter type paraVirtual on the data store {{(pid=63028) create_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1383}} [ 811.424031] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CreateVirtualDisk_Task with opID=oslo.vmware-3d1493fe-1520-49b9-908a-cb279f2b1619 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.432281] env[63028]: DEBUG oslo_vmware.api [None req-a647d5f8-656b-4f8b-a625-07443c814584 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Waiting for the task: (returnval){ [ 811.432281] env[63028]: value = "task-2735524" [ 811.432281] env[63028]: _type = "Task" [ 811.432281] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 811.441749] env[63028]: DEBUG oslo_vmware.api [None req-a647d5f8-656b-4f8b-a625-07443c814584 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Task: {'id': task-2735524, 'name': CreateVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.512230] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c457f1a3-7cf4-4d81-ade9-2592889b1fa6 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Acquiring lock "79f4ef22-a589-4d5c-8832-5d5dcdd55561" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 811.512424] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c457f1a3-7cf4-4d81-ade9-2592889b1fa6 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Lock "79f4ef22-a589-4d5c-8832-5d5dcdd55561" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 811.560560] env[63028]: DEBUG oslo_vmware.rw_handles [None req-323de9da-0151-4e02-98a3-478fcd44af97 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52642f47-c460-ed0e-2fba-b20d88b188ea/disk-0.vmdk. {{(pid=63028) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 811.561707] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ad1046c-1648-4053-ae22-d0cca0d925da {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.572020] env[63028]: DEBUG oslo_vmware.rw_handles [None req-323de9da-0151-4e02-98a3-478fcd44af97 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52642f47-c460-ed0e-2fba-b20d88b188ea/disk-0.vmdk is in state: ready. {{(pid=63028) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 811.572020] env[63028]: ERROR oslo_vmware.rw_handles [None req-323de9da-0151-4e02-98a3-478fcd44af97 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52642f47-c460-ed0e-2fba-b20d88b188ea/disk-0.vmdk due to incomplete transfer. [ 811.572020] env[63028]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-74f4583b-2eb6-478f-bfd7-d5bf4c0379c2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.582352] env[63028]: DEBUG oslo_vmware.rw_handles [None req-323de9da-0151-4e02-98a3-478fcd44af97 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52642f47-c460-ed0e-2fba-b20d88b188ea/disk-0.vmdk. 
{{(pid=63028) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 811.582652] env[63028]: DEBUG nova.virt.vmwareapi.images [None req-323de9da-0151-4e02-98a3-478fcd44af97 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 1316318e-8dcf-4ac2-b40a-6a3ab6964997] Uploaded image 411d94f7-582d-4070-b857-b6924605fda8 to the Glance image server {{(pid=63028) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 811.584334] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-323de9da-0151-4e02-98a3-478fcd44af97 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 1316318e-8dcf-4ac2-b40a-6a3ab6964997] Destroying the VM {{(pid=63028) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 811.584629] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-30cabb31-13f2-412d-b73c-390fc5324d5b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.591649] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4c4ff793-44ac-4d71-af67-67f78b3ae605 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Acquiring lock "a50e1167-d8ed-4099-83c3-a5066ab0be1f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 811.591834] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4c4ff793-44ac-4d71-af67-67f78b3ae605 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Lock "a50e1167-d8ed-4099-83c3-a5066ab0be1f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 811.594049] env[63028]: DEBUG oslo_vmware.api [None req-323de9da-0151-4e02-98a3-478fcd44af97 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Waiting for the task: (returnval){ [ 811.594049] env[63028]: value = "task-2735525" [ 811.594049] env[63028]: _type = "Task" [ 811.594049] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 811.602946] env[63028]: DEBUG oslo_vmware.api [None req-323de9da-0151-4e02-98a3-478fcd44af97 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Task: {'id': task-2735525, 'name': Destroy_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.697475] env[63028]: DEBUG nova.compute.manager [req-1dd354f5-c767-41e6-a1bf-07eaa25e9cf3 req-60252b3e-82e2-4e84-a3c8-105baf644071 service nova] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Received event network-vif-plugged-296dfd9e-84e1-4ea8-bd17-28920a6a048b {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 811.697521] env[63028]: DEBUG oslo_concurrency.lockutils [req-1dd354f5-c767-41e6-a1bf-07eaa25e9cf3 req-60252b3e-82e2-4e84-a3c8-105baf644071 service nova] Acquiring lock "63524cd8-81de-419f-bb07-0326f3cb393f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 811.697720] env[63028]: DEBUG oslo_concurrency.lockutils [req-1dd354f5-c767-41e6-a1bf-07eaa25e9cf3 req-60252b3e-82e2-4e84-a3c8-105baf644071 service nova] Lock "63524cd8-81de-419f-bb07-0326f3cb393f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 811.697941] env[63028]: DEBUG oslo_concurrency.lockutils [req-1dd354f5-c767-41e6-a1bf-07eaa25e9cf3 req-60252b3e-82e2-4e84-a3c8-105baf644071 service nova] Lock "63524cd8-81de-419f-bb07-0326f3cb393f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 811.698099] env[63028]: DEBUG nova.compute.manager [req-1dd354f5-c767-41e6-a1bf-07eaa25e9cf3 req-60252b3e-82e2-4e84-a3c8-105baf644071 service nova] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] No waiting events found dispatching network-vif-plugged-296dfd9e-84e1-4ea8-bd17-28920a6a048b {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 811.698294] env[63028]: WARNING nova.compute.manager [req-1dd354f5-c767-41e6-a1bf-07eaa25e9cf3 req-60252b3e-82e2-4e84-a3c8-105baf644071 service nova] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Received unexpected event network-vif-plugged-296dfd9e-84e1-4ea8-bd17-28920a6a048b for instance with vm_state building and task_state spawning. 
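The "Acquiring lock" / "acquired ... waited N.NNNs" / "released ... held N.NNNs" entries above (for "compute_resources" and the per-instance "-events" locks) are produced by oslo.concurrency's named-lock helpers, which serialize callers on a lock name and report wait/hold times. Below is a minimal sketch of that pattern; the lock name reuses one seen in the log, but the decorated function and its body are illustrative placeholders, not Nova's actual code.

    from oslo_concurrency import lockutils

    # Named lock: every caller in this process using the same name is
    # serialized, which is what yields the "waited N.NNNs" / "held N.NNNs"
    # figures in the log above.
    @lockutils.synchronized('compute_resources')
    def claim_resources(instance_uuid):
        # placeholder for the work done while the lock is held
        print('claiming resources for %s' % instance_uuid)

    # Equivalent explicit form using the context manager:
    with lockutils.lock('compute_resources'):
        pass  # critical section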
[ 811.737390] env[63028]: DEBUG oslo_concurrency.lockutils [None req-114b3219-d7c5-4835-8ed6-9b7d0ce0f081 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] Acquiring lock "refresh_cache-e346c31b-ef1b-4f75-8564-cefe26bd672f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 811.737551] env[63028]: DEBUG oslo_concurrency.lockutils [None req-114b3219-d7c5-4835-8ed6-9b7d0ce0f081 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] Acquired lock "refresh_cache-e346c31b-ef1b-4f75-8564-cefe26bd672f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 811.737723] env[63028]: DEBUG nova.network.neutron [None req-114b3219-d7c5-4835-8ed6-9b7d0ce0f081 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] [instance: e346c31b-ef1b-4f75-8564-cefe26bd672f] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 811.750376] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735521, 'name': CreateVM_Task, 'duration_secs': 0.520366} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 811.754118] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 811.754118] env[63028]: DEBUG oslo_vmware.api [None req-bd0f3b82-bcc2-443e-a883-684f0a6398bb tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2735522, 'name': Destroy_Task} progress is 100%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.756291] env[63028]: DEBUG oslo_concurrency.lockutils [None req-72a36364-d2e4-49f2-bc10-dbe3424d39a6 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 811.756476] env[63028]: DEBUG oslo_concurrency.lockutils [None req-72a36364-d2e4-49f2-bc10-dbe3424d39a6 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 811.756797] env[63028]: DEBUG oslo_concurrency.lockutils [None req-72a36364-d2e4-49f2-bc10-dbe3424d39a6 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 811.757390] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1d39d9b0-ff5d-4c79-8757-b56257658524 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.763103] env[63028]: DEBUG oslo_vmware.api [None req-72a36364-d2e4-49f2-bc10-dbe3424d39a6 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Waiting for the task: (returnval){ [ 811.763103] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52c3ae4e-9f95-6e34-18e7-3a1fb702a101" [ 811.763103] env[63028]: _type = "Task" [ 811.763103] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 811.773284] env[63028]: DEBUG oslo_vmware.api [None req-72a36364-d2e4-49f2-bc10-dbe3424d39a6 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52c3ae4e-9f95-6e34-18e7-3a1fb702a101, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.840501] env[63028]: DEBUG nova.compute.utils [None req-9f9f608a-dd59-4deb-8ecf-94d85c2fa869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 811.842052] env[63028]: DEBUG nova.compute.manager [None req-9f9f608a-dd59-4deb-8ecf-94d85c2fa869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 70888889-4965-47ab-ad47-59f1c1286bd8] Allocating IP information in the background. 
{{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 811.842242] env[63028]: DEBUG nova.network.neutron [None req-9f9f608a-dd59-4deb-8ecf-94d85c2fa869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 70888889-4965-47ab-ad47-59f1c1286bd8] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 811.896126] env[63028]: DEBUG nova.policy [None req-9f9f608a-dd59-4deb-8ecf-94d85c2fa869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a3ed8f5b3d7b4be99d3b4649e156af58', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '847e89af959a4266ab55c1d2106ba8fe', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 811.948568] env[63028]: DEBUG oslo_vmware.api [None req-a647d5f8-656b-4f8b-a625-07443c814584 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Task: {'id': task-2735524, 'name': CreateVirtualDisk_Task, 'duration_secs': 0.065913} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 811.950671] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-a647d5f8-656b-4f8b-a625-07443c814584 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Created Virtual Disk of size 1048576 KB and type thin {{(pid=63028) create_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1404}} [ 811.953708] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f50575c-8d5b-4308-bcaf-80735b44e6cf {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.981326] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-a647d5f8-656b-4f8b-a625-07443c814584 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] [instance: a1d00736-1a8d-46e0-9358-46e848b94797] Reconfiguring VM instance instance-00000038 to attach disk [datastore1] a1d00736-1a8d-46e0-9358-46e848b94797/ephemeral_0.vmdk or device None with type thin {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 811.986824] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-46e4d72f-1865-4aa7-843e-abf52146bb52 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.010021] env[63028]: DEBUG oslo_vmware.api [None req-a647d5f8-656b-4f8b-a625-07443c814584 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Waiting for the task: (returnval){ [ 812.010021] env[63028]: value = "task-2735526" [ 812.010021] env[63028]: _type = "Task" [ 812.010021] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 812.020857] env[63028]: DEBUG oslo_vmware.api [None req-a647d5f8-656b-4f8b-a625-07443c814584 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Task: {'id': task-2735526, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.104376] env[63028]: DEBUG oslo_vmware.api [None req-323de9da-0151-4e02-98a3-478fcd44af97 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Task: {'id': task-2735525, 'name': Destroy_Task, 'duration_secs': 0.343194} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 812.106954] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-323de9da-0151-4e02-98a3-478fcd44af97 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 1316318e-8dcf-4ac2-b40a-6a3ab6964997] Destroyed the VM [ 812.107213] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-323de9da-0151-4e02-98a3-478fcd44af97 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 1316318e-8dcf-4ac2-b40a-6a3ab6964997] Deleting Snapshot of the VM instance {{(pid=63028) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 812.107648] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-376d3586-3ee3-4ff3-8366-409c6c9404e3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.115718] env[63028]: DEBUG oslo_vmware.api [None req-323de9da-0151-4e02-98a3-478fcd44af97 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Waiting for the task: (returnval){ [ 812.115718] env[63028]: value = "task-2735527" [ 812.115718] env[63028]: _type = "Task" [ 812.115718] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 812.129877] env[63028]: DEBUG oslo_vmware.api [None req-323de9da-0151-4e02-98a3-478fcd44af97 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Task: {'id': task-2735527, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.130454] env[63028]: DEBUG nova.network.neutron [req-c23de81d-fad9-4b7a-b37b-c64b324372f3 req-16a70dcf-df18-4250-a58c-66c3dc85efeb service nova] [instance: a1d00736-1a8d-46e0-9358-46e848b94797] Updated VIF entry in instance network info cache for port 66cd0102-9651-45e1-8a38-f65e2f7dd800. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 812.131169] env[63028]: DEBUG nova.network.neutron [req-c23de81d-fad9-4b7a-b37b-c64b324372f3 req-16a70dcf-df18-4250-a58c-66c3dc85efeb service nova] [instance: a1d00736-1a8d-46e0-9358-46e848b94797] Updating instance_info_cache with network_info: [{"id": "66cd0102-9651-45e1-8a38-f65e2f7dd800", "address": "fa:16:3e:ff:ab:ba", "network": {"id": "bd3f74f8-d12b-4d2e-9aa9-ca1c9e766bf4", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1073484567-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "178b95ba550d453db2b9868e72a8c93f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc1e16db-ad3b-4b7f-ab64-4609c87abac0", "external-id": "nsx-vlan-transportzone-500", "segmentation_id": 500, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap66cd0102-96", "ovs_interfaceid": "66cd0102-9651-45e1-8a38-f65e2f7dd800", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 812.257270] env[63028]: DEBUG oslo_vmware.api [None req-bd0f3b82-bcc2-443e-a883-684f0a6398bb tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2735522, 'name': Destroy_Task, 'duration_secs': 1.495379} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 812.257270] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-bd0f3b82-bcc2-443e-a883-684f0a6398bb tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Destroyed the VM [ 812.257270] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-bd0f3b82-bcc2-443e-a883-684f0a6398bb tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Deleting Snapshot of the VM instance {{(pid=63028) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 812.257270] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-b401dc78-be2a-40e7-8bd7-1bdd6f62f769 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.267010] env[63028]: DEBUG oslo_vmware.api [None req-bd0f3b82-bcc2-443e-a883-684f0a6398bb tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Waiting for the task: (returnval){ [ 812.267010] env[63028]: value = "task-2735528" [ 812.267010] env[63028]: _type = "Task" [ 812.267010] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 812.287724] env[63028]: DEBUG oslo_vmware.api [None req-72a36364-d2e4-49f2-bc10-dbe3424d39a6 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52c3ae4e-9f95-6e34-18e7-3a1fb702a101, 'name': SearchDatastore_Task, 'duration_secs': 0.010899} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 812.294783] env[63028]: DEBUG oslo_concurrency.lockutils [None req-72a36364-d2e4-49f2-bc10-dbe3424d39a6 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 812.295132] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-72a36364-d2e4-49f2-bc10-dbe3424d39a6 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 812.295658] env[63028]: DEBUG oslo_concurrency.lockutils [None req-72a36364-d2e4-49f2-bc10-dbe3424d39a6 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 812.295939] env[63028]: DEBUG oslo_concurrency.lockutils [None req-72a36364-d2e4-49f2-bc10-dbe3424d39a6 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 812.296341] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-72a36364-d2e4-49f2-bc10-dbe3424d39a6 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 812.296558] env[63028]: DEBUG oslo_vmware.api [None req-bd0f3b82-bcc2-443e-a883-684f0a6398bb tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2735528, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.297786] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e0ed644e-a347-47ef-868f-bc3adac59478 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.302310] env[63028]: DEBUG nova.network.neutron [None req-9f9f608a-dd59-4deb-8ecf-94d85c2fa869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 70888889-4965-47ab-ad47-59f1c1286bd8] Successfully created port: 391ce9f8-949e-4d9c-a74f-8c48a5dd3dd0 {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 812.307603] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-72a36364-d2e4-49f2-bc10-dbe3424d39a6 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 812.307847] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-72a36364-d2e4-49f2-bc10-dbe3424d39a6 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 812.311788] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4ec93b60-7a30-4096-a460-f05eeb7ea6e2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.315534] env[63028]: DEBUG nova.network.neutron [None req-114b3219-d7c5-4835-8ed6-9b7d0ce0f081 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] [instance: e346c31b-ef1b-4f75-8564-cefe26bd672f] Instance cache missing network info. {{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 812.322185] env[63028]: DEBUG oslo_vmware.api [None req-72a36364-d2e4-49f2-bc10-dbe3424d39a6 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Waiting for the task: (returnval){ [ 812.322185] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52a7630d-d6b1-2d4e-cef4-0e722d197506" [ 812.322185] env[63028]: _type = "Task" [ 812.322185] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 812.335417] env[63028]: DEBUG oslo_vmware.api [None req-72a36364-d2e4-49f2-bc10-dbe3424d39a6 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52a7630d-d6b1-2d4e-cef4-0e722d197506, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.345726] env[63028]: DEBUG nova.compute.manager [None req-9f9f608a-dd59-4deb-8ecf-94d85c2fa869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 70888889-4965-47ab-ad47-59f1c1286bd8] Start building block device mappings for instance. 
{{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 812.408953] env[63028]: DEBUG oslo_concurrency.lockutils [None req-477590c7-7359-44a5-a332-6d0b0fe72ca7 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Acquiring lock "1d008794-3c1a-46c6-b4eb-3d5441efdb22" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 812.409354] env[63028]: DEBUG oslo_concurrency.lockutils [None req-477590c7-7359-44a5-a332-6d0b0fe72ca7 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Lock "1d008794-3c1a-46c6-b4eb-3d5441efdb22" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 812.479337] env[63028]: DEBUG nova.network.neutron [None req-114b3219-d7c5-4835-8ed6-9b7d0ce0f081 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] [instance: e346c31b-ef1b-4f75-8564-cefe26bd672f] Updating instance_info_cache with network_info: [{"id": "a24d4572-d230-46e3-82e9-72efb20f6178", "address": "fa:16:3e:50:ea:88", "network": {"id": "e86ff824-236f-4554-938b-ff003caed16a", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-1150682083-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8fcbd7d1edc4403691c5e41ba033311d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d298db54-f13d-4bf6-b6c2-755074b3047f", "external-id": "nsx-vlan-transportzone-631", "segmentation_id": 631, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa24d4572-d2", "ovs_interfaceid": "a24d4572-d230-46e3-82e9-72efb20f6178", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 812.486321] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfa9da84-30a6-4fbc-bff7-7f6db1057823 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.498825] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8eb19b31-3b7c-4ee6-b531-1d9bbe79f27a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.537225] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec3725e9-a1c4-4aab-ad02-b444d5bb0000 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.546494] env[63028]: DEBUG oslo_vmware.api [None req-a647d5f8-656b-4f8b-a625-07443c814584 
tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Task: {'id': task-2735526, 'name': ReconfigVM_Task, 'duration_secs': 0.395037} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 812.549487] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-a647d5f8-656b-4f8b-a625-07443c814584 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] [instance: a1d00736-1a8d-46e0-9358-46e848b94797] Reconfigured VM instance instance-00000038 to attach disk [datastore1] a1d00736-1a8d-46e0-9358-46e848b94797/ephemeral_0.vmdk or device None with type thin {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 812.549487] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-84228692-533f-4514-97ca-72d68913638b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.551962] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89d078af-fcee-4962-b79e-e619dfcc517c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.567403] env[63028]: DEBUG nova.compute.provider_tree [None req-d53450d6-dbb3-40d0-b918-b0930d6cbfee tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 812.573026] env[63028]: DEBUG oslo_vmware.api [None req-a647d5f8-656b-4f8b-a625-07443c814584 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Waiting for the task: (returnval){ [ 812.573026] env[63028]: value = "task-2735529" [ 812.573026] env[63028]: _type = "Task" [ 812.573026] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 812.580503] env[63028]: DEBUG oslo_vmware.api [None req-a647d5f8-656b-4f8b-a625-07443c814584 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Task: {'id': task-2735529, 'name': Rename_Task} progress is 6%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.627775] env[63028]: DEBUG oslo_vmware.api [None req-323de9da-0151-4e02-98a3-478fcd44af97 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Task: {'id': task-2735527, 'name': RemoveSnapshot_Task} progress is 100%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.633594] env[63028]: DEBUG oslo_concurrency.lockutils [req-c23de81d-fad9-4b7a-b37b-c64b324372f3 req-16a70dcf-df18-4250-a58c-66c3dc85efeb service nova] Releasing lock "refresh_cache-a1d00736-1a8d-46e0-9358-46e848b94797" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 812.788055] env[63028]: DEBUG oslo_vmware.api [None req-bd0f3b82-bcc2-443e-a883-684f0a6398bb tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2735528, 'name': RemoveSnapshot_Task} progress is 65%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.837537] env[63028]: DEBUG oslo_vmware.api [None req-72a36364-d2e4-49f2-bc10-dbe3424d39a6 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52a7630d-d6b1-2d4e-cef4-0e722d197506, 'name': SearchDatastore_Task, 'duration_secs': 0.012202} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 812.838443] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-64d03240-168c-484c-842c-48c0215fe00f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.845259] env[63028]: DEBUG oslo_vmware.api [None req-72a36364-d2e4-49f2-bc10-dbe3424d39a6 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Waiting for the task: (returnval){ [ 812.845259] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52bedb27-e34f-98d1-71ba-0a1afa8c902e" [ 812.845259] env[63028]: _type = "Task" [ 812.845259] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 812.859479] env[63028]: DEBUG oslo_vmware.api [None req-72a36364-d2e4-49f2-bc10-dbe3424d39a6 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52bedb27-e34f-98d1-71ba-0a1afa8c902e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.982173] env[63028]: DEBUG oslo_concurrency.lockutils [None req-114b3219-d7c5-4835-8ed6-9b7d0ce0f081 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] Releasing lock "refresh_cache-e346c31b-ef1b-4f75-8564-cefe26bd672f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 812.982594] env[63028]: DEBUG nova.compute.manager [None req-114b3219-d7c5-4835-8ed6-9b7d0ce0f081 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] [instance: e346c31b-ef1b-4f75-8564-cefe26bd672f] Instance network_info: |[{"id": "a24d4572-d230-46e3-82e9-72efb20f6178", "address": "fa:16:3e:50:ea:88", "network": {"id": "e86ff824-236f-4554-938b-ff003caed16a", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-1150682083-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8fcbd7d1edc4403691c5e41ba033311d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d298db54-f13d-4bf6-b6c2-755074b3047f", "external-id": "nsx-vlan-transportzone-631", "segmentation_id": 631, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa24d4572-d2", "ovs_interfaceid": "a24d4572-d230-46e3-82e9-72efb20f6178", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 812.983083] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-114b3219-d7c5-4835-8ed6-9b7d0ce0f081 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] [instance: e346c31b-ef1b-4f75-8564-cefe26bd672f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:50:ea:88', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd298db54-f13d-4bf6-b6c2-755074b3047f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a24d4572-d230-46e3-82e9-72efb20f6178', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 812.990439] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-114b3219-d7c5-4835-8ed6-9b7d0ce0f081 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] Creating folder: Project (8fcbd7d1edc4403691c5e41ba033311d). Parent ref: group-v550570. 
{{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 812.990759] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-89a1593f-2bdd-49f4-81f4-caf6888ca7c3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.004022] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-114b3219-d7c5-4835-8ed6-9b7d0ce0f081 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] Created folder: Project (8fcbd7d1edc4403691c5e41ba033311d) in parent group-v550570. [ 813.004249] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-114b3219-d7c5-4835-8ed6-9b7d0ce0f081 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] Creating folder: Instances. Parent ref: group-v550755. {{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 813.004500] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7dfdca6b-0c3a-4765-92bd-d52b8325314d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.014649] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-114b3219-d7c5-4835-8ed6-9b7d0ce0f081 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] Created folder: Instances in parent group-v550755. [ 813.014893] env[63028]: DEBUG oslo.service.loopingcall [None req-114b3219-d7c5-4835-8ed6-9b7d0ce0f081 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 813.015106] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e346c31b-ef1b-4f75-8564-cefe26bd672f] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 813.015315] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-27899120-2f58-4000-9fda-8d55321cda24 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.035730] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 813.035730] env[63028]: value = "task-2735532" [ 813.035730] env[63028]: _type = "Task" [ 813.035730] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 813.043214] env[63028]: DEBUG oslo_concurrency.lockutils [None req-50c05c22-9a17-46a2-aa9e-4dd4c15fda05 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Acquiring lock "f0ca0d73-d428-4b8c-acac-a80b7b7dd793" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 813.043400] env[63028]: DEBUG oslo_concurrency.lockutils [None req-50c05c22-9a17-46a2-aa9e-4dd4c15fda05 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Lock "f0ca0d73-d428-4b8c-acac-a80b7b7dd793" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 813.047880] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735532, 'name': CreateVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.071669] env[63028]: DEBUG nova.scheduler.client.report [None req-d53450d6-dbb3-40d0-b918-b0930d6cbfee tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 813.084598] env[63028]: DEBUG oslo_vmware.api [None req-a647d5f8-656b-4f8b-a625-07443c814584 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Task: {'id': task-2735529, 'name': Rename_Task, 'duration_secs': 0.208523} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 813.084826] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-a647d5f8-656b-4f8b-a625-07443c814584 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] [instance: a1d00736-1a8d-46e0-9358-46e848b94797] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 813.085229] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-03b29117-b3e2-46b2-9a4d-342bf9b0a050 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.092636] env[63028]: DEBUG oslo_vmware.api [None req-a647d5f8-656b-4f8b-a625-07443c814584 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Waiting for the task: (returnval){ [ 813.092636] env[63028]: value = "task-2735533" [ 813.092636] env[63028]: _type = "Task" [ 813.092636] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 813.101389] env[63028]: DEBUG oslo_vmware.api [None req-a647d5f8-656b-4f8b-a625-07443c814584 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Task: {'id': task-2735533, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.128487] env[63028]: DEBUG oslo_vmware.api [None req-323de9da-0151-4e02-98a3-478fcd44af97 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Task: {'id': task-2735527, 'name': RemoveSnapshot_Task, 'duration_secs': 0.556278} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 813.128795] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-323de9da-0151-4e02-98a3-478fcd44af97 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 1316318e-8dcf-4ac2-b40a-6a3ab6964997] Deleted Snapshot of the VM instance {{(pid=63028) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 813.129071] env[63028]: INFO nova.compute.manager [None req-323de9da-0151-4e02-98a3-478fcd44af97 tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 1316318e-8dcf-4ac2-b40a-6a3ab6964997] Took 13.85 seconds to snapshot the instance on the hypervisor. [ 813.289399] env[63028]: DEBUG oslo_vmware.api [None req-bd0f3b82-bcc2-443e-a883-684f0a6398bb tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2735528, 'name': RemoveSnapshot_Task, 'duration_secs': 0.585444} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 813.289695] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-bd0f3b82-bcc2-443e-a883-684f0a6398bb tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Deleted Snapshot of the VM instance {{(pid=63028) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 813.289976] env[63028]: INFO nova.compute.manager [None req-bd0f3b82-bcc2-443e-a883-684f0a6398bb tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Took 14.57 seconds to snapshot the instance on the hypervisor. [ 813.356370] env[63028]: DEBUG oslo_vmware.api [None req-72a36364-d2e4-49f2-bc10-dbe3424d39a6 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52bedb27-e34f-98d1-71ba-0a1afa8c902e, 'name': SearchDatastore_Task, 'duration_secs': 0.013935} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 813.357070] env[63028]: DEBUG oslo_concurrency.lockutils [None req-72a36364-d2e4-49f2-bc10-dbe3424d39a6 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 813.357070] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-72a36364-d2e4-49f2-bc10-dbe3424d39a6 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] 63524cd8-81de-419f-bb07-0326f3cb393f/63524cd8-81de-419f-bb07-0326f3cb393f.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 813.357317] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-843e52c8-0608-478e-8cc0-c0fa95e776a1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.361891] env[63028]: DEBUG nova.compute.manager [None req-9f9f608a-dd59-4deb-8ecf-94d85c2fa869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 70888889-4965-47ab-ad47-59f1c1286bd8] Start spawning the instance on the hypervisor. {{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 813.366283] env[63028]: DEBUG oslo_vmware.api [None req-72a36364-d2e4-49f2-bc10-dbe3424d39a6 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Waiting for the task: (returnval){ [ 813.366283] env[63028]: value = "task-2735534" [ 813.366283] env[63028]: _type = "Task" [ 813.366283] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 813.375288] env[63028]: DEBUG oslo_vmware.api [None req-72a36364-d2e4-49f2-bc10-dbe3424d39a6 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Task: {'id': task-2735534, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.388686] env[63028]: DEBUG nova.virt.hardware [None req-9f9f608a-dd59-4deb-8ecf-94d85c2fa869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 813.388940] env[63028]: DEBUG nova.virt.hardware [None req-9f9f608a-dd59-4deb-8ecf-94d85c2fa869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 813.389128] env[63028]: DEBUG nova.virt.hardware [None req-9f9f608a-dd59-4deb-8ecf-94d85c2fa869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 813.389337] env[63028]: DEBUG nova.virt.hardware [None req-9f9f608a-dd59-4deb-8ecf-94d85c2fa869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 813.389490] env[63028]: DEBUG nova.virt.hardware [None req-9f9f608a-dd59-4deb-8ecf-94d85c2fa869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 813.389638] env[63028]: DEBUG nova.virt.hardware [None req-9f9f608a-dd59-4deb-8ecf-94d85c2fa869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 813.389882] env[63028]: DEBUG nova.virt.hardware [None req-9f9f608a-dd59-4deb-8ecf-94d85c2fa869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 813.390098] env[63028]: DEBUG nova.virt.hardware [None req-9f9f608a-dd59-4deb-8ecf-94d85c2fa869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 813.390285] env[63028]: DEBUG 
nova.virt.hardware [None req-9f9f608a-dd59-4deb-8ecf-94d85c2fa869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 813.390454] env[63028]: DEBUG nova.virt.hardware [None req-9f9f608a-dd59-4deb-8ecf-94d85c2fa869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 813.390628] env[63028]: DEBUG nova.virt.hardware [None req-9f9f608a-dd59-4deb-8ecf-94d85c2fa869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 813.391837] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a05dafe9-daa2-4eed-bcfa-98396865c15a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.400156] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8ea10ab-9a4d-4156-9120-0bc79204654c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.545933] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735532, 'name': CreateVM_Task, 'duration_secs': 0.344314} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 813.546117] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e346c31b-ef1b-4f75-8564-cefe26bd672f] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 813.546820] env[63028]: DEBUG oslo_concurrency.lockutils [None req-114b3219-d7c5-4835-8ed6-9b7d0ce0f081 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 813.546992] env[63028]: DEBUG oslo_concurrency.lockutils [None req-114b3219-d7c5-4835-8ed6-9b7d0ce0f081 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 813.547326] env[63028]: DEBUG oslo_concurrency.lockutils [None req-114b3219-d7c5-4835-8ed6-9b7d0ce0f081 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 813.547580] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-53898f85-a455-4e5b-8fe2-39203458067f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.552843] env[63028]: DEBUG 
oslo_vmware.api [None req-114b3219-d7c5-4835-8ed6-9b7d0ce0f081 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] Waiting for the task: (returnval){ [ 813.552843] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]526ba0e6-14c9-2105-a880-e4f74a8033fb" [ 813.552843] env[63028]: _type = "Task" [ 813.552843] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 813.563867] env[63028]: DEBUG oslo_vmware.api [None req-114b3219-d7c5-4835-8ed6-9b7d0ce0f081 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]526ba0e6-14c9-2105-a880-e4f74a8033fb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.579816] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d53450d6-dbb3-40d0-b918-b0930d6cbfee tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.239s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 813.582079] env[63028]: DEBUG oslo_concurrency.lockutils [None req-273a0846-f74e-4cbe-88c7-25ef6a67da40 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.695s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 813.582452] env[63028]: DEBUG nova.objects.instance [None req-273a0846-f74e-4cbe-88c7-25ef6a67da40 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Lazy-loading 'resources' on Instance uuid a4b0d948-d950-414a-b23f-faefa5ab038c {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 813.606449] env[63028]: DEBUG oslo_vmware.api [None req-a647d5f8-656b-4f8b-a625-07443c814584 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Task: {'id': task-2735533, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.609190] env[63028]: INFO nova.scheduler.client.report [None req-d53450d6-dbb3-40d0-b918-b0930d6cbfee tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Deleted allocations for instance af87f1a5-b413-4b26-be91-474ad1f73df8 [ 813.843929] env[63028]: DEBUG nova.compute.manager [None req-bd0f3b82-bcc2-443e-a883-684f0a6398bb tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Found 2 images (rotation: 2) {{(pid=63028) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 813.848069] env[63028]: DEBUG nova.compute.manager [req-a36006ad-d5d1-4eb3-88ad-797e1bd47181 req-91037ece-a7ab-466f-84e3-bf61e5a54bf8 service nova] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Received event network-changed-296dfd9e-84e1-4ea8-bd17-28920a6a048b {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 813.848284] env[63028]: DEBUG nova.compute.manager [req-a36006ad-d5d1-4eb3-88ad-797e1bd47181 req-91037ece-a7ab-466f-84e3-bf61e5a54bf8 service nova] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Refreshing instance network info cache due to event network-changed-296dfd9e-84e1-4ea8-bd17-28920a6a048b. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 813.848505] env[63028]: DEBUG oslo_concurrency.lockutils [req-a36006ad-d5d1-4eb3-88ad-797e1bd47181 req-91037ece-a7ab-466f-84e3-bf61e5a54bf8 service nova] Acquiring lock "refresh_cache-63524cd8-81de-419f-bb07-0326f3cb393f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 813.848681] env[63028]: DEBUG oslo_concurrency.lockutils [req-a36006ad-d5d1-4eb3-88ad-797e1bd47181 req-91037ece-a7ab-466f-84e3-bf61e5a54bf8 service nova] Acquired lock "refresh_cache-63524cd8-81de-419f-bb07-0326f3cb393f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 813.848900] env[63028]: DEBUG nova.network.neutron [req-a36006ad-d5d1-4eb3-88ad-797e1bd47181 req-91037ece-a7ab-466f-84e3-bf61e5a54bf8 service nova] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Refreshing network info cache for port 296dfd9e-84e1-4ea8-bd17-28920a6a048b {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 813.879351] env[63028]: DEBUG oslo_vmware.api [None req-72a36364-d2e4-49f2-bc10-dbe3424d39a6 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Task: {'id': task-2735534, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.069204] env[63028]: DEBUG oslo_vmware.api [None req-114b3219-d7c5-4835-8ed6-9b7d0ce0f081 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]526ba0e6-14c9-2105-a880-e4f74a8033fb, 'name': SearchDatastore_Task, 'duration_secs': 0.011922} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 814.070196] env[63028]: DEBUG nova.network.neutron [None req-9f9f608a-dd59-4deb-8ecf-94d85c2fa869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 70888889-4965-47ab-ad47-59f1c1286bd8] Successfully updated port: 391ce9f8-949e-4d9c-a74f-8c48a5dd3dd0 {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 814.076420] env[63028]: DEBUG oslo_concurrency.lockutils [None req-114b3219-d7c5-4835-8ed6-9b7d0ce0f081 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 814.076420] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-114b3219-d7c5-4835-8ed6-9b7d0ce0f081 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] [instance: e346c31b-ef1b-4f75-8564-cefe26bd672f] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 814.076420] env[63028]: DEBUG oslo_concurrency.lockutils [None req-114b3219-d7c5-4835-8ed6-9b7d0ce0f081 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 814.076420] env[63028]: DEBUG oslo_concurrency.lockutils [None req-114b3219-d7c5-4835-8ed6-9b7d0ce0f081 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 814.076420] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-114b3219-d7c5-4835-8ed6-9b7d0ce0f081 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 814.076420] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-144f24f1-4f2e-4948-9c0a-9bd688e3ccf1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.088922] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-114b3219-d7c5-4835-8ed6-9b7d0ce0f081 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 814.089677] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-114b3219-d7c5-4835-8ed6-9b7d0ce0f081 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 814.091221] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-59395a1e-914b-4621-b895-c37f4ea237e6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.101599] env[63028]: DEBUG nova.compute.manager [req-7152f9be-cbc8-4ca3-8ce9-ff957f130e22 req-b47b817f-241f-4b09-84b5-5b1a5753eafd service nova] [instance: 70888889-4965-47ab-ad47-59f1c1286bd8] Received event network-vif-plugged-391ce9f8-949e-4d9c-a74f-8c48a5dd3dd0 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 814.101822] env[63028]: DEBUG oslo_concurrency.lockutils [req-7152f9be-cbc8-4ca3-8ce9-ff957f130e22 req-b47b817f-241f-4b09-84b5-5b1a5753eafd service nova] Acquiring lock "70888889-4965-47ab-ad47-59f1c1286bd8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 814.102030] env[63028]: DEBUG oslo_concurrency.lockutils [req-7152f9be-cbc8-4ca3-8ce9-ff957f130e22 req-b47b817f-241f-4b09-84b5-5b1a5753eafd service nova] Lock "70888889-4965-47ab-ad47-59f1c1286bd8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 814.102251] env[63028]: DEBUG oslo_concurrency.lockutils [req-7152f9be-cbc8-4ca3-8ce9-ff957f130e22 req-b47b817f-241f-4b09-84b5-5b1a5753eafd service nova] Lock "70888889-4965-47ab-ad47-59f1c1286bd8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 814.102425] env[63028]: DEBUG nova.compute.manager [req-7152f9be-cbc8-4ca3-8ce9-ff957f130e22 req-b47b817f-241f-4b09-84b5-5b1a5753eafd service nova] [instance: 70888889-4965-47ab-ad47-59f1c1286bd8] No waiting events found dispatching network-vif-plugged-391ce9f8-949e-4d9c-a74f-8c48a5dd3dd0 {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 814.102589] env[63028]: WARNING nova.compute.manager [req-7152f9be-cbc8-4ca3-8ce9-ff957f130e22 req-b47b817f-241f-4b09-84b5-5b1a5753eafd service nova] [instance: 70888889-4965-47ab-ad47-59f1c1286bd8] Received unexpected event network-vif-plugged-391ce9f8-949e-4d9c-a74f-8c48a5dd3dd0 for instance with vm_state building and task_state spawning. [ 814.105109] env[63028]: DEBUG oslo_vmware.api [None req-114b3219-d7c5-4835-8ed6-9b7d0ce0f081 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] Waiting for the task: (returnval){ [ 814.105109] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]521dd39e-db9c-ed86-3b60-4e07291b52d8" [ 814.105109] env[63028]: _type = "Task" [ 814.105109] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 814.113875] env[63028]: DEBUG oslo_vmware.api [None req-a647d5f8-656b-4f8b-a625-07443c814584 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Task: {'id': task-2735533, 'name': PowerOnVM_Task, 'duration_secs': 0.797132} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 814.115543] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-a647d5f8-656b-4f8b-a625-07443c814584 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] [instance: a1d00736-1a8d-46e0-9358-46e848b94797] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 814.115543] env[63028]: INFO nova.compute.manager [None req-a647d5f8-656b-4f8b-a625-07443c814584 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] [instance: a1d00736-1a8d-46e0-9358-46e848b94797] Took 10.51 seconds to spawn the instance on the hypervisor. [ 814.115543] env[63028]: DEBUG nova.compute.manager [None req-a647d5f8-656b-4f8b-a625-07443c814584 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] [instance: a1d00736-1a8d-46e0-9358-46e848b94797] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 814.117809] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d53450d6-dbb3-40d0-b918-b0930d6cbfee tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Lock "af87f1a5-b413-4b26-be91-474ad1f73df8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 40.931s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 814.119718] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7984ef19-ec28-4cb3-91be-eba35b118393 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.128989] env[63028]: DEBUG oslo_vmware.api [None req-114b3219-d7c5-4835-8ed6-9b7d0ce0f081 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]521dd39e-db9c-ed86-3b60-4e07291b52d8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.384214] env[63028]: DEBUG oslo_vmware.api [None req-72a36364-d2e4-49f2-bc10-dbe3424d39a6 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Task: {'id': task-2735534, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.738808} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 814.384214] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-72a36364-d2e4-49f2-bc10-dbe3424d39a6 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] 63524cd8-81de-419f-bb07-0326f3cb393f/63524cd8-81de-419f-bb07-0326f3cb393f.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 814.384214] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-72a36364-d2e4-49f2-bc10-dbe3424d39a6 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 814.384214] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1aa585f2-c1fb-4f9d-abe1-53519a23e67a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.393737] env[63028]: DEBUG oslo_vmware.api [None req-72a36364-d2e4-49f2-bc10-dbe3424d39a6 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Waiting for the task: (returnval){ [ 814.393737] env[63028]: value = "task-2735535" [ 814.393737] env[63028]: _type = "Task" [ 814.393737] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 814.409468] env[63028]: DEBUG oslo_vmware.api [None req-72a36364-d2e4-49f2-bc10-dbe3424d39a6 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Task: {'id': task-2735535, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.574452] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a415a90-e0a1-451d-a79d-5f88e6a3e9ee {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.577702] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9f9f608a-dd59-4deb-8ecf-94d85c2fa869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Acquiring lock "refresh_cache-70888889-4965-47ab-ad47-59f1c1286bd8" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 814.577702] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9f9f608a-dd59-4deb-8ecf-94d85c2fa869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Acquired lock "refresh_cache-70888889-4965-47ab-ad47-59f1c1286bd8" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 814.579112] env[63028]: DEBUG nova.network.neutron [None req-9f9f608a-dd59-4deb-8ecf-94d85c2fa869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 70888889-4965-47ab-ad47-59f1c1286bd8] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 814.587757] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-994e9455-562d-4daa-903e-2b38ff075259 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.625084] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df96fd4e-c16f-4a06-8c58-356518357592 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.637402] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-608bad88-5e4e-4ee3-9794-8329d03ee361 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.641665] env[63028]: DEBUG oslo_vmware.api [None req-114b3219-d7c5-4835-8ed6-9b7d0ce0f081 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]521dd39e-db9c-ed86-3b60-4e07291b52d8, 'name': SearchDatastore_Task, 'duration_secs': 0.016289} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 814.642569] env[63028]: DEBUG nova.network.neutron [req-a36006ad-d5d1-4eb3-88ad-797e1bd47181 req-91037ece-a7ab-466f-84e3-bf61e5a54bf8 service nova] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Updated VIF entry in instance network info cache for port 296dfd9e-84e1-4ea8-bd17-28920a6a048b. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 814.642915] env[63028]: DEBUG nova.network.neutron [req-a36006ad-d5d1-4eb3-88ad-797e1bd47181 req-91037ece-a7ab-466f-84e3-bf61e5a54bf8 service nova] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Updating instance_info_cache with network_info: [{"id": "296dfd9e-84e1-4ea8-bd17-28920a6a048b", "address": "fa:16:3e:bc:87:07", "network": {"id": "37013470-5bda-41a6-826c-03ac0d423c85", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1214578902-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "98d3fdfda1694b2f9f5985831ea77a21", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8868dc2-7767-49c0-a2ed-e611fcbf8414", "external-id": "nsx-vlan-transportzone-158", "segmentation_id": 158, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap296dfd9e-84", "ovs_interfaceid": "296dfd9e-84e1-4ea8-bd17-28920a6a048b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 814.651442] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-78ad4efc-4323-49bd-a087-2a7ec014c313 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.654921] env[63028]: INFO nova.compute.manager [None req-a647d5f8-656b-4f8b-a625-07443c814584 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] [instance: a1d00736-1a8d-46e0-9358-46e848b94797] Took 53.37 seconds to build instance. [ 814.665156] env[63028]: DEBUG nova.compute.provider_tree [None req-273a0846-f74e-4cbe-88c7-25ef6a67da40 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 814.670947] env[63028]: DEBUG oslo_vmware.api [None req-114b3219-d7c5-4835-8ed6-9b7d0ce0f081 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] Waiting for the task: (returnval){ [ 814.670947] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52d65f13-ad48-704d-7b36-15e91404003a" [ 814.670947] env[63028]: _type = "Task" [ 814.670947] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 814.684463] env[63028]: DEBUG oslo_vmware.api [None req-114b3219-d7c5-4835-8ed6-9b7d0ce0f081 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52d65f13-ad48-704d-7b36-15e91404003a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.906589] env[63028]: DEBUG oslo_vmware.api [None req-72a36364-d2e4-49f2-bc10-dbe3424d39a6 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Task: {'id': task-2735535, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070158} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 814.906877] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-72a36364-d2e4-49f2-bc10-dbe3424d39a6 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 814.907667] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81c811d5-1e79-4734-951e-b06c5e719f7a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.934085] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-72a36364-d2e4-49f2-bc10-dbe3424d39a6 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Reconfiguring VM instance instance-00000039 to attach disk [datastore2] 63524cd8-81de-419f-bb07-0326f3cb393f/63524cd8-81de-419f-bb07-0326f3cb393f.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 814.936501] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-981924bc-bac7-41e0-a026-8d8b1f9ab4d2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.956260] env[63028]: DEBUG oslo_vmware.api [None req-72a36364-d2e4-49f2-bc10-dbe3424d39a6 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Waiting for the task: (returnval){ [ 814.956260] env[63028]: value = "task-2735536" [ 814.956260] env[63028]: _type = "Task" [ 814.956260] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 814.965445] env[63028]: DEBUG oslo_vmware.api [None req-72a36364-d2e4-49f2-bc10-dbe3424d39a6 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Task: {'id': task-2735536, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.120515] env[63028]: DEBUG nova.network.neutron [None req-9f9f608a-dd59-4deb-8ecf-94d85c2fa869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 70888889-4965-47ab-ad47-59f1c1286bd8] Instance cache missing network info. 
{{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 815.150951] env[63028]: DEBUG oslo_concurrency.lockutils [req-a36006ad-d5d1-4eb3-88ad-797e1bd47181 req-91037ece-a7ab-466f-84e3-bf61e5a54bf8 service nova] Releasing lock "refresh_cache-63524cd8-81de-419f-bb07-0326f3cb393f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 815.151268] env[63028]: DEBUG nova.compute.manager [req-a36006ad-d5d1-4eb3-88ad-797e1bd47181 req-91037ece-a7ab-466f-84e3-bf61e5a54bf8 service nova] [instance: e346c31b-ef1b-4f75-8564-cefe26bd672f] Received event network-vif-plugged-a24d4572-d230-46e3-82e9-72efb20f6178 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 815.151626] env[63028]: DEBUG oslo_concurrency.lockutils [req-a36006ad-d5d1-4eb3-88ad-797e1bd47181 req-91037ece-a7ab-466f-84e3-bf61e5a54bf8 service nova] Acquiring lock "e346c31b-ef1b-4f75-8564-cefe26bd672f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 815.151748] env[63028]: DEBUG oslo_concurrency.lockutils [req-a36006ad-d5d1-4eb3-88ad-797e1bd47181 req-91037ece-a7ab-466f-84e3-bf61e5a54bf8 service nova] Lock "e346c31b-ef1b-4f75-8564-cefe26bd672f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 815.152711] env[63028]: DEBUG oslo_concurrency.lockutils [req-a36006ad-d5d1-4eb3-88ad-797e1bd47181 req-91037ece-a7ab-466f-84e3-bf61e5a54bf8 service nova] Lock "e346c31b-ef1b-4f75-8564-cefe26bd672f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 815.152711] env[63028]: DEBUG nova.compute.manager [req-a36006ad-d5d1-4eb3-88ad-797e1bd47181 req-91037ece-a7ab-466f-84e3-bf61e5a54bf8 service nova] [instance: e346c31b-ef1b-4f75-8564-cefe26bd672f] No waiting events found dispatching network-vif-plugged-a24d4572-d230-46e3-82e9-72efb20f6178 {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 815.152711] env[63028]: WARNING nova.compute.manager [req-a36006ad-d5d1-4eb3-88ad-797e1bd47181 req-91037ece-a7ab-466f-84e3-bf61e5a54bf8 service nova] [instance: e346c31b-ef1b-4f75-8564-cefe26bd672f] Received unexpected event network-vif-plugged-a24d4572-d230-46e3-82e9-72efb20f6178 for instance with vm_state building and task_state spawning. [ 815.152711] env[63028]: DEBUG nova.compute.manager [req-a36006ad-d5d1-4eb3-88ad-797e1bd47181 req-91037ece-a7ab-466f-84e3-bf61e5a54bf8 service nova] [instance: e346c31b-ef1b-4f75-8564-cefe26bd672f] Received event network-changed-a24d4572-d230-46e3-82e9-72efb20f6178 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 815.152711] env[63028]: DEBUG nova.compute.manager [req-a36006ad-d5d1-4eb3-88ad-797e1bd47181 req-91037ece-a7ab-466f-84e3-bf61e5a54bf8 service nova] [instance: e346c31b-ef1b-4f75-8564-cefe26bd672f] Refreshing instance network info cache due to event network-changed-a24d4572-d230-46e3-82e9-72efb20f6178. 
{{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 815.152711] env[63028]: DEBUG oslo_concurrency.lockutils [req-a36006ad-d5d1-4eb3-88ad-797e1bd47181 req-91037ece-a7ab-466f-84e3-bf61e5a54bf8 service nova] Acquiring lock "refresh_cache-e346c31b-ef1b-4f75-8564-cefe26bd672f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 815.152916] env[63028]: DEBUG oslo_concurrency.lockutils [req-a36006ad-d5d1-4eb3-88ad-797e1bd47181 req-91037ece-a7ab-466f-84e3-bf61e5a54bf8 service nova] Acquired lock "refresh_cache-e346c31b-ef1b-4f75-8564-cefe26bd672f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 815.152916] env[63028]: DEBUG nova.network.neutron [req-a36006ad-d5d1-4eb3-88ad-797e1bd47181 req-91037ece-a7ab-466f-84e3-bf61e5a54bf8 service nova] [instance: e346c31b-ef1b-4f75-8564-cefe26bd672f] Refreshing network info cache for port a24d4572-d230-46e3-82e9-72efb20f6178 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 815.167282] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a647d5f8-656b-4f8b-a625-07443c814584 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Lock "a1d00736-1a8d-46e0-9358-46e848b94797" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 83.202s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 815.169116] env[63028]: DEBUG nova.scheduler.client.report [None req-273a0846-f74e-4cbe-88c7-25ef6a67da40 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 815.183011] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c1571098-c3e1-4a86-83aa-080ecc8a35f5 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Acquiring lock "3b90dbb8-66ce-435f-beae-5464720bfb3e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 815.183243] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c1571098-c3e1-4a86-83aa-080ecc8a35f5 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Lock "3b90dbb8-66ce-435f-beae-5464720bfb3e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 815.193740] env[63028]: DEBUG oslo_vmware.api [None req-114b3219-d7c5-4835-8ed6-9b7d0ce0f081 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] Task: {'id': 
session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52d65f13-ad48-704d-7b36-15e91404003a, 'name': SearchDatastore_Task, 'duration_secs': 0.019588} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 815.193981] env[63028]: DEBUG oslo_concurrency.lockutils [None req-114b3219-d7c5-4835-8ed6-9b7d0ce0f081 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 815.194235] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-114b3219-d7c5-4835-8ed6-9b7d0ce0f081 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] e346c31b-ef1b-4f75-8564-cefe26bd672f/e346c31b-ef1b-4f75-8564-cefe26bd672f.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 815.194482] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-38a5145f-fc20-4198-ab3e-838eb83d2dc0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.203041] env[63028]: DEBUG oslo_vmware.api [None req-114b3219-d7c5-4835-8ed6-9b7d0ce0f081 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] Waiting for the task: (returnval){ [ 815.203041] env[63028]: value = "task-2735537" [ 815.203041] env[63028]: _type = "Task" [ 815.203041] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 815.211750] env[63028]: DEBUG oslo_vmware.api [None req-114b3219-d7c5-4835-8ed6-9b7d0ce0f081 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] Task: {'id': task-2735537, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.264772] env[63028]: DEBUG nova.compute.manager [None req-e6afe889-fe9d-4144-b620-f9ab47407b37 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 815.265692] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f200066d-e303-47c4-b66e-9b4e3a88f959 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.319204] env[63028]: DEBUG nova.network.neutron [None req-9f9f608a-dd59-4deb-8ecf-94d85c2fa869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 70888889-4965-47ab-ad47-59f1c1286bd8] Updating instance_info_cache with network_info: [{"id": "391ce9f8-949e-4d9c-a74f-8c48a5dd3dd0", "address": "fa:16:3e:7f:a5:72", "network": {"id": "a3d9dede-4b44-4172-9541-24cc42ad633f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-595575190-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "847e89af959a4266ab55c1d2106ba8fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8459aaf-d6a8-46fb-ad14-464ac3104695", "external-id": "nsx-vlan-transportzone-46", "segmentation_id": 46, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap391ce9f8-94", "ovs_interfaceid": "391ce9f8-949e-4d9c-a74f-8c48a5dd3dd0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 815.467418] env[63028]: DEBUG oslo_vmware.api [None req-72a36364-d2e4-49f2-bc10-dbe3424d39a6 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Task: {'id': task-2735536, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.674552] env[63028]: DEBUG nova.compute.manager [None req-81b1346f-eb21-48c7-8ffd-fb557f6b5405 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] [instance: 4e859327-ccd3-440e-b884-67f6cdadf97f] Starting instance... 
{{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 815.681814] env[63028]: DEBUG oslo_concurrency.lockutils [None req-273a0846-f74e-4cbe-88c7-25ef6a67da40 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.100s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 815.684047] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c668e099-ea76-49a7-8eba-290ebc50fa5f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 36.241s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 815.692038] env[63028]: INFO nova.compute.claims [None req-c668e099-ea76-49a7-8eba-290ebc50fa5f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 514c83d1-4fb1-435c-8c25-aa112c744131] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 815.715924] env[63028]: DEBUG oslo_vmware.api [None req-114b3219-d7c5-4835-8ed6-9b7d0ce0f081 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] Task: {'id': task-2735537, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.777008] env[63028]: INFO nova.compute.manager [None req-e6afe889-fe9d-4144-b620-f9ab47407b37 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] instance snapshotting [ 815.777742] env[63028]: DEBUG nova.objects.instance [None req-e6afe889-fe9d-4144-b620-f9ab47407b37 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Lazy-loading 'flavor' on Instance uuid c06813c4-472d-4bf9-84ec-0d01306bcd48 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 815.822193] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9f9f608a-dd59-4deb-8ecf-94d85c2fa869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Releasing lock "refresh_cache-70888889-4965-47ab-ad47-59f1c1286bd8" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 815.822627] env[63028]: DEBUG nova.compute.manager [None req-9f9f608a-dd59-4deb-8ecf-94d85c2fa869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 70888889-4965-47ab-ad47-59f1c1286bd8] Instance network_info: |[{"id": "391ce9f8-949e-4d9c-a74f-8c48a5dd3dd0", "address": "fa:16:3e:7f:a5:72", "network": {"id": "a3d9dede-4b44-4172-9541-24cc42ad633f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-595575190-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": 
"847e89af959a4266ab55c1d2106ba8fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8459aaf-d6a8-46fb-ad14-464ac3104695", "external-id": "nsx-vlan-transportzone-46", "segmentation_id": 46, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap391ce9f8-94", "ovs_interfaceid": "391ce9f8-949e-4d9c-a74f-8c48a5dd3dd0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 815.823081] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-9f9f608a-dd59-4deb-8ecf-94d85c2fa869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 70888889-4965-47ab-ad47-59f1c1286bd8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7f:a5:72', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c8459aaf-d6a8-46fb-ad14-464ac3104695', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '391ce9f8-949e-4d9c-a74f-8c48a5dd3dd0', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 815.831854] env[63028]: DEBUG oslo.service.loopingcall [None req-9f9f608a-dd59-4deb-8ecf-94d85c2fa869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 815.833214] env[63028]: INFO nova.scheduler.client.report [None req-273a0846-f74e-4cbe-88c7-25ef6a67da40 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Deleted allocations for instance a4b0d948-d950-414a-b23f-faefa5ab038c [ 815.837816] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 70888889-4965-47ab-ad47-59f1c1286bd8] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 815.840126] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2b665a17-b4d6-4b03-9718-82fcb6c78abe {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.864612] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 815.864612] env[63028]: value = "task-2735538" [ 815.864612] env[63028]: _type = "Task" [ 815.864612] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 815.875606] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735538, 'name': CreateVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.969217] env[63028]: DEBUG oslo_vmware.api [None req-72a36364-d2e4-49f2-bc10-dbe3424d39a6 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Task: {'id': task-2735536, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.172198] env[63028]: DEBUG nova.network.neutron [req-a36006ad-d5d1-4eb3-88ad-797e1bd47181 req-91037ece-a7ab-466f-84e3-bf61e5a54bf8 service nova] [instance: e346c31b-ef1b-4f75-8564-cefe26bd672f] Updated VIF entry in instance network info cache for port a24d4572-d230-46e3-82e9-72efb20f6178. {{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 816.172532] env[63028]: DEBUG nova.network.neutron [req-a36006ad-d5d1-4eb3-88ad-797e1bd47181 req-91037ece-a7ab-466f-84e3-bf61e5a54bf8 service nova] [instance: e346c31b-ef1b-4f75-8564-cefe26bd672f] Updating instance_info_cache with network_info: [{"id": "a24d4572-d230-46e3-82e9-72efb20f6178", "address": "fa:16:3e:50:ea:88", "network": {"id": "e86ff824-236f-4554-938b-ff003caed16a", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-1150682083-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8fcbd7d1edc4403691c5e41ba033311d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d298db54-f13d-4bf6-b6c2-755074b3047f", "external-id": "nsx-vlan-transportzone-631", "segmentation_id": 631, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa24d4572-d2", "ovs_interfaceid": "a24d4572-d230-46e3-82e9-72efb20f6178", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 816.194238] env[63028]: DEBUG oslo_concurrency.lockutils [None req-81b1346f-eb21-48c7-8ffd-fb557f6b5405 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 816.216835] env[63028]: DEBUG oslo_vmware.api [None req-114b3219-d7c5-4835-8ed6-9b7d0ce0f081 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] Task: {'id': task-2735537, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.921848} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 816.218438] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-114b3219-d7c5-4835-8ed6-9b7d0ce0f081 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] e346c31b-ef1b-4f75-8564-cefe26bd672f/e346c31b-ef1b-4f75-8564-cefe26bd672f.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 816.218438] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-114b3219-d7c5-4835-8ed6-9b7d0ce0f081 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] [instance: e346c31b-ef1b-4f75-8564-cefe26bd672f] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 816.218438] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8bed639f-598d-461f-b262-1038e368f96d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.225883] env[63028]: DEBUG oslo_vmware.api [None req-114b3219-d7c5-4835-8ed6-9b7d0ce0f081 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] Waiting for the task: (returnval){ [ 816.225883] env[63028]: value = "task-2735539" [ 816.225883] env[63028]: _type = "Task" [ 816.225883] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 816.235572] env[63028]: DEBUG oslo_vmware.api [None req-114b3219-d7c5-4835-8ed6-9b7d0ce0f081 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] Task: {'id': task-2735539, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.284553] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a17a75b5-75bd-45d7-9502-200898d366ce {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.307570] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb382134-b8ab-4efa-8fc3-b4e7332feee5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.361903] env[63028]: DEBUG oslo_concurrency.lockutils [None req-273a0846-f74e-4cbe-88c7-25ef6a67da40 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Lock "a4b0d948-d950-414a-b23f-faefa5ab038c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 40.935s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 816.378094] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735538, 'name': CreateVM_Task, 'duration_secs': 0.45014} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 816.379968] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 70888889-4965-47ab-ad47-59f1c1286bd8] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 816.379968] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9f9f608a-dd59-4deb-8ecf-94d85c2fa869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 816.379968] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9f9f608a-dd59-4deb-8ecf-94d85c2fa869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 816.379968] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9f9f608a-dd59-4deb-8ecf-94d85c2fa869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 816.379968] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3f281fbd-b2f6-4ca7-ba61-0e66be240b46 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.386336] env[63028]: DEBUG oslo_vmware.api [None req-9f9f608a-dd59-4deb-8ecf-94d85c2fa869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Waiting for the task: (returnval){ [ 816.386336] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]525a8acc-0ec3-9257-df6d-b4658c799eb3" [ 816.386336] env[63028]: _type = "Task" [ 816.386336] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 816.395155] env[63028]: DEBUG oslo_vmware.api [None req-9f9f608a-dd59-4deb-8ecf-94d85c2fa869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]525a8acc-0ec3-9257-df6d-b4658c799eb3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.414143] env[63028]: DEBUG nova.compute.manager [req-5bf85da9-7bd0-4433-8362-37b6e780a176 req-c8ee5e72-59aa-4dc3-aa7d-2a74f5847895 service nova] [instance: 70888889-4965-47ab-ad47-59f1c1286bd8] Received event network-changed-391ce9f8-949e-4d9c-a74f-8c48a5dd3dd0 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 816.414359] env[63028]: DEBUG nova.compute.manager [req-5bf85da9-7bd0-4433-8362-37b6e780a176 req-c8ee5e72-59aa-4dc3-aa7d-2a74f5847895 service nova] [instance: 70888889-4965-47ab-ad47-59f1c1286bd8] Refreshing instance network info cache due to event network-changed-391ce9f8-949e-4d9c-a74f-8c48a5dd3dd0. 
{{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 816.414606] env[63028]: DEBUG oslo_concurrency.lockutils [req-5bf85da9-7bd0-4433-8362-37b6e780a176 req-c8ee5e72-59aa-4dc3-aa7d-2a74f5847895 service nova] Acquiring lock "refresh_cache-70888889-4965-47ab-ad47-59f1c1286bd8" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 816.414748] env[63028]: DEBUG oslo_concurrency.lockutils [req-5bf85da9-7bd0-4433-8362-37b6e780a176 req-c8ee5e72-59aa-4dc3-aa7d-2a74f5847895 service nova] Acquired lock "refresh_cache-70888889-4965-47ab-ad47-59f1c1286bd8" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 816.414915] env[63028]: DEBUG nova.network.neutron [req-5bf85da9-7bd0-4433-8362-37b6e780a176 req-c8ee5e72-59aa-4dc3-aa7d-2a74f5847895 service nova] [instance: 70888889-4965-47ab-ad47-59f1c1286bd8] Refreshing network info cache for port 391ce9f8-949e-4d9c-a74f-8c48a5dd3dd0 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 816.467628] env[63028]: DEBUG oslo_vmware.api [None req-72a36364-d2e4-49f2-bc10-dbe3424d39a6 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Task: {'id': task-2735536, 'name': ReconfigVM_Task, 'duration_secs': 1.147876} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 816.467937] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-72a36364-d2e4-49f2-bc10-dbe3424d39a6 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Reconfigured VM instance instance-00000039 to attach disk [datastore2] 63524cd8-81de-419f-bb07-0326f3cb393f/63524cd8-81de-419f-bb07-0326f3cb393f.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 816.470857] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-12d92b7a-c887-4882-adf3-0fd5e22474bd {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.482142] env[63028]: DEBUG oslo_vmware.api [None req-72a36364-d2e4-49f2-bc10-dbe3424d39a6 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Waiting for the task: (returnval){ [ 816.482142] env[63028]: value = "task-2735540" [ 816.482142] env[63028]: _type = "Task" [ 816.482142] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 816.492681] env[63028]: DEBUG oslo_vmware.api [None req-72a36364-d2e4-49f2-bc10-dbe3424d39a6 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Task: {'id': task-2735540, 'name': Rename_Task} progress is 5%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.531361] env[63028]: DEBUG oslo_concurrency.lockutils [None req-587ce72e-e9e6-43e2-b1d2-881ecb2cd93a tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Acquiring lock "5982cd5d-abf1-42d4-bb44-8d79de599f11" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 816.531633] env[63028]: DEBUG oslo_concurrency.lockutils [None req-587ce72e-e9e6-43e2-b1d2-881ecb2cd93a tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Lock "5982cd5d-abf1-42d4-bb44-8d79de599f11" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 816.531848] env[63028]: DEBUG oslo_concurrency.lockutils [None req-587ce72e-e9e6-43e2-b1d2-881ecb2cd93a tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Acquiring lock "5982cd5d-abf1-42d4-bb44-8d79de599f11-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 816.532047] env[63028]: DEBUG oslo_concurrency.lockutils [None req-587ce72e-e9e6-43e2-b1d2-881ecb2cd93a tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Lock "5982cd5d-abf1-42d4-bb44-8d79de599f11-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 816.532228] env[63028]: DEBUG oslo_concurrency.lockutils [None req-587ce72e-e9e6-43e2-b1d2-881ecb2cd93a tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Lock "5982cd5d-abf1-42d4-bb44-8d79de599f11-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 816.535861] env[63028]: INFO nova.compute.manager [None req-587ce72e-e9e6-43e2-b1d2-881ecb2cd93a tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 5982cd5d-abf1-42d4-bb44-8d79de599f11] Terminating instance [ 816.675856] env[63028]: DEBUG oslo_concurrency.lockutils [req-a36006ad-d5d1-4eb3-88ad-797e1bd47181 req-91037ece-a7ab-466f-84e3-bf61e5a54bf8 service nova] Releasing lock "refresh_cache-e346c31b-ef1b-4f75-8564-cefe26bd672f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 816.736496] env[63028]: DEBUG oslo_vmware.api [None req-114b3219-d7c5-4835-8ed6-9b7d0ce0f081 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] Task: {'id': task-2735539, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075785} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 816.736800] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-114b3219-d7c5-4835-8ed6-9b7d0ce0f081 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] [instance: e346c31b-ef1b-4f75-8564-cefe26bd672f] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 816.739832] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f50bfe57-5e23-4b19-bd8c-1fea0f8b7054 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.764218] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-114b3219-d7c5-4835-8ed6-9b7d0ce0f081 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] [instance: e346c31b-ef1b-4f75-8564-cefe26bd672f] Reconfiguring VM instance instance-0000003a to attach disk [datastore2] e346c31b-ef1b-4f75-8564-cefe26bd672f/e346c31b-ef1b-4f75-8564-cefe26bd672f.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 816.768365] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-12d5a949-7abe-4d72-8e14-4979d3ac150a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.791171] env[63028]: DEBUG oslo_vmware.api [None req-114b3219-d7c5-4835-8ed6-9b7d0ce0f081 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] Waiting for the task: (returnval){ [ 816.791171] env[63028]: value = "task-2735541" [ 816.791171] env[63028]: _type = "Task" [ 816.791171] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 816.805377] env[63028]: DEBUG oslo_vmware.api [None req-114b3219-d7c5-4835-8ed6-9b7d0ce0f081 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] Task: {'id': task-2735541, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.819386] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-e6afe889-fe9d-4144-b620-f9ab47407b37 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Creating Snapshot of the VM instance {{(pid=63028) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 816.819718] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-6e876c23-ea61-472f-ad4a-8470f7cb9f9a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.830995] env[63028]: DEBUG oslo_vmware.api [None req-e6afe889-fe9d-4144-b620-f9ab47407b37 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Waiting for the task: (returnval){ [ 816.830995] env[63028]: value = "task-2735542" [ 816.830995] env[63028]: _type = "Task" [ 816.830995] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 816.843679] env[63028]: DEBUG oslo_vmware.api [None req-e6afe889-fe9d-4144-b620-f9ab47407b37 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2735542, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.898815] env[63028]: DEBUG oslo_vmware.api [None req-9f9f608a-dd59-4deb-8ecf-94d85c2fa869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]525a8acc-0ec3-9257-df6d-b4658c799eb3, 'name': SearchDatastore_Task, 'duration_secs': 0.010007} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 816.899170] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9f9f608a-dd59-4deb-8ecf-94d85c2fa869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 816.899481] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-9f9f608a-dd59-4deb-8ecf-94d85c2fa869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 70888889-4965-47ab-ad47-59f1c1286bd8] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 816.899642] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9f9f608a-dd59-4deb-8ecf-94d85c2fa869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 816.899789] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9f9f608a-dd59-4deb-8ecf-94d85c2fa869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 816.900123] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f9f608a-dd59-4deb-8ecf-94d85c2fa869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 816.902716] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9242df1f-7900-4438-bd94-55caf6f0c224 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.914701] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f9f608a-dd59-4deb-8ecf-94d85c2fa869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 816.914906] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-9f9f608a-dd59-4deb-8ecf-94d85c2fa869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 816.918128] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b12f5f01-8fa3-4e9a-8ac7-81b860129b87 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.928937] env[63028]: DEBUG oslo_vmware.api [None req-9f9f608a-dd59-4deb-8ecf-94d85c2fa869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Waiting for the task: (returnval){ [ 816.928937] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5220c430-178e-4a47-a952-58241c9befe8" [ 816.928937] env[63028]: _type = "Task" [ 816.928937] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 816.939679] env[63028]: DEBUG oslo_vmware.api [None req-9f9f608a-dd59-4deb-8ecf-94d85c2fa869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5220c430-178e-4a47-a952-58241c9befe8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.993341] env[63028]: DEBUG oslo_vmware.api [None req-72a36364-d2e4-49f2-bc10-dbe3424d39a6 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Task: {'id': task-2735540, 'name': Rename_Task, 'duration_secs': 0.164229} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 816.993611] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-72a36364-d2e4-49f2-bc10-dbe3424d39a6 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 816.993861] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-37de09c9-a4c5-4db2-9d4f-e7e4def04765 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.003740] env[63028]: DEBUG oslo_vmware.api [None req-72a36364-d2e4-49f2-bc10-dbe3424d39a6 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Waiting for the task: (returnval){ [ 817.003740] env[63028]: value = "task-2735543" [ 817.003740] env[63028]: _type = "Task" [ 817.003740] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 817.015750] env[63028]: DEBUG oslo_vmware.api [None req-72a36364-d2e4-49f2-bc10-dbe3424d39a6 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Task: {'id': task-2735543, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.039389] env[63028]: DEBUG nova.compute.manager [None req-587ce72e-e9e6-43e2-b1d2-881ecb2cd93a tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 5982cd5d-abf1-42d4-bb44-8d79de599f11] Start destroying the instance on the hypervisor. {{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 817.039749] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-587ce72e-e9e6-43e2-b1d2-881ecb2cd93a tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 5982cd5d-abf1-42d4-bb44-8d79de599f11] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 817.040904] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cb1e1f1-c388-4db8-b857-b6fe37a075c1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.051458] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-587ce72e-e9e6-43e2-b1d2-881ecb2cd93a tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 5982cd5d-abf1-42d4-bb44-8d79de599f11] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 817.052322] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-710e8dbe-ce15-400f-a07a-ebec35f50185 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.060630] env[63028]: DEBUG oslo_vmware.api [None req-587ce72e-e9e6-43e2-b1d2-881ecb2cd93a tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Waiting for the task: (returnval){ [ 817.060630] env[63028]: value = "task-2735544" [ 817.060630] env[63028]: _type = "Task" [ 817.060630] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 817.071616] env[63028]: DEBUG oslo_vmware.api [None req-587ce72e-e9e6-43e2-b1d2-881ecb2cd93a tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Task: {'id': task-2735544, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.241836] env[63028]: DEBUG nova.network.neutron [req-5bf85da9-7bd0-4433-8362-37b6e780a176 req-c8ee5e72-59aa-4dc3-aa7d-2a74f5847895 service nova] [instance: 70888889-4965-47ab-ad47-59f1c1286bd8] Updated VIF entry in instance network info cache for port 391ce9f8-949e-4d9c-a74f-8c48a5dd3dd0. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 817.242320] env[63028]: DEBUG nova.network.neutron [req-5bf85da9-7bd0-4433-8362-37b6e780a176 req-c8ee5e72-59aa-4dc3-aa7d-2a74f5847895 service nova] [instance: 70888889-4965-47ab-ad47-59f1c1286bd8] Updating instance_info_cache with network_info: [{"id": "391ce9f8-949e-4d9c-a74f-8c48a5dd3dd0", "address": "fa:16:3e:7f:a5:72", "network": {"id": "a3d9dede-4b44-4172-9541-24cc42ad633f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-595575190-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "847e89af959a4266ab55c1d2106ba8fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8459aaf-d6a8-46fb-ad14-464ac3104695", "external-id": "nsx-vlan-transportzone-46", "segmentation_id": 46, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap391ce9f8-94", "ovs_interfaceid": "391ce9f8-949e-4d9c-a74f-8c48a5dd3dd0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 817.261464] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18365525-f785-41a4-bd8f-f69361dda544 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.272941] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c3a1c0b-5bae-4d6e-b74b-3cbc5fa8d23c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.308632] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-597489ad-0155-478f-a483-84e72a785589 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.317254] env[63028]: DEBUG oslo_vmware.api [None req-114b3219-d7c5-4835-8ed6-9b7d0ce0f081 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] Task: {'id': task-2735541, 'name': ReconfigVM_Task, 'duration_secs': 0.33076} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 817.319680] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-114b3219-d7c5-4835-8ed6-9b7d0ce0f081 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] [instance: e346c31b-ef1b-4f75-8564-cefe26bd672f] Reconfigured VM instance instance-0000003a to attach disk [datastore2] e346c31b-ef1b-4f75-8564-cefe26bd672f/e346c31b-ef1b-4f75-8564-cefe26bd672f.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 817.320438] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-107d5dc3-550b-4083-a797-af668e5634d0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.323203] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87b0f45d-f987-4a6d-a630-5a7469efcdd4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.340583] env[63028]: DEBUG nova.compute.provider_tree [None req-c668e099-ea76-49a7-8eba-290ebc50fa5f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 817.347848] env[63028]: DEBUG oslo_vmware.api [None req-114b3219-d7c5-4835-8ed6-9b7d0ce0f081 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] Waiting for the task: (returnval){ [ 817.347848] env[63028]: value = "task-2735545" [ 817.347848] env[63028]: _type = "Task" [ 817.347848] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 817.355692] env[63028]: DEBUG oslo_vmware.api [None req-e6afe889-fe9d-4144-b620-f9ab47407b37 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2735542, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.361227] env[63028]: DEBUG oslo_vmware.api [None req-114b3219-d7c5-4835-8ed6-9b7d0ce0f081 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] Task: {'id': task-2735545, 'name': Rename_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.441338] env[63028]: DEBUG oslo_vmware.api [None req-9f9f608a-dd59-4deb-8ecf-94d85c2fa869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5220c430-178e-4a47-a952-58241c9befe8, 'name': SearchDatastore_Task, 'duration_secs': 0.014979} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 817.442218] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f1568e88-2385-4522-92bb-724f98a0e252 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.448132] env[63028]: DEBUG oslo_vmware.api [None req-9f9f608a-dd59-4deb-8ecf-94d85c2fa869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Waiting for the task: (returnval){ [ 817.448132] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5246f6f5-33e1-d481-949e-a69e5ee32701" [ 817.448132] env[63028]: _type = "Task" [ 817.448132] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 817.457686] env[63028]: DEBUG oslo_vmware.api [None req-9f9f608a-dd59-4deb-8ecf-94d85c2fa869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5246f6f5-33e1-d481-949e-a69e5ee32701, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.513366] env[63028]: DEBUG oslo_vmware.api [None req-72a36364-d2e4-49f2-bc10-dbe3424d39a6 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Task: {'id': task-2735543, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.572387] env[63028]: DEBUG oslo_vmware.api [None req-587ce72e-e9e6-43e2-b1d2-881ecb2cd93a tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Task: {'id': task-2735544, 'name': PowerOffVM_Task, 'duration_secs': 0.307936} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 817.572387] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-587ce72e-e9e6-43e2-b1d2-881ecb2cd93a tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 5982cd5d-abf1-42d4-bb44-8d79de599f11] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 817.572387] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-587ce72e-e9e6-43e2-b1d2-881ecb2cd93a tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 5982cd5d-abf1-42d4-bb44-8d79de599f11] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 817.572387] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d14b5565-a645-49c2-92b0-c48b05ffee0f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.642844] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-587ce72e-e9e6-43e2-b1d2-881ecb2cd93a tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 5982cd5d-abf1-42d4-bb44-8d79de599f11] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 817.643074] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-587ce72e-e9e6-43e2-b1d2-881ecb2cd93a tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 5982cd5d-abf1-42d4-bb44-8d79de599f11] Deleting contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 817.643265] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-587ce72e-e9e6-43e2-b1d2-881ecb2cd93a tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Deleting the datastore file [datastore2] 5982cd5d-abf1-42d4-bb44-8d79de599f11 {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 817.643524] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ddd20079-2dcd-49f3-9c20-1ea4a84267db {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.650110] env[63028]: DEBUG oslo_vmware.api [None req-587ce72e-e9e6-43e2-b1d2-881ecb2cd93a tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Waiting for the task: (returnval){ [ 817.650110] env[63028]: value = "task-2735547" [ 817.650110] env[63028]: _type = "Task" [ 817.650110] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 817.657533] env[63028]: DEBUG oslo_vmware.api [None req-587ce72e-e9e6-43e2-b1d2-881ecb2cd93a tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Task: {'id': task-2735547, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.744930] env[63028]: DEBUG oslo_concurrency.lockutils [req-5bf85da9-7bd0-4433-8362-37b6e780a176 req-c8ee5e72-59aa-4dc3-aa7d-2a74f5847895 service nova] Releasing lock "refresh_cache-70888889-4965-47ab-ad47-59f1c1286bd8" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 817.844518] env[63028]: DEBUG nova.scheduler.client.report [None req-c668e099-ea76-49a7-8eba-290ebc50fa5f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 817.854426] env[63028]: DEBUG oslo_vmware.api [None req-e6afe889-fe9d-4144-b620-f9ab47407b37 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2735542, 'name': CreateSnapshot_Task, 'duration_secs': 0.612331} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 817.855014] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-e6afe889-fe9d-4144-b620-f9ab47407b37 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Created Snapshot of the VM instance {{(pid=63028) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 817.855874] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-050b49d7-4e7a-4447-b3e7-b11902eacc14 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.860698] env[63028]: DEBUG oslo_vmware.api [None req-114b3219-d7c5-4835-8ed6-9b7d0ce0f081 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] Task: {'id': task-2735545, 'name': Rename_Task, 'duration_secs': 0.157174} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 817.861774] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-114b3219-d7c5-4835-8ed6-9b7d0ce0f081 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] [instance: e346c31b-ef1b-4f75-8564-cefe26bd672f] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 817.861992] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4737f4e1-eb0a-4a53-a53d-2b751ac3035e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.872773] env[63028]: DEBUG oslo_vmware.api [None req-114b3219-d7c5-4835-8ed6-9b7d0ce0f081 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] Waiting for the task: (returnval){ [ 817.872773] env[63028]: value = "task-2735548" [ 817.872773] env[63028]: _type = "Task" [ 817.872773] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 817.880266] env[63028]: DEBUG oslo_vmware.api [None req-114b3219-d7c5-4835-8ed6-9b7d0ce0f081 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] Task: {'id': task-2735548, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.958464] env[63028]: DEBUG oslo_vmware.api [None req-9f9f608a-dd59-4deb-8ecf-94d85c2fa869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5246f6f5-33e1-d481-949e-a69e5ee32701, 'name': SearchDatastore_Task, 'duration_secs': 0.037057} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 817.958784] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9f9f608a-dd59-4deb-8ecf-94d85c2fa869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 817.959063] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f9f608a-dd59-4deb-8ecf-94d85c2fa869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] 70888889-4965-47ab-ad47-59f1c1286bd8/70888889-4965-47ab-ad47-59f1c1286bd8.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 817.959384] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4f2be447-c1db-4030-ab1e-1cceed0c54ca {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.966266] env[63028]: DEBUG oslo_vmware.api [None req-9f9f608a-dd59-4deb-8ecf-94d85c2fa869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Waiting for the task: (returnval){ [ 817.966266] env[63028]: value = "task-2735549" [ 817.966266] env[63028]: _type = "Task" [ 817.966266] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 817.974797] env[63028]: DEBUG oslo_vmware.api [None req-9f9f608a-dd59-4deb-8ecf-94d85c2fa869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2735549, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.001881] env[63028]: DEBUG oslo_concurrency.lockutils [None req-05595fcf-c280-4b38-9166-e6728566c5d0 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Acquiring lock "a2f7d7c6-7931-4b21-a29c-bb9965577210" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 818.002245] env[63028]: DEBUG oslo_concurrency.lockutils [None req-05595fcf-c280-4b38-9166-e6728566c5d0 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Lock "a2f7d7c6-7931-4b21-a29c-bb9965577210" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 818.014853] env[63028]: DEBUG oslo_vmware.api [None req-72a36364-d2e4-49f2-bc10-dbe3424d39a6 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Task: {'id': task-2735543, 'name': PowerOnVM_Task, 'duration_secs': 1.005578} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 818.015131] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-72a36364-d2e4-49f2-bc10-dbe3424d39a6 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 818.015357] env[63028]: INFO nova.compute.manager [None req-72a36364-d2e4-49f2-bc10-dbe3424d39a6 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Took 12.17 seconds to spawn the instance on the hypervisor. [ 818.015550] env[63028]: DEBUG nova.compute.manager [None req-72a36364-d2e4-49f2-bc10-dbe3424d39a6 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 818.016331] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f464966-66c3-4744-bd64-f28ee29dc371 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.160215] env[63028]: DEBUG oslo_vmware.api [None req-587ce72e-e9e6-43e2-b1d2-881ecb2cd93a tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Task: {'id': task-2735547, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.305359} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 818.160448] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-587ce72e-e9e6-43e2-b1d2-881ecb2cd93a tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 818.160634] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-587ce72e-e9e6-43e2-b1d2-881ecb2cd93a tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 5982cd5d-abf1-42d4-bb44-8d79de599f11] Deleted contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 818.160813] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-587ce72e-e9e6-43e2-b1d2-881ecb2cd93a tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 5982cd5d-abf1-42d4-bb44-8d79de599f11] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 818.161056] env[63028]: INFO nova.compute.manager [None req-587ce72e-e9e6-43e2-b1d2-881ecb2cd93a tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 5982cd5d-abf1-42d4-bb44-8d79de599f11] Took 1.12 seconds to destroy the instance on the hypervisor. [ 818.161319] env[63028]: DEBUG oslo.service.loopingcall [None req-587ce72e-e9e6-43e2-b1d2-881ecb2cd93a tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 818.161511] env[63028]: DEBUG nova.compute.manager [-] [instance: 5982cd5d-abf1-42d4-bb44-8d79de599f11] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 818.161603] env[63028]: DEBUG nova.network.neutron [-] [instance: 5982cd5d-abf1-42d4-bb44-8d79de599f11] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 818.349417] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c668e099-ea76-49a7-8eba-290ebc50fa5f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.665s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 818.350071] env[63028]: DEBUG nova.compute.manager [None req-c668e099-ea76-49a7-8eba-290ebc50fa5f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 514c83d1-4fb1-435c-8c25-aa112c744131] Start building networks asynchronously for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 818.353122] env[63028]: DEBUG oslo_concurrency.lockutils [None req-164a375f-5a7c-48ea-8de2-0424503d2164 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 38.164s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 818.354749] env[63028]: INFO nova.compute.claims [None req-164a375f-5a7c-48ea-8de2-0424503d2164 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: f4718363-73b2-4016-8849-f75e98259023] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 818.376625] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-e6afe889-fe9d-4144-b620-f9ab47407b37 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Creating linked-clone VM from snapshot {{(pid=63028) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 818.377051] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-4d8c70ac-4127-48ca-8a08-1329d9d300a1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.389452] env[63028]: DEBUG oslo_vmware.api [None req-114b3219-d7c5-4835-8ed6-9b7d0ce0f081 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] Task: {'id': task-2735548, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.391129] env[63028]: DEBUG oslo_vmware.api [None req-e6afe889-fe9d-4144-b620-f9ab47407b37 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Waiting for the task: (returnval){ [ 818.391129] env[63028]: value = "task-2735550" [ 818.391129] env[63028]: _type = "Task" [ 818.391129] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 818.402619] env[63028]: DEBUG oslo_vmware.api [None req-e6afe889-fe9d-4144-b620-f9ab47407b37 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2735550, 'name': CloneVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.477686] env[63028]: DEBUG oslo_vmware.api [None req-9f9f608a-dd59-4deb-8ecf-94d85c2fa869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2735549, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.535934] env[63028]: INFO nova.compute.manager [None req-72a36364-d2e4-49f2-bc10-dbe3424d39a6 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Took 56.95 seconds to build instance. [ 818.743225] env[63028]: DEBUG nova.compute.manager [req-2403c425-3f62-43d5-9ab1-2822d0dd3838 req-8a05f291-b15c-4708-9ee7-fcfcb25b95df service nova] [instance: a1d00736-1a8d-46e0-9358-46e848b94797] Received event network-changed-66cd0102-9651-45e1-8a38-f65e2f7dd800 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 818.743225] env[63028]: DEBUG nova.compute.manager [req-2403c425-3f62-43d5-9ab1-2822d0dd3838 req-8a05f291-b15c-4708-9ee7-fcfcb25b95df service nova] [instance: a1d00736-1a8d-46e0-9358-46e848b94797] Refreshing instance network info cache due to event network-changed-66cd0102-9651-45e1-8a38-f65e2f7dd800. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 818.743225] env[63028]: DEBUG oslo_concurrency.lockutils [req-2403c425-3f62-43d5-9ab1-2822d0dd3838 req-8a05f291-b15c-4708-9ee7-fcfcb25b95df service nova] Acquiring lock "refresh_cache-a1d00736-1a8d-46e0-9358-46e848b94797" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 818.743225] env[63028]: DEBUG oslo_concurrency.lockutils [req-2403c425-3f62-43d5-9ab1-2822d0dd3838 req-8a05f291-b15c-4708-9ee7-fcfcb25b95df service nova] Acquired lock "refresh_cache-a1d00736-1a8d-46e0-9358-46e848b94797" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 818.747248] env[63028]: DEBUG nova.network.neutron [req-2403c425-3f62-43d5-9ab1-2822d0dd3838 req-8a05f291-b15c-4708-9ee7-fcfcb25b95df service nova] [instance: a1d00736-1a8d-46e0-9358-46e848b94797] Refreshing network info cache for port 66cd0102-9651-45e1-8a38-f65e2f7dd800 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 818.865079] env[63028]: DEBUG nova.compute.utils [None req-c668e099-ea76-49a7-8eba-290ebc50fa5f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 818.865079] env[63028]: DEBUG nova.compute.manager [None req-c668e099-ea76-49a7-8eba-290ebc50fa5f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 514c83d1-4fb1-435c-8c25-aa112c744131] Allocating IP information in the background. 
{{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 818.865079] env[63028]: DEBUG nova.network.neutron [None req-c668e099-ea76-49a7-8eba-290ebc50fa5f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 514c83d1-4fb1-435c-8c25-aa112c744131] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 818.890618] env[63028]: DEBUG oslo_vmware.api [None req-114b3219-d7c5-4835-8ed6-9b7d0ce0f081 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] Task: {'id': task-2735548, 'name': PowerOnVM_Task, 'duration_secs': 0.53692} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 818.891383] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-114b3219-d7c5-4835-8ed6-9b7d0ce0f081 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] [instance: e346c31b-ef1b-4f75-8564-cefe26bd672f] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 818.894017] env[63028]: INFO nova.compute.manager [None req-114b3219-d7c5-4835-8ed6-9b7d0ce0f081 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] [instance: e346c31b-ef1b-4f75-8564-cefe26bd672f] Took 8.10 seconds to spawn the instance on the hypervisor. [ 818.894017] env[63028]: DEBUG nova.compute.manager [None req-114b3219-d7c5-4835-8ed6-9b7d0ce0f081 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] [instance: e346c31b-ef1b-4f75-8564-cefe26bd672f] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 818.894017] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-420c396a-7972-48f8-81cf-a01c9daaa34b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.912820] env[63028]: DEBUG oslo_vmware.api [None req-e6afe889-fe9d-4144-b620-f9ab47407b37 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2735550, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.962447] env[63028]: DEBUG nova.policy [None req-c668e099-ea76-49a7-8eba-290ebc50fa5f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8478e45562394a0d8fafc5e3e1218fd2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '05118b378b5e4d838962db2378b381bc', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 818.977097] env[63028]: DEBUG oslo_vmware.api [None req-9f9f608a-dd59-4deb-8ecf-94d85c2fa869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2735549, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.562555} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 818.977097] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f9f608a-dd59-4deb-8ecf-94d85c2fa869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] 70888889-4965-47ab-ad47-59f1c1286bd8/70888889-4965-47ab-ad47-59f1c1286bd8.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 818.978021] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-9f9f608a-dd59-4deb-8ecf-94d85c2fa869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 70888889-4965-47ab-ad47-59f1c1286bd8] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 818.978450] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-03d74aa3-b162-44e1-85e8-a3ba46cd0ea3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.987741] env[63028]: DEBUG oslo_vmware.api [None req-9f9f608a-dd59-4deb-8ecf-94d85c2fa869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Waiting for the task: (returnval){ [ 818.987741] env[63028]: value = "task-2735551" [ 818.987741] env[63028]: _type = "Task" [ 818.987741] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 818.996491] env[63028]: DEBUG oslo_vmware.api [None req-9f9f608a-dd59-4deb-8ecf-94d85c2fa869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2735551, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.014336] env[63028]: DEBUG nova.network.neutron [-] [instance: 5982cd5d-abf1-42d4-bb44-8d79de599f11] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 819.038927] env[63028]: DEBUG oslo_concurrency.lockutils [None req-72a36364-d2e4-49f2-bc10-dbe3424d39a6 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Lock "63524cd8-81de-419f-bb07-0326f3cb393f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 79.084s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 819.302261] env[63028]: DEBUG nova.network.neutron [None req-c668e099-ea76-49a7-8eba-290ebc50fa5f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 514c83d1-4fb1-435c-8c25-aa112c744131] Successfully created port: 1e514afb-c9a6-43c6-bfec-ac1a32620674 {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 819.371376] env[63028]: DEBUG nova.compute.manager [None req-c668e099-ea76-49a7-8eba-290ebc50fa5f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 514c83d1-4fb1-435c-8c25-aa112c744131] Start building block device mappings for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 819.403231] env[63028]: DEBUG oslo_vmware.api [None req-e6afe889-fe9d-4144-b620-f9ab47407b37 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2735550, 'name': CloneVM_Task} progress is 94%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.421920] env[63028]: INFO nova.compute.manager [None req-114b3219-d7c5-4835-8ed6-9b7d0ce0f081 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] [instance: e346c31b-ef1b-4f75-8564-cefe26bd672f] Took 52.73 seconds to build instance. [ 819.499704] env[63028]: DEBUG oslo_vmware.api [None req-9f9f608a-dd59-4deb-8ecf-94d85c2fa869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2735551, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063786} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.505106] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-9f9f608a-dd59-4deb-8ecf-94d85c2fa869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 70888889-4965-47ab-ad47-59f1c1286bd8] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 819.506662] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50869873-c679-44d2-85ef-dc140fbe25c9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.524855] env[63028]: INFO nova.compute.manager [-] [instance: 5982cd5d-abf1-42d4-bb44-8d79de599f11] Took 1.36 seconds to deallocate network for instance. 
[ 819.536807] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-9f9f608a-dd59-4deb-8ecf-94d85c2fa869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 70888889-4965-47ab-ad47-59f1c1286bd8] Reconfiguring VM instance instance-0000003b to attach disk [datastore2] 70888889-4965-47ab-ad47-59f1c1286bd8/70888889-4965-47ab-ad47-59f1c1286bd8.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 819.542249] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a0cc5a88-fa5a-43fc-af39-04a87ccc203f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.556721] env[63028]: DEBUG nova.compute.manager [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: 9773ad95-1894-471d-8020-c7952eac4be4] Starting instance... {{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 819.570051] env[63028]: DEBUG oslo_vmware.api [None req-9f9f608a-dd59-4deb-8ecf-94d85c2fa869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Waiting for the task: (returnval){ [ 819.570051] env[63028]: value = "task-2735552" [ 819.570051] env[63028]: _type = "Task" [ 819.570051] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.577851] env[63028]: DEBUG oslo_vmware.api [None req-9f9f608a-dd59-4deb-8ecf-94d85c2fa869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2735552, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.855586] env[63028]: DEBUG nova.network.neutron [req-2403c425-3f62-43d5-9ab1-2822d0dd3838 req-8a05f291-b15c-4708-9ee7-fcfcb25b95df service nova] [instance: a1d00736-1a8d-46e0-9358-46e848b94797] Updated VIF entry in instance network info cache for port 66cd0102-9651-45e1-8a38-f65e2f7dd800. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 819.855998] env[63028]: DEBUG nova.network.neutron [req-2403c425-3f62-43d5-9ab1-2822d0dd3838 req-8a05f291-b15c-4708-9ee7-fcfcb25b95df service nova] [instance: a1d00736-1a8d-46e0-9358-46e848b94797] Updating instance_info_cache with network_info: [{"id": "66cd0102-9651-45e1-8a38-f65e2f7dd800", "address": "fa:16:3e:ff:ab:ba", "network": {"id": "bd3f74f8-d12b-4d2e-9aa9-ca1c9e766bf4", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1073484567-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.138", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "178b95ba550d453db2b9868e72a8c93f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc1e16db-ad3b-4b7f-ab64-4609c87abac0", "external-id": "nsx-vlan-transportzone-500", "segmentation_id": 500, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap66cd0102-96", "ovs_interfaceid": "66cd0102-9651-45e1-8a38-f65e2f7dd800", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 819.904843] env[63028]: DEBUG oslo_vmware.api [None req-e6afe889-fe9d-4144-b620-f9ab47407b37 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2735550, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.925482] env[63028]: DEBUG oslo_concurrency.lockutils [None req-114b3219-d7c5-4835-8ed6-9b7d0ce0f081 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] Lock "e346c31b-ef1b-4f75-8564-cefe26bd672f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 76.031s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 820.004257] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55801197-9acb-4f0d-a5c2-58f915a51bb8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.015017] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e0dd921-f122-4bab-ae13-954846bbb1cb {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.048738] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a4b4557-671c-4f78-a234-2136a284defa {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.056350] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68e7a68b-8622-4444-afa5-5cef2ab93218 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.072971] env[63028]: DEBUG oslo_concurrency.lockutils [None req-587ce72e-e9e6-43e2-b1d2-881ecb2cd93a tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 820.073731] env[63028]: DEBUG nova.compute.provider_tree [None req-164a375f-5a7c-48ea-8de2-0424503d2164 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 820.092914] env[63028]: DEBUG oslo_vmware.api [None req-9f9f608a-dd59-4deb-8ecf-94d85c2fa869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2735552, 'name': ReconfigVM_Task, 'duration_secs': 0.399265} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 820.093991] env[63028]: DEBUG oslo_concurrency.lockutils [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 820.094332] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-9f9f608a-dd59-4deb-8ecf-94d85c2fa869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 70888889-4965-47ab-ad47-59f1c1286bd8] Reconfigured VM instance instance-0000003b to attach disk [datastore2] 70888889-4965-47ab-ad47-59f1c1286bd8/70888889-4965-47ab-ad47-59f1c1286bd8.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 820.096230] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8722ddb3-fd75-4097-9b8d-eb03925868bb {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.103120] env[63028]: DEBUG oslo_vmware.api [None req-9f9f608a-dd59-4deb-8ecf-94d85c2fa869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Waiting for the task: (returnval){ [ 820.103120] env[63028]: value = "task-2735553" [ 820.103120] env[63028]: _type = "Task" [ 820.103120] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.113027] env[63028]: DEBUG oslo_vmware.api [None req-9f9f608a-dd59-4deb-8ecf-94d85c2fa869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2735553, 'name': Rename_Task} progress is 5%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.359236] env[63028]: DEBUG oslo_concurrency.lockutils [req-2403c425-3f62-43d5-9ab1-2822d0dd3838 req-8a05f291-b15c-4708-9ee7-fcfcb25b95df service nova] Releasing lock "refresh_cache-a1d00736-1a8d-46e0-9358-46e848b94797" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 820.359539] env[63028]: DEBUG nova.compute.manager [req-2403c425-3f62-43d5-9ab1-2822d0dd3838 req-8a05f291-b15c-4708-9ee7-fcfcb25b95df service nova] [instance: 5982cd5d-abf1-42d4-bb44-8d79de599f11] Received event network-vif-deleted-968c9c64-3e8d-442c-8090-cc34f396bc28 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 820.359725] env[63028]: INFO nova.compute.manager [req-2403c425-3f62-43d5-9ab1-2822d0dd3838 req-8a05f291-b15c-4708-9ee7-fcfcb25b95df service nova] [instance: 5982cd5d-abf1-42d4-bb44-8d79de599f11] Neutron deleted interface 968c9c64-3e8d-442c-8090-cc34f396bc28; detaching it from the instance and deleting it from the info cache [ 820.359924] env[63028]: DEBUG nova.network.neutron [req-2403c425-3f62-43d5-9ab1-2822d0dd3838 req-8a05f291-b15c-4708-9ee7-fcfcb25b95df service nova] [instance: 5982cd5d-abf1-42d4-bb44-8d79de599f11] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 820.379981] env[63028]: DEBUG nova.compute.manager [None req-c668e099-ea76-49a7-8eba-290ebc50fa5f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 514c83d1-4fb1-435c-8c25-aa112c744131] Start spawning the instance on the hypervisor. {{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 820.405874] env[63028]: DEBUG oslo_vmware.api [None req-e6afe889-fe9d-4144-b620-f9ab47407b37 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2735550, 'name': CloneVM_Task} progress is 100%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.408007] env[63028]: DEBUG nova.virt.hardware [None req-c668e099-ea76-49a7-8eba-290ebc50fa5f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 820.408250] env[63028]: DEBUG nova.virt.hardware [None req-c668e099-ea76-49a7-8eba-290ebc50fa5f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 820.408412] env[63028]: DEBUG nova.virt.hardware [None req-c668e099-ea76-49a7-8eba-290ebc50fa5f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 820.408595] env[63028]: DEBUG nova.virt.hardware [None req-c668e099-ea76-49a7-8eba-290ebc50fa5f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 820.408740] env[63028]: DEBUG nova.virt.hardware [None req-c668e099-ea76-49a7-8eba-290ebc50fa5f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 820.408888] env[63028]: DEBUG nova.virt.hardware [None req-c668e099-ea76-49a7-8eba-290ebc50fa5f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 820.409226] env[63028]: DEBUG nova.virt.hardware [None req-c668e099-ea76-49a7-8eba-290ebc50fa5f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 820.409405] env[63028]: DEBUG nova.virt.hardware [None req-c668e099-ea76-49a7-8eba-290ebc50fa5f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 
820.409577] env[63028]: DEBUG nova.virt.hardware [None req-c668e099-ea76-49a7-8eba-290ebc50fa5f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 820.409737] env[63028]: DEBUG nova.virt.hardware [None req-c668e099-ea76-49a7-8eba-290ebc50fa5f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 820.409983] env[63028]: DEBUG nova.virt.hardware [None req-c668e099-ea76-49a7-8eba-290ebc50fa5f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 820.410760] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd869c18-db7d-4626-8507-0ba4c34949c2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.418058] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25951c9b-0b46-4334-95cd-fd3e33ae112f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.433981] env[63028]: DEBUG nova.compute.manager [None req-c457f1a3-7cf4-4d81-ade9-2592889b1fa6 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 79f4ef22-a589-4d5c-8832-5d5dcdd55561] Starting instance... 
{{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 820.540892] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c2c8e9d3-c866-44bd-b81e-c77e80e04316 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Acquiring lock "70147f2f-0b5e-4343-84e4-8bc195a5485d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 820.540892] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c2c8e9d3-c866-44bd-b81e-c77e80e04316 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Lock "70147f2f-0b5e-4343-84e4-8bc195a5485d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 820.582042] env[63028]: DEBUG nova.scheduler.client.report [None req-164a375f-5a7c-48ea-8de2-0424503d2164 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 820.588876] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ad39a64e-600e-42cd-a15b-f0ddb3c175b1 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] Acquiring lock "e346c31b-ef1b-4f75-8564-cefe26bd672f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 820.589171] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ad39a64e-600e-42cd-a15b-f0ddb3c175b1 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] Lock "e346c31b-ef1b-4f75-8564-cefe26bd672f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 820.589408] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ad39a64e-600e-42cd-a15b-f0ddb3c175b1 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] Acquiring lock "e346c31b-ef1b-4f75-8564-cefe26bd672f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 820.589610] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ad39a64e-600e-42cd-a15b-f0ddb3c175b1 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] Lock "e346c31b-ef1b-4f75-8564-cefe26bd672f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 820.589784] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ad39a64e-600e-42cd-a15b-f0ddb3c175b1 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] Lock "e346c31b-ef1b-4f75-8564-cefe26bd672f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 820.591905] env[63028]: INFO nova.compute.manager [None req-ad39a64e-600e-42cd-a15b-f0ddb3c175b1 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] [instance: e346c31b-ef1b-4f75-8564-cefe26bd672f] Terminating instance [ 820.612873] env[63028]: DEBUG oslo_vmware.api [None req-9f9f608a-dd59-4deb-8ecf-94d85c2fa869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2735553, 'name': Rename_Task, 'duration_secs': 0.146343} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 820.613138] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f9f608a-dd59-4deb-8ecf-94d85c2fa869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 70888889-4965-47ab-ad47-59f1c1286bd8] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 820.613380] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-04b0ea92-7885-4200-9936-27ad102dbea8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.619152] env[63028]: DEBUG oslo_vmware.api [None req-9f9f608a-dd59-4deb-8ecf-94d85c2fa869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Waiting for the task: (returnval){ [ 820.619152] env[63028]: value = "task-2735554" [ 820.619152] env[63028]: _type = "Task" [ 820.619152] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.626740] env[63028]: DEBUG oslo_vmware.api [None req-9f9f608a-dd59-4deb-8ecf-94d85c2fa869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2735554, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.841495] env[63028]: DEBUG nova.compute.manager [req-da29e0ec-fcf0-486f-91c0-dfa68033d306 req-86e5bb65-1109-4d2a-bf6b-e4413e8ab564 service nova] [instance: 514c83d1-4fb1-435c-8c25-aa112c744131] Received event network-vif-plugged-1e514afb-c9a6-43c6-bfec-ac1a32620674 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 820.841590] env[63028]: DEBUG oslo_concurrency.lockutils [req-da29e0ec-fcf0-486f-91c0-dfa68033d306 req-86e5bb65-1109-4d2a-bf6b-e4413e8ab564 service nova] Acquiring lock "514c83d1-4fb1-435c-8c25-aa112c744131-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 820.841765] env[63028]: DEBUG oslo_concurrency.lockutils [req-da29e0ec-fcf0-486f-91c0-dfa68033d306 req-86e5bb65-1109-4d2a-bf6b-e4413e8ab564 service nova] Lock "514c83d1-4fb1-435c-8c25-aa112c744131-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 820.841926] env[63028]: DEBUG oslo_concurrency.lockutils [req-da29e0ec-fcf0-486f-91c0-dfa68033d306 req-86e5bb65-1109-4d2a-bf6b-e4413e8ab564 service nova] Lock "514c83d1-4fb1-435c-8c25-aa112c744131-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 820.842255] env[63028]: DEBUG nova.compute.manager [req-da29e0ec-fcf0-486f-91c0-dfa68033d306 req-86e5bb65-1109-4d2a-bf6b-e4413e8ab564 service nova] [instance: 514c83d1-4fb1-435c-8c25-aa112c744131] No waiting events found dispatching network-vif-plugged-1e514afb-c9a6-43c6-bfec-ac1a32620674 {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 820.842592] env[63028]: WARNING nova.compute.manager [req-da29e0ec-fcf0-486f-91c0-dfa68033d306 req-86e5bb65-1109-4d2a-bf6b-e4413e8ab564 service nova] [instance: 514c83d1-4fb1-435c-8c25-aa112c744131] Received unexpected event network-vif-plugged-1e514afb-c9a6-43c6-bfec-ac1a32620674 for instance with vm_state building and task_state spawning. [ 820.863955] env[63028]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-39e1212c-8f67-4828-853f-c4ede35003cc {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.874214] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8e4d525-d004-4630-b3d0-98bf31e65d82 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.916365] env[63028]: DEBUG nova.compute.manager [req-2403c425-3f62-43d5-9ab1-2822d0dd3838 req-8a05f291-b15c-4708-9ee7-fcfcb25b95df service nova] [instance: 5982cd5d-abf1-42d4-bb44-8d79de599f11] Detach interface failed, port_id=968c9c64-3e8d-442c-8090-cc34f396bc28, reason: Instance 5982cd5d-abf1-42d4-bb44-8d79de599f11 could not be found. 
{{(pid=63028) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 820.922182] env[63028]: DEBUG oslo_vmware.api [None req-e6afe889-fe9d-4144-b620-f9ab47407b37 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2735550, 'name': CloneVM_Task, 'duration_secs': 2.08957} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 820.922530] env[63028]: INFO nova.virt.vmwareapi.vmops [None req-e6afe889-fe9d-4144-b620-f9ab47407b37 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Created linked-clone VM from snapshot [ 820.923326] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f50e311-f905-465a-89a3-1e79977c4bca {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.930967] env[63028]: DEBUG nova.virt.vmwareapi.images [None req-e6afe889-fe9d-4144-b620-f9ab47407b37 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Uploading image e5fa6135-d800-4c70-a08e-8ec0f4d40fa6 {{(pid=63028) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 820.946858] env[63028]: DEBUG nova.network.neutron [None req-c668e099-ea76-49a7-8eba-290ebc50fa5f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 514c83d1-4fb1-435c-8c25-aa112c744131] Successfully updated port: 1e514afb-c9a6-43c6-bfec-ac1a32620674 {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 820.961469] env[63028]: DEBUG oslo_vmware.rw_handles [None req-e6afe889-fe9d-4144-b620-f9ab47407b37 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 820.961469] env[63028]: value = "vm-550760" [ 820.961469] env[63028]: _type = "VirtualMachine" [ 820.961469] env[63028]: }. 
{{(pid=63028) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 820.961469] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-df405303-9d1c-41f7-b474-7dc00f972707 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.963387] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c457f1a3-7cf4-4d81-ade9-2592889b1fa6 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 820.970488] env[63028]: DEBUG oslo_vmware.rw_handles [None req-e6afe889-fe9d-4144-b620-f9ab47407b37 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Lease: (returnval){ [ 820.970488] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]524b10df-cbf0-e2c0-06ec-84d7ae16de19" [ 820.970488] env[63028]: _type = "HttpNfcLease" [ 820.970488] env[63028]: } obtained for exporting VM: (result){ [ 820.970488] env[63028]: value = "vm-550760" [ 820.970488] env[63028]: _type = "VirtualMachine" [ 820.970488] env[63028]: }. {{(pid=63028) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 820.970829] env[63028]: DEBUG oslo_vmware.api [None req-e6afe889-fe9d-4144-b620-f9ab47407b37 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Waiting for the lease: (returnval){ [ 820.970829] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]524b10df-cbf0-e2c0-06ec-84d7ae16de19" [ 820.970829] env[63028]: _type = "HttpNfcLease" [ 820.970829] env[63028]: } to be ready. {{(pid=63028) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 820.978305] env[63028]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 820.978305] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]524b10df-cbf0-e2c0-06ec-84d7ae16de19" [ 820.978305] env[63028]: _type = "HttpNfcLease" [ 820.978305] env[63028]: } is initializing. {{(pid=63028) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 821.087251] env[63028]: DEBUG oslo_concurrency.lockutils [None req-164a375f-5a7c-48ea-8de2-0424503d2164 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.734s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 821.087901] env[63028]: DEBUG nova.compute.manager [None req-164a375f-5a7c-48ea-8de2-0424503d2164 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: f4718363-73b2-4016-8849-f75e98259023] Start building networks asynchronously for instance. 
{{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 821.092247] env[63028]: DEBUG oslo_concurrency.lockutils [None req-81ee569f-ddd8-4590-ac59-e5b6b9865946 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 35.432s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 821.093790] env[63028]: INFO nova.compute.claims [None req-81ee569f-ddd8-4590-ac59-e5b6b9865946 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 821.101573] env[63028]: DEBUG nova.compute.manager [None req-ad39a64e-600e-42cd-a15b-f0ddb3c175b1 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] [instance: e346c31b-ef1b-4f75-8564-cefe26bd672f] Start destroying the instance on the hypervisor. {{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 821.101636] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-ad39a64e-600e-42cd-a15b-f0ddb3c175b1 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] [instance: e346c31b-ef1b-4f75-8564-cefe26bd672f] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 821.105224] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdad59b2-2c34-4899-bc48-0405597c1aa3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.115101] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad39a64e-600e-42cd-a15b-f0ddb3c175b1 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] [instance: e346c31b-ef1b-4f75-8564-cefe26bd672f] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 821.115101] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8b138b06-2420-462c-8d2b-dbb82c2dddde {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.125670] env[63028]: DEBUG oslo_vmware.api [None req-ad39a64e-600e-42cd-a15b-f0ddb3c175b1 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] Waiting for the task: (returnval){ [ 821.125670] env[63028]: value = "task-2735556" [ 821.125670] env[63028]: _type = "Task" [ 821.125670] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.133423] env[63028]: DEBUG oslo_vmware.api [None req-9f9f608a-dd59-4deb-8ecf-94d85c2fa869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2735554, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.138642] env[63028]: DEBUG oslo_vmware.api [None req-ad39a64e-600e-42cd-a15b-f0ddb3c175b1 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] Task: {'id': task-2735556, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.453069] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c668e099-ea76-49a7-8eba-290ebc50fa5f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Acquiring lock "refresh_cache-514c83d1-4fb1-435c-8c25-aa112c744131" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 821.453069] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c668e099-ea76-49a7-8eba-290ebc50fa5f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Acquired lock "refresh_cache-514c83d1-4fb1-435c-8c25-aa112c744131" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 821.453069] env[63028]: DEBUG nova.network.neutron [None req-c668e099-ea76-49a7-8eba-290ebc50fa5f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 514c83d1-4fb1-435c-8c25-aa112c744131] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 821.480058] env[63028]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 821.480058] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]524b10df-cbf0-e2c0-06ec-84d7ae16de19" [ 821.480058] env[63028]: _type = "HttpNfcLease" [ 821.480058] env[63028]: } is ready. {{(pid=63028) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 821.480454] env[63028]: DEBUG oslo_vmware.rw_handles [None req-e6afe889-fe9d-4144-b620-f9ab47407b37 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 821.480454] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]524b10df-cbf0-e2c0-06ec-84d7ae16de19" [ 821.480454] env[63028]: _type = "HttpNfcLease" [ 821.480454] env[63028]: }. {{(pid=63028) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 821.481358] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f103feab-d4b5-4eab-9812-b259b126178a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.489267] env[63028]: DEBUG oslo_vmware.rw_handles [None req-e6afe889-fe9d-4144-b620-f9ab47407b37 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52984004-3adf-7da2-b424-e42ae4ceec18/disk-0.vmdk from lease info. 
{{(pid=63028) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 821.489476] env[63028]: DEBUG oslo_vmware.rw_handles [None req-e6afe889-fe9d-4144-b620-f9ab47407b37 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52984004-3adf-7da2-b424-e42ae4ceec18/disk-0.vmdk for reading. {{(pid=63028) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 821.588732] env[63028]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-22b1257e-47ac-474a-a544-6a09e4cecabd {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.602889] env[63028]: DEBUG nova.compute.utils [None req-164a375f-5a7c-48ea-8de2-0424503d2164 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 821.609282] env[63028]: DEBUG nova.compute.manager [None req-164a375f-5a7c-48ea-8de2-0424503d2164 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: f4718363-73b2-4016-8849-f75e98259023] Allocating IP information in the background. {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 821.609485] env[63028]: DEBUG nova.network.neutron [None req-164a375f-5a7c-48ea-8de2-0424503d2164 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: f4718363-73b2-4016-8849-f75e98259023] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 821.636720] env[63028]: DEBUG oslo_vmware.api [None req-9f9f608a-dd59-4deb-8ecf-94d85c2fa869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2735554, 'name': PowerOnVM_Task, 'duration_secs': 0.536787} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.636933] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f9f608a-dd59-4deb-8ecf-94d85c2fa869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 70888889-4965-47ab-ad47-59f1c1286bd8] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 821.637162] env[63028]: INFO nova.compute.manager [None req-9f9f608a-dd59-4deb-8ecf-94d85c2fa869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 70888889-4965-47ab-ad47-59f1c1286bd8] Took 8.28 seconds to spawn the instance on the hypervisor. 
[ 821.637365] env[63028]: DEBUG nova.compute.manager [None req-9f9f608a-dd59-4deb-8ecf-94d85c2fa869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 70888889-4965-47ab-ad47-59f1c1286bd8] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 821.638467] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3ad25d6-7d91-4358-83be-3017a2641cba {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.644064] env[63028]: DEBUG oslo_vmware.api [None req-ad39a64e-600e-42cd-a15b-f0ddb3c175b1 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] Task: {'id': task-2735556, 'name': PowerOffVM_Task, 'duration_secs': 0.330363} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.644595] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad39a64e-600e-42cd-a15b-f0ddb3c175b1 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] [instance: e346c31b-ef1b-4f75-8564-cefe26bd672f] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 821.644764] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-ad39a64e-600e-42cd-a15b-f0ddb3c175b1 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] [instance: e346c31b-ef1b-4f75-8564-cefe26bd672f] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 821.645027] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e7494f60-efbf-4c21-a012-55d2e4613c46 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.652956] env[63028]: DEBUG nova.policy [None req-164a375f-5a7c-48ea-8de2-0424503d2164 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '32a3a4c8b9ef46bb9f1d927769b9dad9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1d3152ab577947b28de82f4801285f8c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 821.708357] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-ad39a64e-600e-42cd-a15b-f0ddb3c175b1 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] [instance: e346c31b-ef1b-4f75-8564-cefe26bd672f] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 821.708591] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-ad39a64e-600e-42cd-a15b-f0ddb3c175b1 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] [instance: e346c31b-ef1b-4f75-8564-cefe26bd672f] Deleting contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 821.708778] env[63028]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-ad39a64e-600e-42cd-a15b-f0ddb3c175b1 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] Deleting the datastore file [datastore2] e346c31b-ef1b-4f75-8564-cefe26bd672f {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 821.709767] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bdf8110e-0394-4fe3-a9d0-e8bd265495a9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.716613] env[63028]: DEBUG oslo_vmware.api [None req-ad39a64e-600e-42cd-a15b-f0ddb3c175b1 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] Waiting for the task: (returnval){ [ 821.716613] env[63028]: value = "task-2735558" [ 821.716613] env[63028]: _type = "Task" [ 821.716613] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.727676] env[63028]: DEBUG oslo_vmware.api [None req-ad39a64e-600e-42cd-a15b-f0ddb3c175b1 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] Task: {'id': task-2735558, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.981675] env[63028]: DEBUG nova.network.neutron [None req-164a375f-5a7c-48ea-8de2-0424503d2164 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: f4718363-73b2-4016-8849-f75e98259023] Successfully created port: d63bc515-e6bf-4c5c-88b1-4d1e21688dc4 {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 822.024456] env[63028]: DEBUG nova.network.neutron [None req-c668e099-ea76-49a7-8eba-290ebc50fa5f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 514c83d1-4fb1-435c-8c25-aa112c744131] Instance cache missing network info. {{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 822.110603] env[63028]: DEBUG nova.compute.manager [None req-164a375f-5a7c-48ea-8de2-0424503d2164 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: f4718363-73b2-4016-8849-f75e98259023] Start building block device mappings for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 822.167217] env[63028]: INFO nova.compute.manager [None req-9f9f608a-dd59-4deb-8ecf-94d85c2fa869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 70888889-4965-47ab-ad47-59f1c1286bd8] Took 50.24 seconds to build instance. [ 822.234965] env[63028]: DEBUG oslo_vmware.api [None req-ad39a64e-600e-42cd-a15b-f0ddb3c175b1 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] Task: {'id': task-2735558, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.234048} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.235836] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-ad39a64e-600e-42cd-a15b-f0ddb3c175b1 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 822.235836] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-ad39a64e-600e-42cd-a15b-f0ddb3c175b1 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] [instance: e346c31b-ef1b-4f75-8564-cefe26bd672f] Deleted contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 822.235836] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-ad39a64e-600e-42cd-a15b-f0ddb3c175b1 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] [instance: e346c31b-ef1b-4f75-8564-cefe26bd672f] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 822.236339] env[63028]: INFO nova.compute.manager [None req-ad39a64e-600e-42cd-a15b-f0ddb3c175b1 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] [instance: e346c31b-ef1b-4f75-8564-cefe26bd672f] Took 1.13 seconds to destroy the instance on the hypervisor. [ 822.236339] env[63028]: DEBUG oslo.service.loopingcall [None req-ad39a64e-600e-42cd-a15b-f0ddb3c175b1 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 822.236567] env[63028]: DEBUG nova.compute.manager [-] [instance: e346c31b-ef1b-4f75-8564-cefe26bd672f] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 822.237016] env[63028]: DEBUG nova.network.neutron [-] [instance: e346c31b-ef1b-4f75-8564-cefe26bd672f] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 822.325570] env[63028]: DEBUG nova.network.neutron [None req-c668e099-ea76-49a7-8eba-290ebc50fa5f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 514c83d1-4fb1-435c-8c25-aa112c744131] Updating instance_info_cache with network_info: [{"id": "1e514afb-c9a6-43c6-bfec-ac1a32620674", "address": "fa:16:3e:62:4e:4d", "network": {"id": "e8109779-7eb7-4751-b6e5-c1e3b007cb9a", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1892394824-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "05118b378b5e4d838962db2378b381bc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5446413d-c3b0-4cd2-a962-62240db178ac", "external-id": "nsx-vlan-transportzone-528", "segmentation_id": 528, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1e514afb-c9", "ovs_interfaceid": "1e514afb-c9a6-43c6-bfec-ac1a32620674", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 822.672794] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9f9f608a-dd59-4deb-8ecf-94d85c2fa869 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Lock "70888889-4965-47ab-ad47-59f1c1286bd8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 65.238s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 822.772705] env[63028]: DEBUG oslo_concurrency.lockutils [None req-93fab7b0-32c7-48bb-8afa-405449cdcf5e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Acquiring lock "70888889-4965-47ab-ad47-59f1c1286bd8" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 822.772951] env[63028]: DEBUG oslo_concurrency.lockutils [None req-93fab7b0-32c7-48bb-8afa-405449cdcf5e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Lock "70888889-4965-47ab-ad47-59f1c1286bd8" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 822.773153] env[63028]: DEBUG nova.compute.manager 
[None req-93fab7b0-32c7-48bb-8afa-405449cdcf5e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 70888889-4965-47ab-ad47-59f1c1286bd8] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 822.774717] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-996446ad-3e39-4a12-aa54-5b9a0dd3e207 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.781479] env[63028]: DEBUG nova.compute.manager [None req-93fab7b0-32c7-48bb-8afa-405449cdcf5e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 70888889-4965-47ab-ad47-59f1c1286bd8] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=63028) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 822.782801] env[63028]: DEBUG nova.objects.instance [None req-93fab7b0-32c7-48bb-8afa-405449cdcf5e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Lazy-loading 'flavor' on Instance uuid 70888889-4965-47ab-ad47-59f1c1286bd8 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 822.789681] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7399005-12bf-4261-953a-7d78f0010903 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.798479] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d98bc9c4-991b-46e5-8eac-0035770516ef {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.834745] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c668e099-ea76-49a7-8eba-290ebc50fa5f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Releasing lock "refresh_cache-514c83d1-4fb1-435c-8c25-aa112c744131" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 822.835174] env[63028]: DEBUG nova.compute.manager [None req-c668e099-ea76-49a7-8eba-290ebc50fa5f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 514c83d1-4fb1-435c-8c25-aa112c744131] Instance network_info: |[{"id": "1e514afb-c9a6-43c6-bfec-ac1a32620674", "address": "fa:16:3e:62:4e:4d", "network": {"id": "e8109779-7eb7-4751-b6e5-c1e3b007cb9a", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1892394824-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "05118b378b5e4d838962db2378b381bc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5446413d-c3b0-4cd2-a962-62240db178ac", "external-id": "nsx-vlan-transportzone-528", "segmentation_id": 528, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1e514afb-c9", 
"ovs_interfaceid": "1e514afb-c9a6-43c6-bfec-ac1a32620674", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 822.835988] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c668e099-ea76-49a7-8eba-290ebc50fa5f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 514c83d1-4fb1-435c-8c25-aa112c744131] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:62:4e:4d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5446413d-c3b0-4cd2-a962-62240db178ac', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1e514afb-c9a6-43c6-bfec-ac1a32620674', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 822.843510] env[63028]: DEBUG oslo.service.loopingcall [None req-c668e099-ea76-49a7-8eba-290ebc50fa5f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 822.844566] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45c3e00d-8fbd-4bba-a290-40150d121b67 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.847750] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 514c83d1-4fb1-435c-8c25-aa112c744131] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 822.849224] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1ecd3e6f-9546-4a89-930a-ab11cd4cfdeb {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.866878] env[63028]: DEBUG nova.compute.manager [req-dbda4269-1649-486e-8fab-57314937f934 req-abe94ce8-e4ad-4a49-a2a6-503610a27c99 service nova] [instance: e346c31b-ef1b-4f75-8564-cefe26bd672f] Received event network-vif-deleted-a24d4572-d230-46e3-82e9-72efb20f6178 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 822.866918] env[63028]: INFO nova.compute.manager [req-dbda4269-1649-486e-8fab-57314937f934 req-abe94ce8-e4ad-4a49-a2a6-503610a27c99 service nova] [instance: e346c31b-ef1b-4f75-8564-cefe26bd672f] Neutron deleted interface a24d4572-d230-46e3-82e9-72efb20f6178; detaching it from the instance and deleting it from the info cache [ 822.867323] env[63028]: DEBUG nova.network.neutron [req-dbda4269-1649-486e-8fab-57314937f934 req-abe94ce8-e4ad-4a49-a2a6-503610a27c99 service nova] [instance: e346c31b-ef1b-4f75-8564-cefe26bd672f] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 822.875190] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95b37769-85fd-429c-be89-561ee27f4560 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.879980] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 822.879980] env[63028]: value = "task-2735559" [ 822.879980] 
env[63028]: _type = "Task" [ 822.879980] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.881087] env[63028]: DEBUG nova.compute.manager [req-a4c6f6f4-ef79-44d9-b9f3-b38d0f2f153c req-26394d56-e0b3-49f3-a452-a79823a31ab5 service nova] [instance: 514c83d1-4fb1-435c-8c25-aa112c744131] Received event network-changed-1e514afb-c9a6-43c6-bfec-ac1a32620674 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 822.881352] env[63028]: DEBUG nova.compute.manager [req-a4c6f6f4-ef79-44d9-b9f3-b38d0f2f153c req-26394d56-e0b3-49f3-a452-a79823a31ab5 service nova] [instance: 514c83d1-4fb1-435c-8c25-aa112c744131] Refreshing instance network info cache due to event network-changed-1e514afb-c9a6-43c6-bfec-ac1a32620674. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 822.881626] env[63028]: DEBUG oslo_concurrency.lockutils [req-a4c6f6f4-ef79-44d9-b9f3-b38d0f2f153c req-26394d56-e0b3-49f3-a452-a79823a31ab5 service nova] Acquiring lock "refresh_cache-514c83d1-4fb1-435c-8c25-aa112c744131" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 822.882049] env[63028]: DEBUG oslo_concurrency.lockutils [req-a4c6f6f4-ef79-44d9-b9f3-b38d0f2f153c req-26394d56-e0b3-49f3-a452-a79823a31ab5 service nova] Acquired lock "refresh_cache-514c83d1-4fb1-435c-8c25-aa112c744131" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 822.882187] env[63028]: DEBUG nova.network.neutron [req-a4c6f6f4-ef79-44d9-b9f3-b38d0f2f153c req-26394d56-e0b3-49f3-a452-a79823a31ab5 service nova] [instance: 514c83d1-4fb1-435c-8c25-aa112c744131] Refreshing network info cache for port 1e514afb-c9a6-43c6-bfec-ac1a32620674 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 822.899199] env[63028]: DEBUG nova.compute.provider_tree [None req-81ee569f-ddd8-4590-ac59-e5b6b9865946 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 822.907329] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735559, 'name': CreateVM_Task} progress is 6%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.121427] env[63028]: DEBUG nova.compute.manager [None req-164a375f-5a7c-48ea-8de2-0424503d2164 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: f4718363-73b2-4016-8849-f75e98259023] Start spawning the instance on the hypervisor. 
{{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 823.151857] env[63028]: DEBUG nova.virt.hardware [None req-164a375f-5a7c-48ea-8de2-0424503d2164 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 823.152201] env[63028]: DEBUG nova.virt.hardware [None req-164a375f-5a7c-48ea-8de2-0424503d2164 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 823.152435] env[63028]: DEBUG nova.virt.hardware [None req-164a375f-5a7c-48ea-8de2-0424503d2164 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 823.152729] env[63028]: DEBUG nova.virt.hardware [None req-164a375f-5a7c-48ea-8de2-0424503d2164 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 823.153303] env[63028]: DEBUG nova.virt.hardware [None req-164a375f-5a7c-48ea-8de2-0424503d2164 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 823.153392] env[63028]: DEBUG nova.virt.hardware [None req-164a375f-5a7c-48ea-8de2-0424503d2164 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 823.153716] env[63028]: DEBUG nova.virt.hardware [None req-164a375f-5a7c-48ea-8de2-0424503d2164 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 823.153950] env[63028]: DEBUG nova.virt.hardware [None req-164a375f-5a7c-48ea-8de2-0424503d2164 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 823.154232] env[63028]: DEBUG nova.virt.hardware [None req-164a375f-5a7c-48ea-8de2-0424503d2164 
tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 823.154438] env[63028]: DEBUG nova.virt.hardware [None req-164a375f-5a7c-48ea-8de2-0424503d2164 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 823.155077] env[63028]: DEBUG nova.virt.hardware [None req-164a375f-5a7c-48ea-8de2-0424503d2164 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 823.155671] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-227c1407-0962-4b43-97b7-b87b855116e4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.164149] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f70f455-494a-4623-80de-5eaf8eadc6ad {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.178921] env[63028]: DEBUG nova.network.neutron [-] [instance: e346c31b-ef1b-4f75-8564-cefe26bd672f] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 823.180551] env[63028]: DEBUG nova.compute.manager [None req-4c4ff793-44ac-4d71-af67-67f78b3ae605 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: a50e1167-d8ed-4099-83c3-a5066ab0be1f] Starting instance... {{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 823.373688] env[63028]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f4a272b6-74dd-4e60-b025-b48f8f9ce587 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.386425] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96c31328-b1cd-4a72-8893-3d5c0fef3d08 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.405766] env[63028]: DEBUG nova.scheduler.client.report [None req-81ee569f-ddd8-4590-ac59-e5b6b9865946 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 823.415015] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735559, 'name': CreateVM_Task, 'duration_secs': 0.507501} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.415503] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 514c83d1-4fb1-435c-8c25-aa112c744131] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 823.416369] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c668e099-ea76-49a7-8eba-290ebc50fa5f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 823.416559] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c668e099-ea76-49a7-8eba-290ebc50fa5f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 823.416906] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c668e099-ea76-49a7-8eba-290ebc50fa5f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 823.417503] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2259ab22-0d8e-40f4-80e6-fdd67f5784d6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.438021] env[63028]: DEBUG nova.compute.manager [req-dbda4269-1649-486e-8fab-57314937f934 req-abe94ce8-e4ad-4a49-a2a6-503610a27c99 service nova] [instance: e346c31b-ef1b-4f75-8564-cefe26bd672f] Detach interface failed, port_id=a24d4572-d230-46e3-82e9-72efb20f6178, reason: Instance e346c31b-ef1b-4f75-8564-cefe26bd672f could not be found. {{(pid=63028) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 823.439991] env[63028]: DEBUG oslo_vmware.api [None req-c668e099-ea76-49a7-8eba-290ebc50fa5f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Waiting for the task: (returnval){ [ 823.439991] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52dc62c9-88f4-e9df-97f0-b9a49bfb85cc" [ 823.439991] env[63028]: _type = "Task" [ 823.439991] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.452064] env[63028]: DEBUG oslo_vmware.api [None req-c668e099-ea76-49a7-8eba-290ebc50fa5f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52dc62c9-88f4-e9df-97f0-b9a49bfb85cc, 'name': SearchDatastore_Task, 'duration_secs': 0.014038} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.452434] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c668e099-ea76-49a7-8eba-290ebc50fa5f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 823.452671] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c668e099-ea76-49a7-8eba-290ebc50fa5f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 514c83d1-4fb1-435c-8c25-aa112c744131] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 823.453142] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c668e099-ea76-49a7-8eba-290ebc50fa5f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 823.453329] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c668e099-ea76-49a7-8eba-290ebc50fa5f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 823.453516] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-c668e099-ea76-49a7-8eba-290ebc50fa5f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 823.453777] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a41ee56e-ad13-48c2-b79b-c26db79b582c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.462783] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-c668e099-ea76-49a7-8eba-290ebc50fa5f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 823.462980] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c668e099-ea76-49a7-8eba-290ebc50fa5f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 823.463729] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5ebe9b80-806f-4c47-9067-290e2e0449d7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.469062] env[63028]: DEBUG oslo_vmware.api [None req-c668e099-ea76-49a7-8eba-290ebc50fa5f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Waiting for the task: (returnval){ [ 823.469062] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5266d7f3-50c8-02d7-3f51-f90120ab7ee0" [ 823.469062] env[63028]: _type = "Task" [ 823.469062] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.477704] env[63028]: DEBUG oslo_vmware.api [None req-c668e099-ea76-49a7-8eba-290ebc50fa5f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5266d7f3-50c8-02d7-3f51-f90120ab7ee0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.588454] env[63028]: DEBUG nova.network.neutron [None req-164a375f-5a7c-48ea-8de2-0424503d2164 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: f4718363-73b2-4016-8849-f75e98259023] Successfully updated port: d63bc515-e6bf-4c5c-88b1-4d1e21688dc4 {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 823.687037] env[63028]: INFO nova.compute.manager [-] [instance: e346c31b-ef1b-4f75-8564-cefe26bd672f] Took 1.45 seconds to deallocate network for instance. [ 823.699154] env[63028]: DEBUG nova.network.neutron [req-a4c6f6f4-ef79-44d9-b9f3-b38d0f2f153c req-26394d56-e0b3-49f3-a452-a79823a31ab5 service nova] [instance: 514c83d1-4fb1-435c-8c25-aa112c744131] Updated VIF entry in instance network info cache for port 1e514afb-c9a6-43c6-bfec-ac1a32620674. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 823.699154] env[63028]: DEBUG nova.network.neutron [req-a4c6f6f4-ef79-44d9-b9f3-b38d0f2f153c req-26394d56-e0b3-49f3-a452-a79823a31ab5 service nova] [instance: 514c83d1-4fb1-435c-8c25-aa112c744131] Updating instance_info_cache with network_info: [{"id": "1e514afb-c9a6-43c6-bfec-ac1a32620674", "address": "fa:16:3e:62:4e:4d", "network": {"id": "e8109779-7eb7-4751-b6e5-c1e3b007cb9a", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1892394824-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "05118b378b5e4d838962db2378b381bc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5446413d-c3b0-4cd2-a962-62240db178ac", "external-id": "nsx-vlan-transportzone-528", "segmentation_id": 528, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1e514afb-c9", "ovs_interfaceid": "1e514afb-c9a6-43c6-bfec-ac1a32620674", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 823.725354] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4c4ff793-44ac-4d71-af67-67f78b3ae605 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 823.792123] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-93fab7b0-32c7-48bb-8afa-405449cdcf5e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 70888889-4965-47ab-ad47-59f1c1286bd8] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 823.793464] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c6399bea-b484-461d-a957-1245b6db4a61 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.801544] env[63028]: DEBUG oslo_vmware.api [None req-93fab7b0-32c7-48bb-8afa-405449cdcf5e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Waiting for the task: (returnval){ [ 823.801544] env[63028]: value = "task-2735560" [ 823.801544] env[63028]: _type = "Task" [ 823.801544] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.811819] env[63028]: DEBUG oslo_vmware.api [None req-93fab7b0-32c7-48bb-8afa-405449cdcf5e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2735560, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.911537] env[63028]: DEBUG oslo_concurrency.lockutils [None req-81ee569f-ddd8-4590-ac59-e5b6b9865946 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.819s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 823.912220] env[63028]: DEBUG nova.compute.manager [None req-81ee569f-ddd8-4590-ac59-e5b6b9865946 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7] Start building networks asynchronously for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 823.916121] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7853024a-9db9-44e7-be13-cc5f0e4f4703 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 36.447s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 823.916121] env[63028]: DEBUG nova.objects.instance [None req-7853024a-9db9-44e7-be13-cc5f0e4f4703 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Lazy-loading 'resources' on Instance uuid 5a340e31-678c-437e-aa4e-07d5d9f4334f {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 823.981679] env[63028]: DEBUG oslo_vmware.api [None req-c668e099-ea76-49a7-8eba-290ebc50fa5f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5266d7f3-50c8-02d7-3f51-f90120ab7ee0, 'name': SearchDatastore_Task, 'duration_secs': 0.01146} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.981813] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-02f414a6-6262-49dc-a764-a7173fb46663 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.987034] env[63028]: DEBUG oslo_vmware.api [None req-c668e099-ea76-49a7-8eba-290ebc50fa5f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Waiting for the task: (returnval){ [ 823.987034] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52ba74b8-2d4b-cb64-6d6d-e7105f177671" [ 823.987034] env[63028]: _type = "Task" [ 823.987034] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.995318] env[63028]: DEBUG oslo_vmware.api [None req-c668e099-ea76-49a7-8eba-290ebc50fa5f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52ba74b8-2d4b-cb64-6d6d-e7105f177671, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.092407] env[63028]: DEBUG oslo_concurrency.lockutils [None req-164a375f-5a7c-48ea-8de2-0424503d2164 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Acquiring lock "refresh_cache-f4718363-73b2-4016-8849-f75e98259023" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 824.092572] env[63028]: DEBUG oslo_concurrency.lockutils [None req-164a375f-5a7c-48ea-8de2-0424503d2164 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Acquired lock "refresh_cache-f4718363-73b2-4016-8849-f75e98259023" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 824.092729] env[63028]: DEBUG nova.network.neutron [None req-164a375f-5a7c-48ea-8de2-0424503d2164 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: f4718363-73b2-4016-8849-f75e98259023] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 824.202729] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ad39a64e-600e-42cd-a15b-f0ddb3c175b1 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 824.203190] env[63028]: DEBUG oslo_concurrency.lockutils [req-a4c6f6f4-ef79-44d9-b9f3-b38d0f2f153c req-26394d56-e0b3-49f3-a452-a79823a31ab5 service nova] Releasing lock "refresh_cache-514c83d1-4fb1-435c-8c25-aa112c744131" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 824.311757] env[63028]: DEBUG oslo_vmware.api [None req-93fab7b0-32c7-48bb-8afa-405449cdcf5e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2735560, 'name': PowerOffVM_Task, 'duration_secs': 0.187184} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.312100] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-93fab7b0-32c7-48bb-8afa-405449cdcf5e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 70888889-4965-47ab-ad47-59f1c1286bd8] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 824.312236] env[63028]: DEBUG nova.compute.manager [None req-93fab7b0-32c7-48bb-8afa-405449cdcf5e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 70888889-4965-47ab-ad47-59f1c1286bd8] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 824.313025] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f35a6d7-d7e3-4a64-b1a8-7400f722650e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.421966] env[63028]: DEBUG nova.compute.utils [None req-81ee569f-ddd8-4590-ac59-e5b6b9865946 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 824.425760] env[63028]: DEBUG nova.compute.manager [None req-81ee569f-ddd8-4590-ac59-e5b6b9865946 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7] Allocating IP information in the background. {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 824.426253] env[63028]: DEBUG nova.network.neutron [None req-81ee569f-ddd8-4590-ac59-e5b6b9865946 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 824.480753] env[63028]: DEBUG nova.policy [None req-81ee569f-ddd8-4590-ac59-e5b6b9865946 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '25218cd4756d409c9fee41c970fb2d32', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e85128c5c889438bbb1df571b7756c6a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 824.498771] env[63028]: DEBUG oslo_vmware.api [None req-c668e099-ea76-49a7-8eba-290ebc50fa5f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52ba74b8-2d4b-cb64-6d6d-e7105f177671, 'name': SearchDatastore_Task, 'duration_secs': 0.013005} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.501559] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c668e099-ea76-49a7-8eba-290ebc50fa5f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 824.501834] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-c668e099-ea76-49a7-8eba-290ebc50fa5f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] 514c83d1-4fb1-435c-8c25-aa112c744131/514c83d1-4fb1-435c-8c25-aa112c744131.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 824.502344] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-690e7ac4-a02e-4ea5-baca-af4055f7ffe3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.510045] env[63028]: DEBUG oslo_vmware.api [None req-c668e099-ea76-49a7-8eba-290ebc50fa5f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Waiting for the task: (returnval){ [ 824.510045] env[63028]: value = "task-2735561" [ 824.510045] env[63028]: _type = "Task" [ 824.510045] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.521580] env[63028]: DEBUG oslo_vmware.api [None req-c668e099-ea76-49a7-8eba-290ebc50fa5f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2735561, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.644450] env[63028]: DEBUG nova.network.neutron [None req-164a375f-5a7c-48ea-8de2-0424503d2164 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: f4718363-73b2-4016-8849-f75e98259023] Instance cache missing network info. 
{{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 824.831160] env[63028]: DEBUG oslo_concurrency.lockutils [None req-93fab7b0-32c7-48bb-8afa-405449cdcf5e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Lock "70888889-4965-47ab-ad47-59f1c1286bd8" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.058s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 824.848019] env[63028]: DEBUG nova.network.neutron [None req-164a375f-5a7c-48ea-8de2-0424503d2164 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: f4718363-73b2-4016-8849-f75e98259023] Updating instance_info_cache with network_info: [{"id": "d63bc515-e6bf-4c5c-88b1-4d1e21688dc4", "address": "fa:16:3e:23:97:fb", "network": {"id": "8cd8a274-fe00-4495-9dbd-201d72164415", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-450064933-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1d3152ab577947b28de82f4801285f8c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4576b9d4-535c-40aa-b078-246f671f216e", "external-id": "nsx-vlan-transportzone-27", "segmentation_id": 27, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd63bc515-e6", "ovs_interfaceid": "d63bc515-e6bf-4c5c-88b1-4d1e21688dc4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 824.910294] env[63028]: DEBUG nova.network.neutron [None req-81ee569f-ddd8-4590-ac59-e5b6b9865946 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7] Successfully created port: 6ecb125b-389c-4dce-8446-368a7298e497 {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 824.927348] env[63028]: DEBUG nova.compute.manager [None req-81ee569f-ddd8-4590-ac59-e5b6b9865946 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7] Start building block device mappings for instance. 
{{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 824.999248] env[63028]: DEBUG nova.compute.manager [req-fe83b243-cc6d-431c-95b7-15c4a8e2f70f req-8f1170c2-e5a2-4fca-a95e-4d9b78b9c58a service nova] [instance: f4718363-73b2-4016-8849-f75e98259023] Received event network-vif-plugged-d63bc515-e6bf-4c5c-88b1-4d1e21688dc4 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 824.999720] env[63028]: DEBUG oslo_concurrency.lockutils [req-fe83b243-cc6d-431c-95b7-15c4a8e2f70f req-8f1170c2-e5a2-4fca-a95e-4d9b78b9c58a service nova] Acquiring lock "f4718363-73b2-4016-8849-f75e98259023-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 825.000086] env[63028]: DEBUG oslo_concurrency.lockutils [req-fe83b243-cc6d-431c-95b7-15c4a8e2f70f req-8f1170c2-e5a2-4fca-a95e-4d9b78b9c58a service nova] Lock "f4718363-73b2-4016-8849-f75e98259023-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 825.000377] env[63028]: DEBUG oslo_concurrency.lockutils [req-fe83b243-cc6d-431c-95b7-15c4a8e2f70f req-8f1170c2-e5a2-4fca-a95e-4d9b78b9c58a service nova] Lock "f4718363-73b2-4016-8849-f75e98259023-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 825.000695] env[63028]: DEBUG nova.compute.manager [req-fe83b243-cc6d-431c-95b7-15c4a8e2f70f req-8f1170c2-e5a2-4fca-a95e-4d9b78b9c58a service nova] [instance: f4718363-73b2-4016-8849-f75e98259023] No waiting events found dispatching network-vif-plugged-d63bc515-e6bf-4c5c-88b1-4d1e21688dc4 {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 825.000977] env[63028]: WARNING nova.compute.manager [req-fe83b243-cc6d-431c-95b7-15c4a8e2f70f req-8f1170c2-e5a2-4fca-a95e-4d9b78b9c58a service nova] [instance: f4718363-73b2-4016-8849-f75e98259023] Received unexpected event network-vif-plugged-d63bc515-e6bf-4c5c-88b1-4d1e21688dc4 for instance with vm_state building and task_state spawning. [ 825.001278] env[63028]: DEBUG nova.compute.manager [req-fe83b243-cc6d-431c-95b7-15c4a8e2f70f req-8f1170c2-e5a2-4fca-a95e-4d9b78b9c58a service nova] [instance: f4718363-73b2-4016-8849-f75e98259023] Received event network-changed-d63bc515-e6bf-4c5c-88b1-4d1e21688dc4 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 825.001591] env[63028]: DEBUG nova.compute.manager [req-fe83b243-cc6d-431c-95b7-15c4a8e2f70f req-8f1170c2-e5a2-4fca-a95e-4d9b78b9c58a service nova] [instance: f4718363-73b2-4016-8849-f75e98259023] Refreshing instance network info cache due to event network-changed-d63bc515-e6bf-4c5c-88b1-4d1e21688dc4. 
{{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 825.001852] env[63028]: DEBUG oslo_concurrency.lockutils [req-fe83b243-cc6d-431c-95b7-15c4a8e2f70f req-8f1170c2-e5a2-4fca-a95e-4d9b78b9c58a service nova] Acquiring lock "refresh_cache-f4718363-73b2-4016-8849-f75e98259023" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 825.025093] env[63028]: DEBUG oslo_vmware.api [None req-c668e099-ea76-49a7-8eba-290ebc50fa5f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2735561, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.030661] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5d8b192-528d-4d3b-9794-de587f591418 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.039069] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c9fd461-b919-45ff-b616-47be3df17453 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.073682] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ca866c5-121e-498f-b92d-2d6f7d3e472d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.093520] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92005e2b-8ab9-4a96-ab40-fc0b7e144b57 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.108814] env[63028]: DEBUG nova.compute.provider_tree [None req-7853024a-9db9-44e7-be13-cc5f0e4f4703 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 825.351738] env[63028]: DEBUG oslo_concurrency.lockutils [None req-164a375f-5a7c-48ea-8de2-0424503d2164 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Releasing lock "refresh_cache-f4718363-73b2-4016-8849-f75e98259023" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 825.352927] env[63028]: DEBUG nova.compute.manager [None req-164a375f-5a7c-48ea-8de2-0424503d2164 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: f4718363-73b2-4016-8849-f75e98259023] Instance network_info: |[{"id": "d63bc515-e6bf-4c5c-88b1-4d1e21688dc4", "address": "fa:16:3e:23:97:fb", "network": {"id": "8cd8a274-fe00-4495-9dbd-201d72164415", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-450064933-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1d3152ab577947b28de82f4801285f8c", "mtu": 8950, 
"physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4576b9d4-535c-40aa-b078-246f671f216e", "external-id": "nsx-vlan-transportzone-27", "segmentation_id": 27, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd63bc515-e6", "ovs_interfaceid": "d63bc515-e6bf-4c5c-88b1-4d1e21688dc4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 825.353287] env[63028]: DEBUG oslo_concurrency.lockutils [req-fe83b243-cc6d-431c-95b7-15c4a8e2f70f req-8f1170c2-e5a2-4fca-a95e-4d9b78b9c58a service nova] Acquired lock "refresh_cache-f4718363-73b2-4016-8849-f75e98259023" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 825.353757] env[63028]: DEBUG nova.network.neutron [req-fe83b243-cc6d-431c-95b7-15c4a8e2f70f req-8f1170c2-e5a2-4fca-a95e-4d9b78b9c58a service nova] [instance: f4718363-73b2-4016-8849-f75e98259023] Refreshing network info cache for port d63bc515-e6bf-4c5c-88b1-4d1e21688dc4 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 825.354752] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-164a375f-5a7c-48ea-8de2-0424503d2164 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: f4718363-73b2-4016-8849-f75e98259023] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:23:97:fb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4576b9d4-535c-40aa-b078-246f671f216e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd63bc515-e6bf-4c5c-88b1-4d1e21688dc4', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 825.363195] env[63028]: DEBUG oslo.service.loopingcall [None req-164a375f-5a7c-48ea-8de2-0424503d2164 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 825.363672] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f4718363-73b2-4016-8849-f75e98259023] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 825.363902] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b4fd156f-61d2-4cdc-889f-3c92c5db9cfc {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.385216] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 825.385216] env[63028]: value = "task-2735562" [ 825.385216] env[63028]: _type = "Task" [ 825.385216] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 825.394900] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735562, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.522964] env[63028]: DEBUG oslo_vmware.api [None req-c668e099-ea76-49a7-8eba-290ebc50fa5f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2735561, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.6418} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.523425] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-c668e099-ea76-49a7-8eba-290ebc50fa5f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] 514c83d1-4fb1-435c-8c25-aa112c744131/514c83d1-4fb1-435c-8c25-aa112c744131.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 825.523834] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c668e099-ea76-49a7-8eba-290ebc50fa5f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 514c83d1-4fb1-435c-8c25-aa112c744131] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 825.524148] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4204abbc-5f92-44c9-be4c-62eaead384c6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.531537] env[63028]: DEBUG oslo_vmware.api [None req-c668e099-ea76-49a7-8eba-290ebc50fa5f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Waiting for the task: (returnval){ [ 825.531537] env[63028]: value = "task-2735563" [ 825.531537] env[63028]: _type = "Task" [ 825.531537] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 825.539624] env[63028]: DEBUG oslo_vmware.api [None req-c668e099-ea76-49a7-8eba-290ebc50fa5f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2735563, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.612566] env[63028]: DEBUG nova.scheduler.client.report [None req-7853024a-9db9-44e7-be13-cc5f0e4f4703 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 825.661342] env[63028]: INFO nova.compute.manager [None req-db2d1a1b-bab5-4d65-92a7-d30b7a67907c tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 70888889-4965-47ab-ad47-59f1c1286bd8] Rebuilding instance [ 825.702496] env[63028]: DEBUG nova.compute.manager [None req-db2d1a1b-bab5-4d65-92a7-d30b7a67907c tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 70888889-4965-47ab-ad47-59f1c1286bd8] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 825.703385] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5466fd31-045f-40bd-8448-97bfb2e5c727 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.895692] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735562, 'name': CreateVM_Task, 'duration_secs': 0.349446} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.899665] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f4718363-73b2-4016-8849-f75e98259023] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 825.899665] env[63028]: DEBUG oslo_concurrency.lockutils [None req-164a375f-5a7c-48ea-8de2-0424503d2164 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 825.899665] env[63028]: DEBUG oslo_concurrency.lockutils [None req-164a375f-5a7c-48ea-8de2-0424503d2164 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 825.899665] env[63028]: DEBUG oslo_concurrency.lockutils [None req-164a375f-5a7c-48ea-8de2-0424503d2164 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 825.899919] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f4b3195f-9484-4ea3-aa18-6e68267be86e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.904764] env[63028]: DEBUG oslo_vmware.api [None req-164a375f-5a7c-48ea-8de2-0424503d2164 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Waiting for the task: (returnval){ [ 825.904764] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52afb1bb-6e1e-35ca-770e-504d22b9287f" [ 825.904764] env[63028]: _type = "Task" [ 825.904764] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 825.912891] env[63028]: DEBUG oslo_vmware.api [None req-164a375f-5a7c-48ea-8de2-0424503d2164 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52afb1bb-6e1e-35ca-770e-504d22b9287f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.941055] env[63028]: DEBUG nova.compute.manager [None req-81ee569f-ddd8-4590-ac59-e5b6b9865946 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7] Start spawning the instance on the hypervisor. 
{{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 825.972601] env[63028]: DEBUG nova.virt.hardware [None req-81ee569f-ddd8-4590-ac59-e5b6b9865946 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 825.972851] env[63028]: DEBUG nova.virt.hardware [None req-81ee569f-ddd8-4590-ac59-e5b6b9865946 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 825.973019] env[63028]: DEBUG nova.virt.hardware [None req-81ee569f-ddd8-4590-ac59-e5b6b9865946 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 825.973211] env[63028]: DEBUG nova.virt.hardware [None req-81ee569f-ddd8-4590-ac59-e5b6b9865946 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 825.973359] env[63028]: DEBUG nova.virt.hardware [None req-81ee569f-ddd8-4590-ac59-e5b6b9865946 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 825.973506] env[63028]: DEBUG nova.virt.hardware [None req-81ee569f-ddd8-4590-ac59-e5b6b9865946 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 825.973715] env[63028]: DEBUG nova.virt.hardware [None req-81ee569f-ddd8-4590-ac59-e5b6b9865946 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 825.973947] env[63028]: DEBUG nova.virt.hardware [None req-81ee569f-ddd8-4590-ac59-e5b6b9865946 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 825.974150] env[63028]: DEBUG nova.virt.hardware [None req-81ee569f-ddd8-4590-ac59-e5b6b9865946 
tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 825.974380] env[63028]: DEBUG nova.virt.hardware [None req-81ee569f-ddd8-4590-ac59-e5b6b9865946 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 825.974589] env[63028]: DEBUG nova.virt.hardware [None req-81ee569f-ddd8-4590-ac59-e5b6b9865946 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 825.975454] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e351e880-af25-43eb-be44-ac2c101c4efd {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.984579] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75445bbe-3414-4a24-a631-0ac3ebbc1b71 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.042150] env[63028]: DEBUG oslo_vmware.api [None req-c668e099-ea76-49a7-8eba-290ebc50fa5f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2735563, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.088056} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 826.042490] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c668e099-ea76-49a7-8eba-290ebc50fa5f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 514c83d1-4fb1-435c-8c25-aa112c744131] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 826.043310] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3aa5dd76-a203-4d0b-8676-1d6237894ef7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.068542] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-c668e099-ea76-49a7-8eba-290ebc50fa5f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 514c83d1-4fb1-435c-8c25-aa112c744131] Reconfiguring VM instance instance-0000003c to attach disk [datastore1] 514c83d1-4fb1-435c-8c25-aa112c744131/514c83d1-4fb1-435c-8c25-aa112c744131.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 826.071782] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-97f25c78-4e7f-459d-8a74-ad8486751f9d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.092884] env[63028]: DEBUG oslo_vmware.api [None req-c668e099-ea76-49a7-8eba-290ebc50fa5f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Waiting for the task: (returnval){ [ 826.092884] 
env[63028]: value = "task-2735564" [ 826.092884] env[63028]: _type = "Task" [ 826.092884] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 826.101127] env[63028]: DEBUG oslo_vmware.api [None req-c668e099-ea76-49a7-8eba-290ebc50fa5f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2735564, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.118333] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7853024a-9db9-44e7-be13-cc5f0e4f4703 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.202s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 826.123619] env[63028]: DEBUG oslo_concurrency.lockutils [None req-dbd96db3-2cb6-4207-bf54-bf453d821f5b tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 38.406s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 826.125412] env[63028]: INFO nova.compute.claims [None req-dbd96db3-2cb6-4207-bf54-bf453d821f5b tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] [instance: 56d39801-f3e7-4cfe-a038-6a5e762bfda8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 826.149848] env[63028]: INFO nova.scheduler.client.report [None req-7853024a-9db9-44e7-be13-cc5f0e4f4703 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Deleted allocations for instance 5a340e31-678c-437e-aa4e-07d5d9f4334f [ 826.161986] env[63028]: DEBUG nova.network.neutron [req-fe83b243-cc6d-431c-95b7-15c4a8e2f70f req-8f1170c2-e5a2-4fca-a95e-4d9b78b9c58a service nova] [instance: f4718363-73b2-4016-8849-f75e98259023] Updated VIF entry in instance network info cache for port d63bc515-e6bf-4c5c-88b1-4d1e21688dc4. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 826.162311] env[63028]: DEBUG nova.network.neutron [req-fe83b243-cc6d-431c-95b7-15c4a8e2f70f req-8f1170c2-e5a2-4fca-a95e-4d9b78b9c58a service nova] [instance: f4718363-73b2-4016-8849-f75e98259023] Updating instance_info_cache with network_info: [{"id": "d63bc515-e6bf-4c5c-88b1-4d1e21688dc4", "address": "fa:16:3e:23:97:fb", "network": {"id": "8cd8a274-fe00-4495-9dbd-201d72164415", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-450064933-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1d3152ab577947b28de82f4801285f8c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4576b9d4-535c-40aa-b078-246f671f216e", "external-id": "nsx-vlan-transportzone-27", "segmentation_id": 27, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd63bc515-e6", "ovs_interfaceid": "d63bc515-e6bf-4c5c-88b1-4d1e21688dc4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 826.415398] env[63028]: DEBUG oslo_vmware.api [None req-164a375f-5a7c-48ea-8de2-0424503d2164 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52afb1bb-6e1e-35ca-770e-504d22b9287f, 'name': SearchDatastore_Task, 'duration_secs': 0.011846} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 826.415709] env[63028]: DEBUG oslo_concurrency.lockutils [None req-164a375f-5a7c-48ea-8de2-0424503d2164 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 826.415929] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-164a375f-5a7c-48ea-8de2-0424503d2164 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: f4718363-73b2-4016-8849-f75e98259023] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 826.416180] env[63028]: DEBUG oslo_concurrency.lockutils [None req-164a375f-5a7c-48ea-8de2-0424503d2164 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 826.416330] env[63028]: DEBUG oslo_concurrency.lockutils [None req-164a375f-5a7c-48ea-8de2-0424503d2164 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 826.416506] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-164a375f-5a7c-48ea-8de2-0424503d2164 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 826.416763] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-29dba8b8-6cd6-42cb-8723-3cc12fd59c60 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.425014] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-164a375f-5a7c-48ea-8de2-0424503d2164 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 826.425257] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-164a375f-5a7c-48ea-8de2-0424503d2164 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 826.425982] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8487eb4a-e0f1-4bf5-a3aa-90c771d6a1ba {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.433165] env[63028]: DEBUG oslo_vmware.api [None req-164a375f-5a7c-48ea-8de2-0424503d2164 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Waiting for the task: (returnval){ [ 826.433165] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52d026a2-8fed-1599-982b-20e289f0bf96" [ 826.433165] env[63028]: _type = "Task" [ 826.433165] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 826.440967] env[63028]: DEBUG oslo_vmware.api [None req-164a375f-5a7c-48ea-8de2-0424503d2164 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52d026a2-8fed-1599-982b-20e289f0bf96, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.487585] env[63028]: DEBUG nova.compute.manager [req-d5e6a110-bc62-4bbc-9eef-cd54afcaa152 req-cfbb97c6-d81d-4281-87a2-d4e2f58f6e7e service nova] [instance: c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7] Received event network-vif-plugged-6ecb125b-389c-4dce-8446-368a7298e497 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 826.487960] env[63028]: DEBUG oslo_concurrency.lockutils [req-d5e6a110-bc62-4bbc-9eef-cd54afcaa152 req-cfbb97c6-d81d-4281-87a2-d4e2f58f6e7e service nova] Acquiring lock "c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 826.488389] env[63028]: DEBUG oslo_concurrency.lockutils [req-d5e6a110-bc62-4bbc-9eef-cd54afcaa152 req-cfbb97c6-d81d-4281-87a2-d4e2f58f6e7e service nova] Lock "c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 826.488389] env[63028]: DEBUG oslo_concurrency.lockutils [req-d5e6a110-bc62-4bbc-9eef-cd54afcaa152 req-cfbb97c6-d81d-4281-87a2-d4e2f58f6e7e service nova] Lock "c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 826.488573] env[63028]: DEBUG nova.compute.manager [req-d5e6a110-bc62-4bbc-9eef-cd54afcaa152 req-cfbb97c6-d81d-4281-87a2-d4e2f58f6e7e service nova] [instance: c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7] No waiting events found dispatching network-vif-plugged-6ecb125b-389c-4dce-8446-368a7298e497 {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 826.488757] env[63028]: WARNING nova.compute.manager [req-d5e6a110-bc62-4bbc-9eef-cd54afcaa152 req-cfbb97c6-d81d-4281-87a2-d4e2f58f6e7e service nova] [instance: c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7] Received unexpected event network-vif-plugged-6ecb125b-389c-4dce-8446-368a7298e497 
for instance with vm_state building and task_state spawning. [ 826.536943] env[63028]: DEBUG nova.network.neutron [None req-81ee569f-ddd8-4590-ac59-e5b6b9865946 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7] Successfully updated port: 6ecb125b-389c-4dce-8446-368a7298e497 {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 826.608197] env[63028]: DEBUG oslo_vmware.api [None req-c668e099-ea76-49a7-8eba-290ebc50fa5f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2735564, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.657944] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7853024a-9db9-44e7-be13-cc5f0e4f4703 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Lock "5a340e31-678c-437e-aa4e-07d5d9f4334f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 43.015s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 826.665906] env[63028]: DEBUG oslo_concurrency.lockutils [req-fe83b243-cc6d-431c-95b7-15c4a8e2f70f req-8f1170c2-e5a2-4fca-a95e-4d9b78b9c58a service nova] Releasing lock "refresh_cache-f4718363-73b2-4016-8849-f75e98259023" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 826.716457] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-db2d1a1b-bab5-4d65-92a7-d30b7a67907c tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 70888889-4965-47ab-ad47-59f1c1286bd8] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 826.716788] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-184cea98-9d8f-4ba4-8f04-b46e2a5fa815 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.723345] env[63028]: DEBUG oslo_vmware.api [None req-db2d1a1b-bab5-4d65-92a7-d30b7a67907c tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Waiting for the task: (returnval){ [ 826.723345] env[63028]: value = "task-2735565" [ 826.723345] env[63028]: _type = "Task" [ 826.723345] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 826.731573] env[63028]: DEBUG oslo_vmware.api [None req-db2d1a1b-bab5-4d65-92a7-d30b7a67907c tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2735565, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.943610] env[63028]: DEBUG oslo_vmware.api [None req-164a375f-5a7c-48ea-8de2-0424503d2164 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52d026a2-8fed-1599-982b-20e289f0bf96, 'name': SearchDatastore_Task, 'duration_secs': 0.009391} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 826.945049] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-249ea9da-c600-4595-8a5b-5a182ee2141a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.949706] env[63028]: DEBUG oslo_vmware.api [None req-164a375f-5a7c-48ea-8de2-0424503d2164 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Waiting for the task: (returnval){ [ 826.949706] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]524999bb-9c77-418b-9d4b-32a032d71ce4" [ 826.949706] env[63028]: _type = "Task" [ 826.949706] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 826.957358] env[63028]: DEBUG oslo_vmware.api [None req-164a375f-5a7c-48ea-8de2-0424503d2164 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]524999bb-9c77-418b-9d4b-32a032d71ce4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.040074] env[63028]: DEBUG oslo_concurrency.lockutils [None req-81ee569f-ddd8-4590-ac59-e5b6b9865946 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Acquiring lock "refresh_cache-c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 827.040301] env[63028]: DEBUG oslo_concurrency.lockutils [None req-81ee569f-ddd8-4590-ac59-e5b6b9865946 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Acquired lock "refresh_cache-c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 827.040461] env[63028]: DEBUG nova.network.neutron [None req-81ee569f-ddd8-4590-ac59-e5b6b9865946 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 827.105646] env[63028]: DEBUG oslo_vmware.api [None req-c668e099-ea76-49a7-8eba-290ebc50fa5f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2735564, 'name': ReconfigVM_Task, 'duration_secs': 0.660664} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 827.105948] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-c668e099-ea76-49a7-8eba-290ebc50fa5f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 514c83d1-4fb1-435c-8c25-aa112c744131] Reconfigured VM instance instance-0000003c to attach disk [datastore1] 514c83d1-4fb1-435c-8c25-aa112c744131/514c83d1-4fb1-435c-8c25-aa112c744131.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 827.106654] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a208178a-0d5b-469d-b7c7-181acd757b14 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.112931] env[63028]: DEBUG oslo_vmware.api [None req-c668e099-ea76-49a7-8eba-290ebc50fa5f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Waiting for the task: (returnval){ [ 827.112931] env[63028]: value = "task-2735566" [ 827.112931] env[63028]: _type = "Task" [ 827.112931] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 827.123181] env[63028]: DEBUG oslo_vmware.api [None req-c668e099-ea76-49a7-8eba-290ebc50fa5f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2735566, 'name': Rename_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.239453] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-db2d1a1b-bab5-4d65-92a7-d30b7a67907c tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 70888889-4965-47ab-ad47-59f1c1286bd8] VM already powered off {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 827.239453] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-db2d1a1b-bab5-4d65-92a7-d30b7a67907c tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 70888889-4965-47ab-ad47-59f1c1286bd8] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 827.239931] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4a2a457-658f-444f-bbb8-22ad22580513 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.248269] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-db2d1a1b-bab5-4d65-92a7-d30b7a67907c tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 70888889-4965-47ab-ad47-59f1c1286bd8] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 827.252207] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-09098207-a440-416d-ac2c-844994ee1dba {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.325885] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-db2d1a1b-bab5-4d65-92a7-d30b7a67907c tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 
70888889-4965-47ab-ad47-59f1c1286bd8] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 827.326594] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-db2d1a1b-bab5-4d65-92a7-d30b7a67907c tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 70888889-4965-47ab-ad47-59f1c1286bd8] Deleting contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 827.326594] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-db2d1a1b-bab5-4d65-92a7-d30b7a67907c tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Deleting the datastore file [datastore2] 70888889-4965-47ab-ad47-59f1c1286bd8 {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 827.326724] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b916c4d7-fd52-439f-86fa-a21499ca3761 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.337120] env[63028]: DEBUG oslo_vmware.api [None req-db2d1a1b-bab5-4d65-92a7-d30b7a67907c tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Waiting for the task: (returnval){ [ 827.337120] env[63028]: value = "task-2735568" [ 827.337120] env[63028]: _type = "Task" [ 827.337120] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 827.345291] env[63028]: DEBUG oslo_vmware.api [None req-db2d1a1b-bab5-4d65-92a7-d30b7a67907c tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2735568, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.465320] env[63028]: DEBUG oslo_vmware.api [None req-164a375f-5a7c-48ea-8de2-0424503d2164 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]524999bb-9c77-418b-9d4b-32a032d71ce4, 'name': SearchDatastore_Task, 'duration_secs': 0.012165} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 827.465645] env[63028]: DEBUG oslo_concurrency.lockutils [None req-164a375f-5a7c-48ea-8de2-0424503d2164 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 827.466635] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-164a375f-5a7c-48ea-8de2-0424503d2164 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] f4718363-73b2-4016-8849-f75e98259023/f4718363-73b2-4016-8849-f75e98259023.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 827.466635] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4731bae0-8297-4e4d-a469-09745274c990 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.473181] env[63028]: DEBUG oslo_vmware.api [None req-164a375f-5a7c-48ea-8de2-0424503d2164 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Waiting for the task: (returnval){ [ 827.473181] env[63028]: value = "task-2735569" [ 827.473181] env[63028]: _type = "Task" [ 827.473181] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 827.486451] env[63028]: DEBUG oslo_vmware.api [None req-164a375f-5a7c-48ea-8de2-0424503d2164 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': task-2735569, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.588472] env[63028]: DEBUG nova.network.neutron [None req-81ee569f-ddd8-4590-ac59-e5b6b9865946 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7] Instance cache missing network info. {{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 827.623524] env[63028]: DEBUG oslo_vmware.api [None req-c668e099-ea76-49a7-8eba-290ebc50fa5f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2735566, 'name': Rename_Task, 'duration_secs': 0.205681} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 827.623793] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-c668e099-ea76-49a7-8eba-290ebc50fa5f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 514c83d1-4fb1-435c-8c25-aa112c744131] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 827.624042] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-408d31c0-4dea-40cf-9de3-04c4a68bc64f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.630121] env[63028]: DEBUG oslo_vmware.api [None req-c668e099-ea76-49a7-8eba-290ebc50fa5f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Waiting for the task: (returnval){ [ 827.630121] env[63028]: value = "task-2735570" [ 827.630121] env[63028]: _type = "Task" [ 827.630121] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 827.637768] env[63028]: DEBUG oslo_vmware.api [None req-c668e099-ea76-49a7-8eba-290ebc50fa5f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2735570, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.705968] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9c41335-1187-4178-b26d-4ccd93062b4f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.713182] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14606669-9a48-4eb3-a270-5d60fbbf44ce {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.747167] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fda510ca-6d70-419a-b811-421a9cf25a6d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.755437] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f0cacaf-f137-42b8-8e67-f74d1e219434 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.770971] env[63028]: DEBUG nova.compute.provider_tree [None req-dbd96db3-2cb6-4207-bf54-bf453d821f5b tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 827.793025] env[63028]: DEBUG nova.network.neutron [None req-81ee569f-ddd8-4590-ac59-e5b6b9865946 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7] Updating instance_info_cache with network_info: [{"id": "6ecb125b-389c-4dce-8446-368a7298e497", "address": "fa:16:3e:f4:06:c4", "network": {"id": "ec7daf90-9857-4c67-8588-1d8404499409", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-574413795-network", "subnets": 
[{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e85128c5c889438bbb1df571b7756c6a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e1a5c1-4ae7-409b-8de7-d401684ef60d", "external-id": "nsx-vlan-transportzone-740", "segmentation_id": 740, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6ecb125b-38", "ovs_interfaceid": "6ecb125b-389c-4dce-8446-368a7298e497", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 827.850438] env[63028]: DEBUG oslo_vmware.api [None req-db2d1a1b-bab5-4d65-92a7-d30b7a67907c tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2735568, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.34136} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 827.850818] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-db2d1a1b-bab5-4d65-92a7-d30b7a67907c tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 827.851118] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-db2d1a1b-bab5-4d65-92a7-d30b7a67907c tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 70888889-4965-47ab-ad47-59f1c1286bd8] Deleted contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 827.851389] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-db2d1a1b-bab5-4d65-92a7-d30b7a67907c tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 70888889-4965-47ab-ad47-59f1c1286bd8] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 827.985199] env[63028]: DEBUG oslo_vmware.api [None req-164a375f-5a7c-48ea-8de2-0424503d2164 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': task-2735569, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.143104] env[63028]: DEBUG oslo_vmware.api [None req-c668e099-ea76-49a7-8eba-290ebc50fa5f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2735570, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.274340] env[63028]: DEBUG nova.scheduler.client.report [None req-dbd96db3-2cb6-4207-bf54-bf453d821f5b tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 828.298247] env[63028]: DEBUG oslo_concurrency.lockutils [None req-81ee569f-ddd8-4590-ac59-e5b6b9865946 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Releasing lock "refresh_cache-c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 828.299026] env[63028]: DEBUG nova.compute.manager [None req-81ee569f-ddd8-4590-ac59-e5b6b9865946 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7] Instance network_info: |[{"id": "6ecb125b-389c-4dce-8446-368a7298e497", "address": "fa:16:3e:f4:06:c4", "network": {"id": "ec7daf90-9857-4c67-8588-1d8404499409", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-574413795-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e85128c5c889438bbb1df571b7756c6a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e1a5c1-4ae7-409b-8de7-d401684ef60d", "external-id": "nsx-vlan-transportzone-740", "segmentation_id": 740, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6ecb125b-38", "ovs_interfaceid": "6ecb125b-389c-4dce-8446-368a7298e497", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 828.299189] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-81ee569f-ddd8-4590-ac59-e5b6b9865946 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f4:06:c4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69e1a5c1-4ae7-409b-8de7-d401684ef60d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6ecb125b-389c-4dce-8446-368a7298e497', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 828.307062] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-81ee569f-ddd8-4590-ac59-e5b6b9865946 
tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Creating folder: Project (e85128c5c889438bbb1df571b7756c6a). Parent ref: group-v550570. {{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 828.307560] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-93a897f7-ac73-4850-88a3-54157812a6a7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.319070] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-81ee569f-ddd8-4590-ac59-e5b6b9865946 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Created folder: Project (e85128c5c889438bbb1df571b7756c6a) in parent group-v550570. [ 828.319255] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-81ee569f-ddd8-4590-ac59-e5b6b9865946 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Creating folder: Instances. Parent ref: group-v550763. {{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 828.319523] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-77cec051-292b-404e-ab62-c6ae477686e3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.329458] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-81ee569f-ddd8-4590-ac59-e5b6b9865946 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Created folder: Instances in parent group-v550763. [ 828.329694] env[63028]: DEBUG oslo.service.loopingcall [None req-81ee569f-ddd8-4590-ac59-e5b6b9865946 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 828.329881] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 828.330135] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0d09a7fb-cda4-4b3c-bceb-8b4ab468c73b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.349253] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 828.349253] env[63028]: value = "task-2735573" [ 828.349253] env[63028]: _type = "Task" [ 828.349253] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 828.357755] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735573, 'name': CreateVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.485775] env[63028]: DEBUG oslo_vmware.api [None req-164a375f-5a7c-48ea-8de2-0424503d2164 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': task-2735569, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.714031} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 828.486075] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-164a375f-5a7c-48ea-8de2-0424503d2164 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] f4718363-73b2-4016-8849-f75e98259023/f4718363-73b2-4016-8849-f75e98259023.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 828.486210] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-164a375f-5a7c-48ea-8de2-0424503d2164 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: f4718363-73b2-4016-8849-f75e98259023] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 828.486481] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-25a96e3a-da42-43e9-9871-6a5d02085064 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.493981] env[63028]: DEBUG oslo_vmware.api [None req-164a375f-5a7c-48ea-8de2-0424503d2164 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Waiting for the task: (returnval){ [ 828.493981] env[63028]: value = "task-2735574" [ 828.493981] env[63028]: _type = "Task" [ 828.493981] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 828.502113] env[63028]: DEBUG oslo_vmware.api [None req-164a375f-5a7c-48ea-8de2-0424503d2164 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': task-2735574, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.544448] env[63028]: DEBUG nova.compute.manager [req-97ccf32c-7623-4835-b555-7c33a515914e req-4544a200-09fa-400d-95bc-0247c0df740a service nova] [instance: c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7] Received event network-changed-6ecb125b-389c-4dce-8446-368a7298e497 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 828.544671] env[63028]: DEBUG nova.compute.manager [req-97ccf32c-7623-4835-b555-7c33a515914e req-4544a200-09fa-400d-95bc-0247c0df740a service nova] [instance: c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7] Refreshing instance network info cache due to event network-changed-6ecb125b-389c-4dce-8446-368a7298e497. 
{{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 828.544932] env[63028]: DEBUG oslo_concurrency.lockutils [req-97ccf32c-7623-4835-b555-7c33a515914e req-4544a200-09fa-400d-95bc-0247c0df740a service nova] Acquiring lock "refresh_cache-c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 828.545334] env[63028]: DEBUG oslo_concurrency.lockutils [req-97ccf32c-7623-4835-b555-7c33a515914e req-4544a200-09fa-400d-95bc-0247c0df740a service nova] Acquired lock "refresh_cache-c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 828.545334] env[63028]: DEBUG nova.network.neutron [req-97ccf32c-7623-4835-b555-7c33a515914e req-4544a200-09fa-400d-95bc-0247c0df740a service nova] [instance: c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7] Refreshing network info cache for port 6ecb125b-389c-4dce-8446-368a7298e497 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 828.641636] env[63028]: DEBUG oslo_vmware.api [None req-c668e099-ea76-49a7-8eba-290ebc50fa5f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2735570, 'name': PowerOnVM_Task, 'duration_secs': 0.541321} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 828.641827] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-c668e099-ea76-49a7-8eba-290ebc50fa5f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 514c83d1-4fb1-435c-8c25-aa112c744131] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 828.642011] env[63028]: INFO nova.compute.manager [None req-c668e099-ea76-49a7-8eba-290ebc50fa5f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 514c83d1-4fb1-435c-8c25-aa112c744131] Took 8.26 seconds to spawn the instance on the hypervisor. [ 828.642204] env[63028]: DEBUG nova.compute.manager [None req-c668e099-ea76-49a7-8eba-290ebc50fa5f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 514c83d1-4fb1-435c-8c25-aa112c744131] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 828.642985] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83e37504-92b5-4045-930b-f8d55bfa1df5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.780725] env[63028]: DEBUG oslo_concurrency.lockutils [None req-dbd96db3-2cb6-4207-bf54-bf453d821f5b tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.657s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 828.781359] env[63028]: DEBUG nova.compute.manager [None req-dbd96db3-2cb6-4207-bf54-bf453d821f5b tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] [instance: 56d39801-f3e7-4cfe-a038-6a5e762bfda8] Start building networks asynchronously for instance. 
{{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 828.785048] env[63028]: DEBUG oslo_concurrency.lockutils [None req-b709110c-6e00-4eb2-85eb-129887ad428d tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 34.589s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 828.787054] env[63028]: INFO nova.compute.claims [None req-b709110c-6e00-4eb2-85eb-129887ad428d tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] [instance: 13e0ca05-3ab3-43e2-8b0d-8045e26d6723] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 828.859663] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735573, 'name': CreateVM_Task, 'duration_secs': 0.39301} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 828.859860] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 828.860593] env[63028]: DEBUG oslo_concurrency.lockutils [None req-81ee569f-ddd8-4590-ac59-e5b6b9865946 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 828.860764] env[63028]: DEBUG oslo_concurrency.lockutils [None req-81ee569f-ddd8-4590-ac59-e5b6b9865946 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 828.861117] env[63028]: DEBUG oslo_concurrency.lockutils [None req-81ee569f-ddd8-4590-ac59-e5b6b9865946 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 828.861382] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4b59e51c-71f6-462a-a6ad-ff3cd51a5cd8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.870438] env[63028]: DEBUG oslo_vmware.api [None req-81ee569f-ddd8-4590-ac59-e5b6b9865946 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Waiting for the task: (returnval){ [ 828.870438] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52b679c5-cad0-4348-41f1-5c7af446bbd7" [ 828.870438] env[63028]: _type = "Task" [ 828.870438] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 828.878375] env[63028]: DEBUG oslo_vmware.api [None req-81ee569f-ddd8-4590-ac59-e5b6b9865946 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52b679c5-cad0-4348-41f1-5c7af446bbd7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.894023] env[63028]: DEBUG nova.virt.hardware [None req-db2d1a1b-bab5-4d65-92a7-d30b7a67907c tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 828.894023] env[63028]: DEBUG nova.virt.hardware [None req-db2d1a1b-bab5-4d65-92a7-d30b7a67907c tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 828.894201] env[63028]: DEBUG nova.virt.hardware [None req-db2d1a1b-bab5-4d65-92a7-d30b7a67907c tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 828.894349] env[63028]: DEBUG nova.virt.hardware [None req-db2d1a1b-bab5-4d65-92a7-d30b7a67907c tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 828.894497] env[63028]: DEBUG nova.virt.hardware [None req-db2d1a1b-bab5-4d65-92a7-d30b7a67907c tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 828.894642] env[63028]: DEBUG nova.virt.hardware [None req-db2d1a1b-bab5-4d65-92a7-d30b7a67907c tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 828.894846] env[63028]: DEBUG nova.virt.hardware [None req-db2d1a1b-bab5-4d65-92a7-d30b7a67907c tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 828.895055] env[63028]: DEBUG nova.virt.hardware [None req-db2d1a1b-bab5-4d65-92a7-d30b7a67907c tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 828.895235] env[63028]: DEBUG nova.virt.hardware [None req-db2d1a1b-bab5-4d65-92a7-d30b7a67907c tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 828.895400] env[63028]: DEBUG nova.virt.hardware [None req-db2d1a1b-bab5-4d65-92a7-d30b7a67907c tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 828.895605] env[63028]: DEBUG nova.virt.hardware [None req-db2d1a1b-bab5-4d65-92a7-d30b7a67907c tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 828.896813] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b33c270-f1f4-4a44-974e-a02a6a7db9e7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.904286] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a70cef4b-ef94-4096-b85d-da4ebf2059e5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.919876] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-db2d1a1b-bab5-4d65-92a7-d30b7a67907c tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 70888889-4965-47ab-ad47-59f1c1286bd8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7f:a5:72', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c8459aaf-d6a8-46fb-ad14-464ac3104695', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '391ce9f8-949e-4d9c-a74f-8c48a5dd3dd0', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 828.927512] env[63028]: DEBUG oslo.service.loopingcall [None req-db2d1a1b-bab5-4d65-92a7-d30b7a67907c tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 828.927777] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 70888889-4965-47ab-ad47-59f1c1286bd8] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 828.928000] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4e2a90f1-92f3-4515-8a68-873d89a3fb53 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.948273] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 828.948273] env[63028]: value = "task-2735575" [ 828.948273] env[63028]: _type = "Task" [ 828.948273] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 828.956109] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735575, 'name': CreateVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.004047] env[63028]: DEBUG oslo_vmware.api [None req-164a375f-5a7c-48ea-8de2-0424503d2164 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': task-2735574, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07232} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.004047] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-164a375f-5a7c-48ea-8de2-0424503d2164 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: f4718363-73b2-4016-8849-f75e98259023] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 829.004716] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5c57efd-5e4d-4134-a21f-dd8ceb71613d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.028573] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-164a375f-5a7c-48ea-8de2-0424503d2164 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: f4718363-73b2-4016-8849-f75e98259023] Reconfiguring VM instance instance-0000003d to attach disk [datastore2] f4718363-73b2-4016-8849-f75e98259023/f4718363-73b2-4016-8849-f75e98259023.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 829.028917] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-501e28cb-7d39-4ab5-9bfb-fc51dd49edd9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.052721] env[63028]: DEBUG oslo_vmware.api [None req-164a375f-5a7c-48ea-8de2-0424503d2164 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Waiting for the task: (returnval){ [ 829.052721] env[63028]: value = "task-2735576" [ 829.052721] env[63028]: _type = "Task" [ 829.052721] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.060876] env[63028]: DEBUG oslo_vmware.api [None req-164a375f-5a7c-48ea-8de2-0424503d2164 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': task-2735576, 'name': ReconfigVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.148243] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f456b00f-8e30-4002-90c9-4a456674e99d tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Acquiring lock "b77ba7d6-305e-4b60-a4b7-9353c12c3920" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 829.148555] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f456b00f-8e30-4002-90c9-4a456674e99d tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Lock "b77ba7d6-305e-4b60-a4b7-9353c12c3920" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 829.165492] env[63028]: INFO nova.compute.manager [None req-c668e099-ea76-49a7-8eba-290ebc50fa5f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 514c83d1-4fb1-435c-8c25-aa112c744131] Took 49.75 seconds to build instance. [ 829.293167] env[63028]: DEBUG nova.compute.utils [None req-dbd96db3-2cb6-4207-bf54-bf453d821f5b tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 829.297811] env[63028]: DEBUG nova.compute.manager [None req-dbd96db3-2cb6-4207-bf54-bf453d821f5b tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] [instance: 56d39801-f3e7-4cfe-a038-6a5e762bfda8] Allocating IP information in the background. {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 829.298178] env[63028]: DEBUG nova.network.neutron [None req-dbd96db3-2cb6-4207-bf54-bf453d821f5b tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] [instance: 56d39801-f3e7-4cfe-a038-6a5e762bfda8] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 829.306366] env[63028]: DEBUG nova.network.neutron [req-97ccf32c-7623-4835-b555-7c33a515914e req-4544a200-09fa-400d-95bc-0247c0df740a service nova] [instance: c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7] Updated VIF entry in instance network info cache for port 6ecb125b-389c-4dce-8446-368a7298e497. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 829.306727] env[63028]: DEBUG nova.network.neutron [req-97ccf32c-7623-4835-b555-7c33a515914e req-4544a200-09fa-400d-95bc-0247c0df740a service nova] [instance: c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7] Updating instance_info_cache with network_info: [{"id": "6ecb125b-389c-4dce-8446-368a7298e497", "address": "fa:16:3e:f4:06:c4", "network": {"id": "ec7daf90-9857-4c67-8588-1d8404499409", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-574413795-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e85128c5c889438bbb1df571b7756c6a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e1a5c1-4ae7-409b-8de7-d401684ef60d", "external-id": "nsx-vlan-transportzone-740", "segmentation_id": 740, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6ecb125b-38", "ovs_interfaceid": "6ecb125b-389c-4dce-8446-368a7298e497", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 829.341579] env[63028]: DEBUG nova.policy [None req-dbd96db3-2cb6-4207-bf54-bf453d821f5b tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '80457ede6ae84c748d67e550a68387e4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e9b6ca6cccb940f0a516e265a721fd03', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 829.379344] env[63028]: DEBUG oslo_vmware.api [None req-81ee569f-ddd8-4590-ac59-e5b6b9865946 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52b679c5-cad0-4348-41f1-5c7af446bbd7, 'name': SearchDatastore_Task, 'duration_secs': 0.020074} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.379715] env[63028]: DEBUG oslo_concurrency.lockutils [None req-81ee569f-ddd8-4590-ac59-e5b6b9865946 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 829.379992] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-81ee569f-ddd8-4590-ac59-e5b6b9865946 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 829.380324] env[63028]: DEBUG oslo_concurrency.lockutils [None req-81ee569f-ddd8-4590-ac59-e5b6b9865946 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 829.380510] env[63028]: DEBUG oslo_concurrency.lockutils [None req-81ee569f-ddd8-4590-ac59-e5b6b9865946 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 829.380722] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-81ee569f-ddd8-4590-ac59-e5b6b9865946 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 829.381036] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9d3904a5-4471-4ed4-ad43-b403f833b5d5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.390659] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-81ee569f-ddd8-4590-ac59-e5b6b9865946 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 829.390921] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-81ee569f-ddd8-4590-ac59-e5b6b9865946 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 829.392053] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ce429182-7cc3-4877-9785-8ea73775f5f5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.397335] env[63028]: DEBUG oslo_vmware.api [None req-81ee569f-ddd8-4590-ac59-e5b6b9865946 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Waiting for the task: (returnval){ [ 829.397335] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5260b83f-a9f9-710d-a812-8cf4bb4da020" [ 829.397335] env[63028]: _type = "Task" [ 829.397335] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.405367] env[63028]: DEBUG oslo_vmware.api [None req-81ee569f-ddd8-4590-ac59-e5b6b9865946 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5260b83f-a9f9-710d-a812-8cf4bb4da020, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.458933] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735575, 'name': CreateVM_Task} progress is 99%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.566331] env[63028]: DEBUG oslo_vmware.api [None req-164a375f-5a7c-48ea-8de2-0424503d2164 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': task-2735576, 'name': ReconfigVM_Task, 'duration_secs': 0.345633} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.570017] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-164a375f-5a7c-48ea-8de2-0424503d2164 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: f4718363-73b2-4016-8849-f75e98259023] Reconfigured VM instance instance-0000003d to attach disk [datastore2] f4718363-73b2-4016-8849-f75e98259023/f4718363-73b2-4016-8849-f75e98259023.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 829.570017] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1c3bb1e9-dc1d-4ea8-8ef9-40032435fed7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.577857] env[63028]: DEBUG oslo_vmware.api [None req-164a375f-5a7c-48ea-8de2-0424503d2164 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Waiting for the task: (returnval){ [ 829.577857] env[63028]: value = "task-2735577" [ 829.577857] env[63028]: _type = "Task" [ 829.577857] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.589154] env[63028]: DEBUG oslo_vmware.api [None req-164a375f-5a7c-48ea-8de2-0424503d2164 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': task-2735577, 'name': Rename_Task} progress is 5%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.673840] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c668e099-ea76-49a7-8eba-290ebc50fa5f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Lock "514c83d1-4fb1-435c-8c25-aa112c744131" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 69.835s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 829.719140] env[63028]: DEBUG nova.network.neutron [None req-dbd96db3-2cb6-4207-bf54-bf453d821f5b tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] [instance: 56d39801-f3e7-4cfe-a038-6a5e762bfda8] Successfully created port: 333fcbb4-150d-4ee3-bd25-8ef1d049b40d {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 829.800034] env[63028]: DEBUG nova.compute.manager [None req-dbd96db3-2cb6-4207-bf54-bf453d821f5b tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] [instance: 56d39801-f3e7-4cfe-a038-6a5e762bfda8] Start building block device mappings for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 829.811067] env[63028]: DEBUG oslo_concurrency.lockutils [req-97ccf32c-7623-4835-b555-7c33a515914e req-4544a200-09fa-400d-95bc-0247c0df740a service nova] Releasing lock "refresh_cache-c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 829.908178] env[63028]: DEBUG oslo_vmware.api [None req-81ee569f-ddd8-4590-ac59-e5b6b9865946 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5260b83f-a9f9-710d-a812-8cf4bb4da020, 'name': SearchDatastore_Task, 'duration_secs': 0.009836} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.911349] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-10171126-dfcd-4012-b38e-7c9117949723 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.916461] env[63028]: DEBUG oslo_vmware.api [None req-81ee569f-ddd8-4590-ac59-e5b6b9865946 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Waiting for the task: (returnval){ [ 829.916461] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52776c72-254f-ba76-9a91-501108d24d42" [ 829.916461] env[63028]: _type = "Task" [ 829.916461] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.928034] env[63028]: DEBUG oslo_vmware.api [None req-81ee569f-ddd8-4590-ac59-e5b6b9865946 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52776c72-254f-ba76-9a91-501108d24d42, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.958386] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735575, 'name': CreateVM_Task, 'duration_secs': 0.51534} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.959330] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 70888889-4965-47ab-ad47-59f1c1286bd8] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 829.959330] env[63028]: DEBUG oslo_concurrency.lockutils [None req-db2d1a1b-bab5-4d65-92a7-d30b7a67907c tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 829.959601] env[63028]: DEBUG oslo_concurrency.lockutils [None req-db2d1a1b-bab5-4d65-92a7-d30b7a67907c tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 829.959706] env[63028]: DEBUG oslo_concurrency.lockutils [None req-db2d1a1b-bab5-4d65-92a7-d30b7a67907c tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 829.962246] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-beb37739-26cf-461e-ad38-4df5507b856a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.967131] env[63028]: DEBUG oslo_vmware.api [None req-db2d1a1b-bab5-4d65-92a7-d30b7a67907c tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Waiting for the task: (returnval){ [ 829.967131] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]527fe164-64e1-10dd-d8a7-b5d37c78f676" [ 829.967131] env[63028]: _type = "Task" [ 829.967131] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.974826] env[63028]: DEBUG oslo_vmware.api [None req-db2d1a1b-bab5-4d65-92a7-d30b7a67907c tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]527fe164-64e1-10dd-d8a7-b5d37c78f676, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.089947] env[63028]: DEBUG oslo_vmware.api [None req-164a375f-5a7c-48ea-8de2-0424503d2164 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': task-2735577, 'name': Rename_Task, 'duration_secs': 0.171429} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.090294] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-164a375f-5a7c-48ea-8de2-0424503d2164 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: f4718363-73b2-4016-8849-f75e98259023] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 830.090575] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-adf2b945-69e8-4b4c-8113-0dca7a0806dc {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.097387] env[63028]: DEBUG oslo_vmware.api [None req-164a375f-5a7c-48ea-8de2-0424503d2164 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Waiting for the task: (returnval){ [ 830.097387] env[63028]: value = "task-2735578" [ 830.097387] env[63028]: _type = "Task" [ 830.097387] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.107181] env[63028]: DEBUG oslo_vmware.api [None req-164a375f-5a7c-48ea-8de2-0424503d2164 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': task-2735578, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.177089] env[63028]: DEBUG nova.compute.manager [None req-477590c7-7359-44a5-a332-6d0b0fe72ca7 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 1d008794-3c1a-46c6-b4eb-3d5441efdb22] Starting instance... {{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 830.310520] env[63028]: DEBUG oslo_vmware.rw_handles [None req-e6afe889-fe9d-4144-b620-f9ab47407b37 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52984004-3adf-7da2-b424-e42ae4ceec18/disk-0.vmdk. {{(pid=63028) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 830.314659] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a8f3ca2-6977-46b7-925f-0bb08228f586 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.324395] env[63028]: DEBUG oslo_vmware.rw_handles [None req-e6afe889-fe9d-4144-b620-f9ab47407b37 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52984004-3adf-7da2-b424-e42ae4ceec18/disk-0.vmdk is in state: ready. {{(pid=63028) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 830.324572] env[63028]: ERROR oslo_vmware.rw_handles [None req-e6afe889-fe9d-4144-b620-f9ab47407b37 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52984004-3adf-7da2-b424-e42ae4ceec18/disk-0.vmdk due to incomplete transfer. 
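
Editor's note: the repeated "Waiting for the task: (returnval){ ... }" and "Task: {...} progress is N%" records throughout this section come from oslo.vmware's task-polling helper used by the Nova vmwareapi driver. The following minimal Python sketch is not the Nova driver code itself; the hostname, credentials, and managed object reference are placeholders, and running it would contact a real vCenter. It only illustrates, under those assumptions, the invoke-task-then-wait pattern that generates these log lines.

    # Minimal sketch of the oslo.vmware task pattern seen in this log
    # (placeholder vCenter host, credentials, and VM moref).
    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    # Opens the SOAP session (logged earlier as "Successfully established
    # new session"); task_poll_interval controls how often progress is polled.
    session = vmware_api.VMwareAPISession(
        'vcenter.example.test', 'user', 'password',
        api_retry_count=10, task_poll_interval=0.5)

    # Build a managed object reference for an existing VM (placeholder id),
    # then invoke a *_Task API; the call returns a task reference immediately.
    vm_ref = vim_util.get_moref('vm-1234', 'VirtualMachine')
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

    # wait_for_task polls the task state and emits the "progress is N%"
    # DEBUG records until the task succeeds or raises on error/cancel.
    task_info = session.wait_for_task(task)
    print(task_info.state)

The same wait_for_task call sits behind the SearchDatastore_Task, CopyVirtualDisk_Task, CreateVM_Task, Rename_Task and Destroy_Task completions recorded above and below.
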
[ 830.324803] env[63028]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-34c4d46c-0d1d-4715-a11c-55d9e948139b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.333562] env[63028]: DEBUG oslo_vmware.rw_handles [None req-e6afe889-fe9d-4144-b620-f9ab47407b37 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52984004-3adf-7da2-b424-e42ae4ceec18/disk-0.vmdk. {{(pid=63028) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 830.333562] env[63028]: DEBUG nova.virt.vmwareapi.images [None req-e6afe889-fe9d-4144-b620-f9ab47407b37 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Uploaded image e5fa6135-d800-4c70-a08e-8ec0f4d40fa6 to the Glance image server {{(pid=63028) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 830.337121] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6afe889-fe9d-4144-b620-f9ab47407b37 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Destroying the VM {{(pid=63028) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 830.337121] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-206a1db7-f850-42f6-8c54-14b436f53c9f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.341482] env[63028]: DEBUG oslo_vmware.api [None req-e6afe889-fe9d-4144-b620-f9ab47407b37 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Waiting for the task: (returnval){ [ 830.341482] env[63028]: value = "task-2735579" [ 830.341482] env[63028]: _type = "Task" [ 830.341482] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.353612] env[63028]: DEBUG oslo_vmware.api [None req-e6afe889-fe9d-4144-b620-f9ab47407b37 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2735579, 'name': Destroy_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.371153] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e848036-d6d9-4e50-93ce-84f24e9082d9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.378792] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eadba0e6-8855-45bd-9e30-aceaa9a37292 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.410260] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21343f39-74b8-467c-8bd6-48a9fdc175f2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.417620] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e94d1c51-cbfd-4dab-b7ac-ac76ce64077a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.433535] env[63028]: DEBUG nova.compute.provider_tree [None req-b709110c-6e00-4eb2-85eb-129887ad428d tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 830.438945] env[63028]: DEBUG oslo_vmware.api [None req-81ee569f-ddd8-4590-ac59-e5b6b9865946 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52776c72-254f-ba76-9a91-501108d24d42, 'name': SearchDatastore_Task, 'duration_secs': 0.010999} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.439211] env[63028]: DEBUG oslo_concurrency.lockutils [None req-81ee569f-ddd8-4590-ac59-e5b6b9865946 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 830.439463] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-81ee569f-ddd8-4590-ac59-e5b6b9865946 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7/c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 830.439693] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-66c58d81-bc9c-4104-9363-622cbeacc49f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.446188] env[63028]: DEBUG oslo_vmware.api [None req-81ee569f-ddd8-4590-ac59-e5b6b9865946 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Waiting for the task: (returnval){ [ 830.446188] env[63028]: value = "task-2735580" [ 830.446188] env[63028]: _type = "Task" [ 830.446188] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.454603] env[63028]: DEBUG oslo_vmware.api [None req-81ee569f-ddd8-4590-ac59-e5b6b9865946 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2735580, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.477064] env[63028]: DEBUG oslo_vmware.api [None req-db2d1a1b-bab5-4d65-92a7-d30b7a67907c tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]527fe164-64e1-10dd-d8a7-b5d37c78f676, 'name': SearchDatastore_Task, 'duration_secs': 0.01021} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.477368] env[63028]: DEBUG oslo_concurrency.lockutils [None req-db2d1a1b-bab5-4d65-92a7-d30b7a67907c tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 830.477607] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-db2d1a1b-bab5-4d65-92a7-d30b7a67907c tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 70888889-4965-47ab-ad47-59f1c1286bd8] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 830.477838] env[63028]: DEBUG oslo_concurrency.lockutils [None req-db2d1a1b-bab5-4d65-92a7-d30b7a67907c tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 830.477984] env[63028]: DEBUG oslo_concurrency.lockutils [None req-db2d1a1b-bab5-4d65-92a7-d30b7a67907c tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 830.478212] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-db2d1a1b-bab5-4d65-92a7-d30b7a67907c tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 830.478479] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3f24443d-d4ce-4c71-a1c1-d6b4c412286d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.487256] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-db2d1a1b-bab5-4d65-92a7-d30b7a67907c tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 830.487256] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-db2d1a1b-bab5-4d65-92a7-d30b7a67907c tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 830.487406] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0b46b096-3b95-4ffb-bc6e-e4c0f1dbe638 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.493017] env[63028]: DEBUG oslo_vmware.api [None req-db2d1a1b-bab5-4d65-92a7-d30b7a67907c tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Waiting for the task: (returnval){ [ 830.493017] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52b57ee1-6610-74cc-ac6b-0894caf8d1a5" [ 830.493017] env[63028]: _type = "Task" [ 830.493017] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.499628] env[63028]: DEBUG oslo_vmware.api [None req-db2d1a1b-bab5-4d65-92a7-d30b7a67907c tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52b57ee1-6610-74cc-ac6b-0894caf8d1a5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.607525] env[63028]: DEBUG oslo_vmware.api [None req-164a375f-5a7c-48ea-8de2-0424503d2164 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': task-2735578, 'name': PowerOnVM_Task, 'duration_secs': 0.467499} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.607898] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-164a375f-5a7c-48ea-8de2-0424503d2164 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: f4718363-73b2-4016-8849-f75e98259023] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 830.608175] env[63028]: INFO nova.compute.manager [None req-164a375f-5a7c-48ea-8de2-0424503d2164 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: f4718363-73b2-4016-8849-f75e98259023] Took 7.49 seconds to spawn the instance on the hypervisor. 
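The Acquiring/Acquired/Releasing lock entries around "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" show per-image serialization of the datastore image cache: check whether the cached VMDK exists (SearchDatastore_Task), create the cache folder and fetch the image only if it is missing, then copy the cached disk into the instance folder. A hedged sketch of that pattern, assuming hypothetical datastore_exists/fetch_image_to_cache/copy_virtual_disk helpers in place of the real vmops/ds_util/vm_util calls:

    from oslo_concurrency import lockutils

    def ensure_instance_disk(session, image_id, instance_uuid):
        cache_vmdk = (f"[datastore2] devstack-image-cache_base/"
                      f"{image_id}/{image_id}.vmdk")
        # Serialize on the cached VMDK path, matching the lock entries above.
        with lockutils.lock(cache_vmdk):
            if not datastore_exists(session, cache_vmdk):      # SearchDatastore_Task
                fetch_image_to_cache(session, image_id, cache_vmdk)
        target = f"[datastore2] {instance_uuid}/{instance_uuid}.vmdk"
        copy_virtual_disk(session, cache_vmdk, target)          # CopyVirtualDisk_Task
        return target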
[ 830.608360] env[63028]: DEBUG nova.compute.manager [None req-164a375f-5a7c-48ea-8de2-0424503d2164 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: f4718363-73b2-4016-8849-f75e98259023] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 830.609651] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04d7946f-5988-4766-947c-c8e671ba8f55 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.702127] env[63028]: DEBUG oslo_concurrency.lockutils [None req-477590c7-7359-44a5-a332-6d0b0fe72ca7 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 830.819753] env[63028]: DEBUG nova.compute.manager [None req-dbd96db3-2cb6-4207-bf54-bf453d821f5b tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] [instance: 56d39801-f3e7-4cfe-a038-6a5e762bfda8] Start spawning the instance on the hypervisor. {{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 830.842894] env[63028]: DEBUG nova.virt.hardware [None req-dbd96db3-2cb6-4207-bf54-bf453d821f5b tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 830.843184] env[63028]: DEBUG nova.virt.hardware [None req-dbd96db3-2cb6-4207-bf54-bf453d821f5b tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 830.843349] env[63028]: DEBUG nova.virt.hardware [None req-dbd96db3-2cb6-4207-bf54-bf453d821f5b tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 830.843538] env[63028]: DEBUG nova.virt.hardware [None req-dbd96db3-2cb6-4207-bf54-bf453d821f5b tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 830.843685] env[63028]: DEBUG nova.virt.hardware [None req-dbd96db3-2cb6-4207-bf54-bf453d821f5b tempest-VolumesAdminNegativeTest-34143088 
tempest-VolumesAdminNegativeTest-34143088-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 830.843834] env[63028]: DEBUG nova.virt.hardware [None req-dbd96db3-2cb6-4207-bf54-bf453d821f5b tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 830.844060] env[63028]: DEBUG nova.virt.hardware [None req-dbd96db3-2cb6-4207-bf54-bf453d821f5b tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 830.844243] env[63028]: DEBUG nova.virt.hardware [None req-dbd96db3-2cb6-4207-bf54-bf453d821f5b tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 830.844391] env[63028]: DEBUG nova.virt.hardware [None req-dbd96db3-2cb6-4207-bf54-bf453d821f5b tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 830.844675] env[63028]: DEBUG nova.virt.hardware [None req-dbd96db3-2cb6-4207-bf54-bf453d821f5b tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 830.844847] env[63028]: DEBUG nova.virt.hardware [None req-dbd96db3-2cb6-4207-bf54-bf453d821f5b tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 830.846152] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d70626c0-c82a-4715-91ea-09c676056643 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.861883] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-497a7380-fa29-4a17-b64c-d7430d6600ff {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.866169] env[63028]: DEBUG oslo_vmware.api [None req-e6afe889-fe9d-4144-b620-f9ab47407b37 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2735579, 'name': Destroy_Task} progress is 33%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.937296] env[63028]: DEBUG nova.scheduler.client.report [None req-b709110c-6e00-4eb2-85eb-129887ad428d tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 830.955092] env[63028]: DEBUG oslo_vmware.api [None req-81ee569f-ddd8-4590-ac59-e5b6b9865946 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2735580, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.487268} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.955933] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-81ee569f-ddd8-4590-ac59-e5b6b9865946 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7/c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 830.956172] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-81ee569f-ddd8-4590-ac59-e5b6b9865946 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 830.956428] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0f3047f3-e63e-491c-8d1a-a1412f58412b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.962633] env[63028]: DEBUG oslo_vmware.api [None req-81ee569f-ddd8-4590-ac59-e5b6b9865946 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Waiting for the task: (returnval){ [ 830.962633] env[63028]: value = "task-2735581" [ 830.962633] env[63028]: _type = "Task" [ 830.962633] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.970435] env[63028]: DEBUG oslo_vmware.api [None req-81ee569f-ddd8-4590-ac59-e5b6b9865946 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2735581, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.002095] env[63028]: DEBUG oslo_vmware.api [None req-db2d1a1b-bab5-4d65-92a7-d30b7a67907c tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52b57ee1-6610-74cc-ac6b-0894caf8d1a5, 'name': SearchDatastore_Task, 'duration_secs': 0.009057} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.002893] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-edabaee1-689a-44cc-a146-5b948f1e1ed1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.007985] env[63028]: DEBUG oslo_vmware.api [None req-db2d1a1b-bab5-4d65-92a7-d30b7a67907c tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Waiting for the task: (returnval){ [ 831.007985] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52547561-c493-8749-90bb-776243fdc092" [ 831.007985] env[63028]: _type = "Task" [ 831.007985] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.015485] env[63028]: DEBUG oslo_vmware.api [None req-db2d1a1b-bab5-4d65-92a7-d30b7a67907c tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52547561-c493-8749-90bb-776243fdc092, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.136218] env[63028]: INFO nova.compute.manager [None req-164a375f-5a7c-48ea-8de2-0424503d2164 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: f4718363-73b2-4016-8849-f75e98259023] Took 50.97 seconds to build instance. 
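The "Extending root virtual disk to 1048576" entries are consistent with the requested size being expressed in KiB: the m1.nano flavor shown above has root_gb=1, and 1 GiB is 1024 * 1024 = 1048576 KiB. A one-line check of that arithmetic (the unit interpretation is an assumption inferred from the flavor in the log, not stated by it):

    root_gb = 1                        # m1.nano flavor from the log above
    size_kib = root_gb * 1024 * 1024   # 1 GiB expressed in KiB
    assert size_kib == 1048576         # matches "Extending root virtual disk to 1048576"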
[ 831.297544] env[63028]: DEBUG nova.compute.manager [req-68522cbc-19fa-41a5-a181-00875af790e1 req-576984f2-578b-4dc3-b0db-27bfb6f88dfe service nova] [instance: 56d39801-f3e7-4cfe-a038-6a5e762bfda8] Received event network-vif-plugged-333fcbb4-150d-4ee3-bd25-8ef1d049b40d {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 831.297768] env[63028]: DEBUG oslo_concurrency.lockutils [req-68522cbc-19fa-41a5-a181-00875af790e1 req-576984f2-578b-4dc3-b0db-27bfb6f88dfe service nova] Acquiring lock "56d39801-f3e7-4cfe-a038-6a5e762bfda8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 831.298054] env[63028]: DEBUG oslo_concurrency.lockutils [req-68522cbc-19fa-41a5-a181-00875af790e1 req-576984f2-578b-4dc3-b0db-27bfb6f88dfe service nova] Lock "56d39801-f3e7-4cfe-a038-6a5e762bfda8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 831.298220] env[63028]: DEBUG oslo_concurrency.lockutils [req-68522cbc-19fa-41a5-a181-00875af790e1 req-576984f2-578b-4dc3-b0db-27bfb6f88dfe service nova] Lock "56d39801-f3e7-4cfe-a038-6a5e762bfda8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 831.298401] env[63028]: DEBUG nova.compute.manager [req-68522cbc-19fa-41a5-a181-00875af790e1 req-576984f2-578b-4dc3-b0db-27bfb6f88dfe service nova] [instance: 56d39801-f3e7-4cfe-a038-6a5e762bfda8] No waiting events found dispatching network-vif-plugged-333fcbb4-150d-4ee3-bd25-8ef1d049b40d {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 831.298564] env[63028]: WARNING nova.compute.manager [req-68522cbc-19fa-41a5-a181-00875af790e1 req-576984f2-578b-4dc3-b0db-27bfb6f88dfe service nova] [instance: 56d39801-f3e7-4cfe-a038-6a5e762bfda8] Received unexpected event network-vif-plugged-333fcbb4-150d-4ee3-bd25-8ef1d049b40d for instance with vm_state building and task_state spawning. [ 831.354875] env[63028]: DEBUG oslo_vmware.api [None req-e6afe889-fe9d-4144-b620-f9ab47407b37 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2735579, 'name': Destroy_Task, 'duration_secs': 0.642255} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.355136] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-e6afe889-fe9d-4144-b620-f9ab47407b37 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Destroyed the VM [ 831.355376] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-e6afe889-fe9d-4144-b620-f9ab47407b37 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Deleting Snapshot of the VM instance {{(pid=63028) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 831.355621] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-338a20fa-0e16-4648-9210-4226ab360a4e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.361561] env[63028]: DEBUG oslo_vmware.api [None req-e6afe889-fe9d-4144-b620-f9ab47407b37 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Waiting for the task: (returnval){ [ 831.361561] env[63028]: value = "task-2735582" [ 831.361561] env[63028]: _type = "Task" [ 831.361561] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.369396] env[63028]: DEBUG oslo_vmware.api [None req-e6afe889-fe9d-4144-b620-f9ab47407b37 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2735582, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.378746] env[63028]: DEBUG nova.network.neutron [None req-dbd96db3-2cb6-4207-bf54-bf453d821f5b tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] [instance: 56d39801-f3e7-4cfe-a038-6a5e762bfda8] Successfully updated port: 333fcbb4-150d-4ee3-bd25-8ef1d049b40d {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 831.403812] env[63028]: INFO nova.compute.manager [None req-541b8fc7-3b9c-4592-9ae6-6bc4eccb0a4c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 514c83d1-4fb1-435c-8c25-aa112c744131] Rebuilding instance [ 831.442596] env[63028]: DEBUG oslo_concurrency.lockutils [None req-b709110c-6e00-4eb2-85eb-129887ad428d tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.657s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 831.443146] env[63028]: DEBUG nova.compute.manager [None req-b709110c-6e00-4eb2-85eb-129887ad428d tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] [instance: 13e0ca05-3ab3-43e2-8b0d-8045e26d6723] Start building networks asynchronously for instance. 
{{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 831.447722] env[63028]: DEBUG oslo_concurrency.lockutils [None req-e7a62a25-939b-4556-8d49-f66e9a7f16ee tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.922s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 831.447941] env[63028]: DEBUG nova.objects.instance [None req-e7a62a25-939b-4556-8d49-f66e9a7f16ee tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Lazy-loading 'resources' on Instance uuid 1af19279-e75b-4ec5-91f1-a0a101b229b2 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 831.452704] env[63028]: DEBUG nova.compute.manager [None req-541b8fc7-3b9c-4592-9ae6-6bc4eccb0a4c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 514c83d1-4fb1-435c-8c25-aa112c744131] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 831.453550] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fcad56d-10f6-4848-b663-c34ff836a958 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.472355] env[63028]: DEBUG oslo_vmware.api [None req-81ee569f-ddd8-4590-ac59-e5b6b9865946 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2735581, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.218113} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.472607] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-81ee569f-ddd8-4590-ac59-e5b6b9865946 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 831.473379] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2f2adf5-5f8e-407c-83fc-deeb55028406 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.498112] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-81ee569f-ddd8-4590-ac59-e5b6b9865946 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7] Reconfiguring VM instance instance-0000003e to attach disk [datastore2] c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7/c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 831.499233] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dcbc596a-2610-47e3-9c09-08839b862f7b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.524696] env[63028]: DEBUG oslo_vmware.api [None req-db2d1a1b-bab5-4d65-92a7-d30b7a67907c tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52547561-c493-8749-90bb-776243fdc092, 'name': SearchDatastore_Task, 'duration_secs': 0.00904} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.526159] env[63028]: DEBUG oslo_concurrency.lockutils [None req-db2d1a1b-bab5-4d65-92a7-d30b7a67907c tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 831.526538] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-db2d1a1b-bab5-4d65-92a7-d30b7a67907c tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] 70888889-4965-47ab-ad47-59f1c1286bd8/70888889-4965-47ab-ad47-59f1c1286bd8.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 831.526931] env[63028]: DEBUG oslo_vmware.api [None req-81ee569f-ddd8-4590-ac59-e5b6b9865946 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Waiting for the task: (returnval){ [ 831.526931] env[63028]: value = "task-2735583" [ 831.526931] env[63028]: _type = "Task" [ 831.526931] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.527145] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3a870f23-8974-4553-8d5a-1916bd3a54e8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.538113] env[63028]: DEBUG oslo_vmware.api [None req-81ee569f-ddd8-4590-ac59-e5b6b9865946 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2735583, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.539525] env[63028]: DEBUG oslo_vmware.api [None req-db2d1a1b-bab5-4d65-92a7-d30b7a67907c tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Waiting for the task: (returnval){ [ 831.539525] env[63028]: value = "task-2735584" [ 831.539525] env[63028]: _type = "Task" [ 831.539525] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.547576] env[63028]: DEBUG oslo_vmware.api [None req-db2d1a1b-bab5-4d65-92a7-d30b7a67907c tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2735584, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.642241] env[63028]: DEBUG oslo_concurrency.lockutils [None req-164a375f-5a7c-48ea-8de2-0424503d2164 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Lock "f4718363-73b2-4016-8849-f75e98259023" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 70.121s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 831.815916] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9b460c26-89fc-48c0-b7a6-dc971897af28 tempest-ServersAdminTestJSON-1015437815 tempest-ServersAdminTestJSON-1015437815-project-admin] Acquiring lock "refresh_cache-f4718363-73b2-4016-8849-f75e98259023" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 831.815977] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9b460c26-89fc-48c0-b7a6-dc971897af28 tempest-ServersAdminTestJSON-1015437815 tempest-ServersAdminTestJSON-1015437815-project-admin] Acquired lock "refresh_cache-f4718363-73b2-4016-8849-f75e98259023" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 831.816164] env[63028]: DEBUG nova.network.neutron [None req-9b460c26-89fc-48c0-b7a6-dc971897af28 tempest-ServersAdminTestJSON-1015437815 tempest-ServersAdminTestJSON-1015437815-project-admin] [instance: f4718363-73b2-4016-8849-f75e98259023] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 831.871746] env[63028]: DEBUG oslo_vmware.api [None req-e6afe889-fe9d-4144-b620-f9ab47407b37 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2735582, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.881666] env[63028]: DEBUG oslo_concurrency.lockutils [None req-dbd96db3-2cb6-4207-bf54-bf453d821f5b tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Acquiring lock "refresh_cache-56d39801-f3e7-4cfe-a038-6a5e762bfda8" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 831.881832] env[63028]: DEBUG oslo_concurrency.lockutils [None req-dbd96db3-2cb6-4207-bf54-bf453d821f5b tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Acquired lock "refresh_cache-56d39801-f3e7-4cfe-a038-6a5e762bfda8" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 831.882161] env[63028]: DEBUG nova.network.neutron [None req-dbd96db3-2cb6-4207-bf54-bf453d821f5b tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] [instance: 56d39801-f3e7-4cfe-a038-6a5e762bfda8] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 831.950870] env[63028]: DEBUG nova.compute.utils [None req-b709110c-6e00-4eb2-85eb-129887ad428d tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 831.952567] env[63028]: DEBUG nova.compute.manager [None req-b709110c-6e00-4eb2-85eb-129887ad428d tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] [instance: 13e0ca05-3ab3-43e2-8b0d-8045e26d6723] Not allocating networking since 'none' was specified. {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 832.040341] env[63028]: DEBUG oslo_vmware.api [None req-81ee569f-ddd8-4590-ac59-e5b6b9865946 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2735583, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.052511] env[63028]: DEBUG oslo_vmware.api [None req-db2d1a1b-bab5-4d65-92a7-d30b7a67907c tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2735584, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.144647] env[63028]: DEBUG nova.compute.manager [None req-50c05c22-9a17-46a2-aa9e-4dd4c15fda05 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: f0ca0d73-d428-4b8c-acac-a80b7b7dd793] Starting instance... {{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 832.373237] env[63028]: DEBUG oslo_vmware.api [None req-e6afe889-fe9d-4144-b620-f9ab47407b37 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2735582, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.422124] env[63028]: DEBUG nova.network.neutron [None req-dbd96db3-2cb6-4207-bf54-bf453d821f5b tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] [instance: 56d39801-f3e7-4cfe-a038-6a5e762bfda8] Instance cache missing network info. {{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 832.457529] env[63028]: DEBUG nova.compute.manager [None req-b709110c-6e00-4eb2-85eb-129887ad428d tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] [instance: 13e0ca05-3ab3-43e2-8b0d-8045e26d6723] Start building block device mappings for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 832.472010] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-541b8fc7-3b9c-4592-9ae6-6bc4eccb0a4c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 514c83d1-4fb1-435c-8c25-aa112c744131] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 832.475906] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-99dd0d26-1532-495f-9c5a-f818119e3c9f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.480172] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14287a02-b80f-4437-b57b-f66930f7b6a7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.486475] env[63028]: DEBUG oslo_vmware.api [None req-541b8fc7-3b9c-4592-9ae6-6bc4eccb0a4c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Waiting for the task: (returnval){ [ 832.486475] env[63028]: value = "task-2735585" [ 832.486475] env[63028]: _type = "Task" [ 832.486475] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.493314] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72356f77-07a2-48a1-b45c-f463aa632657 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.500650] env[63028]: DEBUG oslo_vmware.api [None req-541b8fc7-3b9c-4592-9ae6-6bc4eccb0a4c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2735585, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.531973] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bb810d7-4122-4865-a112-b59e2d8b1ae8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.548636] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7743b53-8ab8-401d-b78e-f0947fc32f47 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.552780] env[63028]: DEBUG oslo_vmware.api [None req-81ee569f-ddd8-4590-ac59-e5b6b9865946 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2735583, 'name': ReconfigVM_Task, 'duration_secs': 0.94721} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.553071] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-81ee569f-ddd8-4590-ac59-e5b6b9865946 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7] Reconfigured VM instance instance-0000003e to attach disk [datastore2] c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7/c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 832.555339] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1a1c6379-5c0d-4b25-97d5-0c37fdeaec1d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.558463] env[63028]: DEBUG oslo_vmware.api [None req-db2d1a1b-bab5-4d65-92a7-d30b7a67907c tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2735584, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.563587} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.566242] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-db2d1a1b-bab5-4d65-92a7-d30b7a67907c tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] 70888889-4965-47ab-ad47-59f1c1286bd8/70888889-4965-47ab-ad47-59f1c1286bd8.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 832.566470] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-db2d1a1b-bab5-4d65-92a7-d30b7a67907c tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 70888889-4965-47ab-ad47-59f1c1286bd8] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 832.566929] env[63028]: DEBUG nova.compute.provider_tree [None req-e7a62a25-939b-4556-8d49-f66e9a7f16ee tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 832.570953] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f0adfc40-6cfc-45a8-bb04-95814ca7c85c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.574377] env[63028]: DEBUG oslo_vmware.api [None req-81ee569f-ddd8-4590-ac59-e5b6b9865946 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Waiting for the task: (returnval){ [ 832.574377] env[63028]: value = "task-2735586" [ 832.574377] env[63028]: _type = "Task" [ 832.574377] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.581679] env[63028]: DEBUG oslo_vmware.api [None req-db2d1a1b-bab5-4d65-92a7-d30b7a67907c tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Waiting for the task: (returnval){ [ 832.581679] env[63028]: value = "task-2735587" [ 832.581679] env[63028]: _type = "Task" [ 832.581679] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.588042] env[63028]: DEBUG oslo_vmware.api [None req-81ee569f-ddd8-4590-ac59-e5b6b9865946 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2735586, 'name': Rename_Task} progress is 10%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.593912] env[63028]: DEBUG oslo_vmware.api [None req-db2d1a1b-bab5-4d65-92a7-d30b7a67907c tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2735587, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.608138] env[63028]: DEBUG nova.network.neutron [None req-dbd96db3-2cb6-4207-bf54-bf453d821f5b tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] [instance: 56d39801-f3e7-4cfe-a038-6a5e762bfda8] Updating instance_info_cache with network_info: [{"id": "333fcbb4-150d-4ee3-bd25-8ef1d049b40d", "address": "fa:16:3e:f7:71:1a", "network": {"id": "95ccbf6d-2bd9-42ff-93f9-5f9f541e5ba1", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-863839356-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e9b6ca6cccb940f0a516e265a721fd03", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "06eaa4c9-dbc2-4d38-a844-7bf76e7b5a64", "external-id": "nsx-vlan-transportzone-804", "segmentation_id": 804, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap333fcbb4-15", "ovs_interfaceid": "333fcbb4-150d-4ee3-bd25-8ef1d049b40d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 832.613020] env[63028]: DEBUG nova.network.neutron [None req-9b460c26-89fc-48c0-b7a6-dc971897af28 tempest-ServersAdminTestJSON-1015437815 tempest-ServersAdminTestJSON-1015437815-project-admin] [instance: f4718363-73b2-4016-8849-f75e98259023] Updating instance_info_cache with network_info: [{"id": "d63bc515-e6bf-4c5c-88b1-4d1e21688dc4", "address": "fa:16:3e:23:97:fb", "network": {"id": "8cd8a274-fe00-4495-9dbd-201d72164415", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-450064933-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1d3152ab577947b28de82f4801285f8c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4576b9d4-535c-40aa-b078-246f671f216e", "external-id": "nsx-vlan-transportzone-27", "segmentation_id": 27, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd63bc515-e6", "ovs_interfaceid": "d63bc515-e6bf-4c5c-88b1-4d1e21688dc4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 832.670237] env[63028]: DEBUG oslo_concurrency.lockutils [None req-50c05c22-9a17-46a2-aa9e-4dd4c15fda05 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 832.873959] env[63028]: DEBUG oslo_vmware.api [None req-e6afe889-fe9d-4144-b620-f9ab47407b37 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2735582, 'name': RemoveSnapshot_Task, 'duration_secs': 1.34291} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.874468] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-e6afe889-fe9d-4144-b620-f9ab47407b37 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Deleted Snapshot of the VM instance {{(pid=63028) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 832.874814] env[63028]: INFO nova.compute.manager [None req-e6afe889-fe9d-4144-b620-f9ab47407b37 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Took 16.59 seconds to snapshot the instance on the hypervisor. [ 832.998216] env[63028]: DEBUG oslo_vmware.api [None req-541b8fc7-3b9c-4592-9ae6-6bc4eccb0a4c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2735585, 'name': PowerOffVM_Task, 'duration_secs': 0.221033} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.998491] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-541b8fc7-3b9c-4592-9ae6-6bc4eccb0a4c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 514c83d1-4fb1-435c-8c25-aa112c744131] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 832.998715] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-541b8fc7-3b9c-4592-9ae6-6bc4eccb0a4c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 514c83d1-4fb1-435c-8c25-aa112c744131] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 832.999452] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12028012-b653-43e5-b1db-ae52bda15b2b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.006330] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-541b8fc7-3b9c-4592-9ae6-6bc4eccb0a4c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 514c83d1-4fb1-435c-8c25-aa112c744131] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 833.006773] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a627ff71-c433-498b-bf5a-810d60dc3600 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.063150] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-541b8fc7-3b9c-4592-9ae6-6bc4eccb0a4c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 514c83d1-4fb1-435c-8c25-aa112c744131] Unregistered the VM 
{{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 833.063321] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-541b8fc7-3b9c-4592-9ae6-6bc4eccb0a4c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 514c83d1-4fb1-435c-8c25-aa112c744131] Deleting contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 833.063509] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-541b8fc7-3b9c-4592-9ae6-6bc4eccb0a4c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Deleting the datastore file [datastore1] 514c83d1-4fb1-435c-8c25-aa112c744131 {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 833.063774] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-12e7e37b-754a-4992-9d4c-db09837b9857 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.070280] env[63028]: DEBUG nova.scheduler.client.report [None req-e7a62a25-939b-4556-8d49-f66e9a7f16ee tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 833.075222] env[63028]: DEBUG oslo_vmware.api [None req-541b8fc7-3b9c-4592-9ae6-6bc4eccb0a4c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Waiting for the task: (returnval){ [ 833.075222] env[63028]: value = "task-2735589" [ 833.075222] env[63028]: _type = "Task" [ 833.075222] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.090897] env[63028]: DEBUG oslo_vmware.api [None req-81ee569f-ddd8-4590-ac59-e5b6b9865946 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2735586, 'name': Rename_Task, 'duration_secs': 0.265437} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.091228] env[63028]: DEBUG oslo_vmware.api [None req-541b8fc7-3b9c-4592-9ae6-6bc4eccb0a4c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2735589, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.092399] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-81ee569f-ddd8-4590-ac59-e5b6b9865946 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 833.092649] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-18ffd0bf-6d2f-48a4-b02d-7d8048f16ed6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.096791] env[63028]: DEBUG oslo_vmware.api [None req-db2d1a1b-bab5-4d65-92a7-d30b7a67907c tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2735587, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077048} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.097344] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-db2d1a1b-bab5-4d65-92a7-d30b7a67907c tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 70888889-4965-47ab-ad47-59f1c1286bd8] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 833.098090] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e7f6822-92f6-453d-9a5f-d0af577fc5b5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.102122] env[63028]: DEBUG oslo_vmware.api [None req-81ee569f-ddd8-4590-ac59-e5b6b9865946 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Waiting for the task: (returnval){ [ 833.102122] env[63028]: value = "task-2735590" [ 833.102122] env[63028]: _type = "Task" [ 833.102122] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.113929] env[63028]: DEBUG oslo_concurrency.lockutils [None req-dbd96db3-2cb6-4207-bf54-bf453d821f5b tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Releasing lock "refresh_cache-56d39801-f3e7-4cfe-a038-6a5e762bfda8" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 833.114246] env[63028]: DEBUG nova.compute.manager [None req-dbd96db3-2cb6-4207-bf54-bf453d821f5b tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] [instance: 56d39801-f3e7-4cfe-a038-6a5e762bfda8] Instance network_info: |[{"id": "333fcbb4-150d-4ee3-bd25-8ef1d049b40d", "address": "fa:16:3e:f7:71:1a", "network": {"id": "95ccbf6d-2bd9-42ff-93f9-5f9f541e5ba1", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-863839356-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e9b6ca6cccb940f0a516e265a721fd03", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "06eaa4c9-dbc2-4d38-a844-7bf76e7b5a64", "external-id": "nsx-vlan-transportzone-804", "segmentation_id": 804, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap333fcbb4-15", "ovs_interfaceid": "333fcbb4-150d-4ee3-bd25-8ef1d049b40d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 833.114678] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9b460c26-89fc-48c0-b7a6-dc971897af28 tempest-ServersAdminTestJSON-1015437815 tempest-ServersAdminTestJSON-1015437815-project-admin] Releasing lock "refresh_cache-f4718363-73b2-4016-8849-f75e98259023" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 833.114857] env[63028]: DEBUG nova.compute.manager [None req-9b460c26-89fc-48c0-b7a6-dc971897af28 tempest-ServersAdminTestJSON-1015437815 tempest-ServersAdminTestJSON-1015437815-project-admin] [instance: f4718363-73b2-4016-8849-f75e98259023] Inject network info {{(pid=63028) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7737}} [ 833.115084] env[63028]: DEBUG nova.compute.manager [None req-9b460c26-89fc-48c0-b7a6-dc971897af28 tempest-ServersAdminTestJSON-1015437815 tempest-ServersAdminTestJSON-1015437815-project-admin] [instance: f4718363-73b2-4016-8849-f75e98259023] network_info to inject: |[{"id": "d63bc515-e6bf-4c5c-88b1-4d1e21688dc4", "address": "fa:16:3e:23:97:fb", "network": {"id": "8cd8a274-fe00-4495-9dbd-201d72164415", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-450064933-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": 
"192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1d3152ab577947b28de82f4801285f8c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4576b9d4-535c-40aa-b078-246f671f216e", "external-id": "nsx-vlan-transportzone-27", "segmentation_id": 27, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd63bc515-e6", "ovs_interfaceid": "d63bc515-e6bf-4c5c-88b1-4d1e21688dc4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7738}} [ 833.119712] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-9b460c26-89fc-48c0-b7a6-dc971897af28 tempest-ServersAdminTestJSON-1015437815 tempest-ServersAdminTestJSON-1015437815-project-admin] [instance: f4718363-73b2-4016-8849-f75e98259023] Reconfiguring VM instance to set the machine id {{(pid=63028) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 833.128232] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-db2d1a1b-bab5-4d65-92a7-d30b7a67907c tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 70888889-4965-47ab-ad47-59f1c1286bd8] Reconfiguring VM instance instance-0000003b to attach disk [datastore2] 70888889-4965-47ab-ad47-59f1c1286bd8/70888889-4965-47ab-ad47-59f1c1286bd8.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 833.129117] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-dbd96db3-2cb6-4207-bf54-bf453d821f5b tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] [instance: 56d39801-f3e7-4cfe-a038-6a5e762bfda8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f7:71:1a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '06eaa4c9-dbc2-4d38-a844-7bf76e7b5a64', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '333fcbb4-150d-4ee3-bd25-8ef1d049b40d', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 833.136271] env[63028]: DEBUG oslo.service.loopingcall [None req-dbd96db3-2cb6-4207-bf54-bf453d821f5b tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 833.136477] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-628f0d15-aff0-4bb2-be48-1df06e5d9e19 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.147012] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4758170b-35b0-4ac9-b0d5-2dd32d7abbe7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.167023] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 56d39801-f3e7-4cfe-a038-6a5e762bfda8] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 833.167023] env[63028]: DEBUG oslo_vmware.api [None req-81ee569f-ddd8-4590-ac59-e5b6b9865946 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2735590, 'name': PowerOnVM_Task} progress is 33%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.167023] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-eb388e2b-adb3-4cd2-bab0-1ba09cce0be8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.185224] env[63028]: DEBUG oslo_vmware.api [None req-9b460c26-89fc-48c0-b7a6-dc971897af28 tempest-ServersAdminTestJSON-1015437815 tempest-ServersAdminTestJSON-1015437815-project-admin] Waiting for the task: (returnval){ [ 833.185224] env[63028]: value = "task-2735591" [ 833.185224] env[63028]: _type = "Task" [ 833.185224] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.186575] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 833.186575] env[63028]: value = "task-2735592" [ 833.186575] env[63028]: _type = "Task" [ 833.186575] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.192495] env[63028]: DEBUG oslo_vmware.api [None req-db2d1a1b-bab5-4d65-92a7-d30b7a67907c tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Waiting for the task: (returnval){ [ 833.192495] env[63028]: value = "task-2735593" [ 833.192495] env[63028]: _type = "Task" [ 833.192495] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.203029] env[63028]: DEBUG oslo_vmware.api [None req-9b460c26-89fc-48c0-b7a6-dc971897af28 tempest-ServersAdminTestJSON-1015437815 tempest-ServersAdminTestJSON-1015437815-project-admin] Task: {'id': task-2735591, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.206666] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735592, 'name': CreateVM_Task} progress is 6%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.209926] env[63028]: DEBUG oslo_vmware.api [None req-db2d1a1b-bab5-4d65-92a7-d30b7a67907c tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2735593, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.326569] env[63028]: DEBUG nova.compute.manager [req-c83768e0-fdfe-4172-9d0d-fe88ffbfc58c req-d86a9877-0739-4818-a9f4-bdaa4c9c445c service nova] [instance: 56d39801-f3e7-4cfe-a038-6a5e762bfda8] Received event network-changed-333fcbb4-150d-4ee3-bd25-8ef1d049b40d {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 833.326569] env[63028]: DEBUG nova.compute.manager [req-c83768e0-fdfe-4172-9d0d-fe88ffbfc58c req-d86a9877-0739-4818-a9f4-bdaa4c9c445c service nova] [instance: 56d39801-f3e7-4cfe-a038-6a5e762bfda8] Refreshing instance network info cache due to event network-changed-333fcbb4-150d-4ee3-bd25-8ef1d049b40d. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 833.326667] env[63028]: DEBUG oslo_concurrency.lockutils [req-c83768e0-fdfe-4172-9d0d-fe88ffbfc58c req-d86a9877-0739-4818-a9f4-bdaa4c9c445c service nova] Acquiring lock "refresh_cache-56d39801-f3e7-4cfe-a038-6a5e762bfda8" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 833.326795] env[63028]: DEBUG oslo_concurrency.lockutils [req-c83768e0-fdfe-4172-9d0d-fe88ffbfc58c req-d86a9877-0739-4818-a9f4-bdaa4c9c445c service nova] Acquired lock "refresh_cache-56d39801-f3e7-4cfe-a038-6a5e762bfda8" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 833.327024] env[63028]: DEBUG nova.network.neutron [req-c83768e0-fdfe-4172-9d0d-fe88ffbfc58c req-d86a9877-0739-4818-a9f4-bdaa4c9c445c service nova] [instance: 56d39801-f3e7-4cfe-a038-6a5e762bfda8] Refreshing network info cache for port 333fcbb4-150d-4ee3-bd25-8ef1d049b40d {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 833.436618] env[63028]: DEBUG nova.compute.manager [None req-e6afe889-fe9d-4144-b620-f9ab47407b37 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Found 3 images (rotation: 2) {{(pid=63028) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 833.436821] env[63028]: DEBUG nova.compute.manager [None req-e6afe889-fe9d-4144-b620-f9ab47407b37 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Rotating out 1 backups {{(pid=63028) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5025}} [ 833.437041] env[63028]: DEBUG nova.compute.manager [None req-e6afe889-fe9d-4144-b620-f9ab47407b37 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Deleting image 40205a3b-c684-4015-938e-72089044b955 {{(pid=63028) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5030}} [ 833.468389] env[63028]: DEBUG nova.compute.manager [None req-b709110c-6e00-4eb2-85eb-129887ad428d tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] [instance: 13e0ca05-3ab3-43e2-8b0d-8045e26d6723] Start spawning the instance on the hypervisor. 
{{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 833.495206] env[63028]: DEBUG nova.virt.hardware [None req-b709110c-6e00-4eb2-85eb-129887ad428d tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 833.495401] env[63028]: DEBUG nova.virt.hardware [None req-b709110c-6e00-4eb2-85eb-129887ad428d tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 833.495401] env[63028]: DEBUG nova.virt.hardware [None req-b709110c-6e00-4eb2-85eb-129887ad428d tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 833.495977] env[63028]: DEBUG nova.virt.hardware [None req-b709110c-6e00-4eb2-85eb-129887ad428d tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 833.495977] env[63028]: DEBUG nova.virt.hardware [None req-b709110c-6e00-4eb2-85eb-129887ad428d tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 833.495977] env[63028]: DEBUG nova.virt.hardware [None req-b709110c-6e00-4eb2-85eb-129887ad428d tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 833.496342] env[63028]: DEBUG nova.virt.hardware [None req-b709110c-6e00-4eb2-85eb-129887ad428d tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 833.496557] env[63028]: DEBUG nova.virt.hardware [None req-b709110c-6e00-4eb2-85eb-129887ad428d tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 833.496736] env[63028]: DEBUG nova.virt.hardware [None req-b709110c-6e00-4eb2-85eb-129887ad428d tempest-ServerShowV254Test-799272594 
tempest-ServerShowV254Test-799272594-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 833.496906] env[63028]: DEBUG nova.virt.hardware [None req-b709110c-6e00-4eb2-85eb-129887ad428d tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 833.497080] env[63028]: DEBUG nova.virt.hardware [None req-b709110c-6e00-4eb2-85eb-129887ad428d tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 833.497945] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8332abb6-883e-4eb0-b68c-d256688f9f78 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.506772] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e93e0d64-4d0c-46e5-9208-9368b8a0d827 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.521579] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-b709110c-6e00-4eb2-85eb-129887ad428d tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] [instance: 13e0ca05-3ab3-43e2-8b0d-8045e26d6723] Instance VIF info [] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 833.527714] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-b709110c-6e00-4eb2-85eb-129887ad428d tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Creating folder: Project (0764ec0016b94cf985e085e1488c1696). Parent ref: group-v550570. {{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 833.528028] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-337bfd65-4b83-483b-954b-050370c55d3b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.538946] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-b709110c-6e00-4eb2-85eb-129887ad428d tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Created folder: Project (0764ec0016b94cf985e085e1488c1696) in parent group-v550570. [ 833.539153] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-b709110c-6e00-4eb2-85eb-129887ad428d tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Creating folder: Instances. Parent ref: group-v550768. {{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 833.540937] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4bb68da7-6c87-4a86-9767-cb24f37de28a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.550488] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-b709110c-6e00-4eb2-85eb-129887ad428d tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Created folder: Instances in parent group-v550768. 
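The two "Created folder" INFO lines above come from the driver ensuring a per-project folder hierarchy in vCenter before it registers the new VM. A minimal sketch of that step, assuming an established oslo.vmware session; `session`, `parent_ref`, and `name` are placeholder names for illustration, not values taken from this log:

from oslo_vmware import exceptions as vexc

def ensure_folder(session, parent_ref, name):
    # Folder.CreateFolder is a synchronous vSphere call, so unlike the
    # *_Task invocations elsewhere in this log it needs no task polling.
    try:
        return session.invoke_api(session.vim, 'CreateFolder',
                                  parent_ref, name=name)
    except vexc.DuplicateName:
        # The folder already exists (as with the "Project (...)" folder
        # above); real callers look up and reuse the existing reference
        # instead of treating this as an error.
        return None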
[ 833.550723] env[63028]: DEBUG oslo.service.loopingcall [None req-b709110c-6e00-4eb2-85eb-129887ad428d tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 833.551011] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 13e0ca05-3ab3-43e2-8b0d-8045e26d6723] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 833.551129] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9e1aa7fc-7756-4a46-ae97-1ca63544f37d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.567395] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 833.567395] env[63028]: value = "task-2735596" [ 833.567395] env[63028]: _type = "Task" [ 833.567395] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.575539] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735596, 'name': CreateVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.576351] env[63028]: DEBUG oslo_concurrency.lockutils [None req-e7a62a25-939b-4556-8d49-f66e9a7f16ee tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.129s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 833.578548] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c28be328-46e8-4c9b-904e-d46ed583a88b tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.448s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 833.579987] env[63028]: INFO nova.compute.claims [None req-c28be328-46e8-4c9b-904e-d46ed583a88b tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] [instance: 022125c4-2b0c-4a2c-ae63-18968887316e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 833.592113] env[63028]: DEBUG oslo_vmware.api [None req-541b8fc7-3b9c-4592-9ae6-6bc4eccb0a4c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2735589, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.147072} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.592683] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-541b8fc7-3b9c-4592-9ae6-6bc4eccb0a4c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 833.592683] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-541b8fc7-3b9c-4592-9ae6-6bc4eccb0a4c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 514c83d1-4fb1-435c-8c25-aa112c744131] Deleted contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 833.592865] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-541b8fc7-3b9c-4592-9ae6-6bc4eccb0a4c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 514c83d1-4fb1-435c-8c25-aa112c744131] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 833.600568] env[63028]: INFO nova.scheduler.client.report [None req-e7a62a25-939b-4556-8d49-f66e9a7f16ee tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Deleted allocations for instance 1af19279-e75b-4ec5-91f1-a0a101b229b2 [ 833.614604] env[63028]: DEBUG oslo_vmware.api [None req-81ee569f-ddd8-4590-ac59-e5b6b9865946 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2735590, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.704402] env[63028]: DEBUG oslo_vmware.api [None req-9b460c26-89fc-48c0-b7a6-dc971897af28 tempest-ServersAdminTestJSON-1015437815 tempest-ServersAdminTestJSON-1015437815-project-admin] Task: {'id': task-2735591, 'name': ReconfigVM_Task, 'duration_secs': 0.192779} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.704823] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735592, 'name': CreateVM_Task, 'duration_secs': 0.417568} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.705471] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-9b460c26-89fc-48c0-b7a6-dc971897af28 tempest-ServersAdminTestJSON-1015437815 tempest-ServersAdminTestJSON-1015437815-project-admin] [instance: f4718363-73b2-4016-8849-f75e98259023] Reconfigured VM instance to set the machine id {{(pid=63028) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 833.705762] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 56d39801-f3e7-4cfe-a038-6a5e762bfda8] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 833.706559] env[63028]: DEBUG oslo_concurrency.lockutils [None req-dbd96db3-2cb6-4207-bf54-bf453d821f5b tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 833.706731] env[63028]: DEBUG oslo_concurrency.lockutils [None req-dbd96db3-2cb6-4207-bf54-bf453d821f5b tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 833.707077] env[63028]: DEBUG oslo_concurrency.lockutils [None req-dbd96db3-2cb6-4207-bf54-bf453d821f5b tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 833.709986] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-62685dee-b9d4-460f-8750-18575a34359e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.711693] env[63028]: DEBUG oslo_vmware.api [None req-db2d1a1b-bab5-4d65-92a7-d30b7a67907c tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2735593, 'name': ReconfigVM_Task, 'duration_secs': 0.295042} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.711955] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-db2d1a1b-bab5-4d65-92a7-d30b7a67907c tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 70888889-4965-47ab-ad47-59f1c1286bd8] Reconfigured VM instance instance-0000003b to attach disk [datastore2] 70888889-4965-47ab-ad47-59f1c1286bd8/70888889-4965-47ab-ad47-59f1c1286bd8.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 833.712902] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-94e5bccf-acb6-4eba-b7aa-100efd004f9d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.715861] env[63028]: DEBUG oslo_vmware.api [None req-dbd96db3-2cb6-4207-bf54-bf453d821f5b tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Waiting for the task: (returnval){ [ 833.715861] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]527506ea-cf09-c2b2-afab-ebca6f657226" [ 833.715861] env[63028]: _type = "Task" [ 833.715861] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.720707] env[63028]: DEBUG oslo_vmware.api [None req-db2d1a1b-bab5-4d65-92a7-d30b7a67907c tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Waiting for the task: (returnval){ [ 833.720707] env[63028]: value = "task-2735597" [ 833.720707] env[63028]: _type = "Task" [ 833.720707] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.724408] env[63028]: DEBUG oslo_vmware.api [None req-dbd96db3-2cb6-4207-bf54-bf453d821f5b tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]527506ea-cf09-c2b2-afab-ebca6f657226, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.732250] env[63028]: DEBUG oslo_vmware.api [None req-db2d1a1b-bab5-4d65-92a7-d30b7a67907c tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2735597, 'name': Rename_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.078234] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735596, 'name': CreateVM_Task, 'duration_secs': 0.349956} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.078234] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 13e0ca05-3ab3-43e2-8b0d-8045e26d6723] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 834.078234] env[63028]: DEBUG oslo_concurrency.lockutils [None req-b709110c-6e00-4eb2-85eb-129887ad428d tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 834.078234] env[63028]: DEBUG oslo_concurrency.lockutils [None req-b709110c-6e00-4eb2-85eb-129887ad428d tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 834.078409] env[63028]: DEBUG oslo_concurrency.lockutils [None req-b709110c-6e00-4eb2-85eb-129887ad428d tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 834.078692] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6751a5f0-004b-400c-b743-75f87f59fd10 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.084756] env[63028]: DEBUG oslo_vmware.api [None req-b709110c-6e00-4eb2-85eb-129887ad428d tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Waiting for the task: (returnval){ [ 834.084756] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52c20179-8ded-2b99-ced5-c850dc4bb022" [ 834.084756] env[63028]: _type = "Task" [ 834.084756] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.094985] env[63028]: DEBUG oslo_vmware.api [None req-b709110c-6e00-4eb2-85eb-129887ad428d tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52c20179-8ded-2b99-ced5-c850dc4bb022, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.109873] env[63028]: DEBUG oslo_concurrency.lockutils [None req-e7a62a25-939b-4556-8d49-f66e9a7f16ee tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Lock "1af19279-e75b-4ec5-91f1-a0a101b229b2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.325s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 834.116396] env[63028]: DEBUG oslo_vmware.api [None req-81ee569f-ddd8-4590-ac59-e5b6b9865946 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2735590, 'name': PowerOnVM_Task, 'duration_secs': 0.683492} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.116977] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-81ee569f-ddd8-4590-ac59-e5b6b9865946 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 834.117232] env[63028]: INFO nova.compute.manager [None req-81ee569f-ddd8-4590-ac59-e5b6b9865946 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7] Took 8.18 seconds to spawn the instance on the hypervisor. [ 834.117551] env[63028]: DEBUG nova.compute.manager [None req-81ee569f-ddd8-4590-ac59-e5b6b9865946 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 834.118411] env[63028]: DEBUG nova.network.neutron [req-c83768e0-fdfe-4172-9d0d-fe88ffbfc58c req-d86a9877-0739-4818-a9f4-bdaa4c9c445c service nova] [instance: 56d39801-f3e7-4cfe-a038-6a5e762bfda8] Updated VIF entry in instance network info cache for port 333fcbb4-150d-4ee3-bd25-8ef1d049b40d. {{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 834.118726] env[63028]: DEBUG nova.network.neutron [req-c83768e0-fdfe-4172-9d0d-fe88ffbfc58c req-d86a9877-0739-4818-a9f4-bdaa4c9c445c service nova] [instance: 56d39801-f3e7-4cfe-a038-6a5e762bfda8] Updating instance_info_cache with network_info: [{"id": "333fcbb4-150d-4ee3-bd25-8ef1d049b40d", "address": "fa:16:3e:f7:71:1a", "network": {"id": "95ccbf6d-2bd9-42ff-93f9-5f9f541e5ba1", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-863839356-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e9b6ca6cccb940f0a516e265a721fd03", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "06eaa4c9-dbc2-4d38-a844-7bf76e7b5a64", "external-id": "nsx-vlan-transportzone-804", "segmentation_id": 804, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap333fcbb4-15", "ovs_interfaceid": "333fcbb4-150d-4ee3-bd25-8ef1d049b40d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 834.122311] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c556d8e-c95a-4818-ac21-cb489ff186cc {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.226444] env[63028]: DEBUG oslo_vmware.api [None req-dbd96db3-2cb6-4207-bf54-bf453d821f5b tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Task: {'id': 
session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]527506ea-cf09-c2b2-afab-ebca6f657226, 'name': SearchDatastore_Task, 'duration_secs': 0.010737} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.230019] env[63028]: DEBUG oslo_concurrency.lockutils [None req-dbd96db3-2cb6-4207-bf54-bf453d821f5b tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 834.230019] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-dbd96db3-2cb6-4207-bf54-bf453d821f5b tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] [instance: 56d39801-f3e7-4cfe-a038-6a5e762bfda8] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 834.230150] env[63028]: DEBUG oslo_concurrency.lockutils [None req-dbd96db3-2cb6-4207-bf54-bf453d821f5b tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 834.230258] env[63028]: DEBUG oslo_concurrency.lockutils [None req-dbd96db3-2cb6-4207-bf54-bf453d821f5b tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 834.230447] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-dbd96db3-2cb6-4207-bf54-bf453d821f5b tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 834.230698] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-65e71f7d-1398-4c8e-98b2-c4b684fb4eb8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.237566] env[63028]: DEBUG oslo_vmware.api [None req-db2d1a1b-bab5-4d65-92a7-d30b7a67907c tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2735597, 'name': Rename_Task, 'duration_secs': 0.168801} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.237810] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-db2d1a1b-bab5-4d65-92a7-d30b7a67907c tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 70888889-4965-47ab-ad47-59f1c1286bd8] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 834.238052] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-beb65761-4e79-40f6-85c6-2355dff43054 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.241472] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-dbd96db3-2cb6-4207-bf54-bf453d821f5b tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 834.241667] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-dbd96db3-2cb6-4207-bf54-bf453d821f5b tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 834.242415] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b305f3d3-d039-4ca4-8d78-ffa233d22f0f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.245971] env[63028]: DEBUG oslo_vmware.api [None req-db2d1a1b-bab5-4d65-92a7-d30b7a67907c tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Waiting for the task: (returnval){ [ 834.245971] env[63028]: value = "task-2735598" [ 834.245971] env[63028]: _type = "Task" [ 834.245971] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.250166] env[63028]: DEBUG oslo_vmware.api [None req-dbd96db3-2cb6-4207-bf54-bf453d821f5b tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Waiting for the task: (returnval){ [ 834.250166] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5295c4a1-7e6c-7281-e01f-9e4dfdb07b84" [ 834.250166] env[63028]: _type = "Task" [ 834.250166] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.256131] env[63028]: DEBUG oslo_vmware.api [None req-db2d1a1b-bab5-4d65-92a7-d30b7a67907c tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2735598, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.261070] env[63028]: DEBUG oslo_vmware.api [None req-dbd96db3-2cb6-4207-bf54-bf453d821f5b tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5295c4a1-7e6c-7281-e01f-9e4dfdb07b84, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.599405] env[63028]: DEBUG oslo_vmware.api [None req-b709110c-6e00-4eb2-85eb-129887ad428d tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52c20179-8ded-2b99-ced5-c850dc4bb022, 'name': SearchDatastore_Task, 'duration_secs': 0.010173} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.599920] env[63028]: DEBUG oslo_concurrency.lockutils [None req-b709110c-6e00-4eb2-85eb-129887ad428d tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 834.600336] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-b709110c-6e00-4eb2-85eb-129887ad428d tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] [instance: 13e0ca05-3ab3-43e2-8b0d-8045e26d6723] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 834.600640] env[63028]: DEBUG oslo_concurrency.lockutils [None req-b709110c-6e00-4eb2-85eb-129887ad428d tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 834.600845] env[63028]: DEBUG oslo_concurrency.lockutils [None req-b709110c-6e00-4eb2-85eb-129887ad428d tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 834.601100] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-b709110c-6e00-4eb2-85eb-129887ad428d tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 834.604147] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1c6304f5-bc8b-4bd8-92e9-0125dd734658 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.621653] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-b709110c-6e00-4eb2-85eb-129887ad428d tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 834.621653] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-b709110c-6e00-4eb2-85eb-129887ad428d tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 834.622640] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-171f8388-deba-4114-9463-99fead22c399 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.626017] env[63028]: DEBUG oslo_concurrency.lockutils [req-c83768e0-fdfe-4172-9d0d-fe88ffbfc58c req-d86a9877-0739-4818-a9f4-bdaa4c9c445c service nova] Releasing lock "refresh_cache-56d39801-f3e7-4cfe-a038-6a5e762bfda8" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 834.634118] env[63028]: DEBUG oslo_vmware.api [None req-b709110c-6e00-4eb2-85eb-129887ad428d tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Waiting for the task: (returnval){ [ 834.634118] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5257c41f-984d-9ade-2f65-4edc88e28fab" [ 834.634118] env[63028]: _type = "Task" [ 834.634118] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.640340] env[63028]: DEBUG nova.virt.hardware [None req-541b8fc7-3b9c-4592-9ae6-6bc4eccb0a4c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 834.640581] env[63028]: DEBUG nova.virt.hardware [None req-541b8fc7-3b9c-4592-9ae6-6bc4eccb0a4c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 834.640733] env[63028]: DEBUG nova.virt.hardware [None req-541b8fc7-3b9c-4592-9ae6-6bc4eccb0a4c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 834.640910] env[63028]: DEBUG nova.virt.hardware [None req-541b8fc7-3b9c-4592-9ae6-6bc4eccb0a4c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 834.641112] env[63028]: DEBUG nova.virt.hardware [None req-541b8fc7-3b9c-4592-9ae6-6bc4eccb0a4c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 834.641208] 
env[63028]: DEBUG nova.virt.hardware [None req-541b8fc7-3b9c-4592-9ae6-6bc4eccb0a4c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 834.641406] env[63028]: DEBUG nova.virt.hardware [None req-541b8fc7-3b9c-4592-9ae6-6bc4eccb0a4c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 834.641563] env[63028]: DEBUG nova.virt.hardware [None req-541b8fc7-3b9c-4592-9ae6-6bc4eccb0a4c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 834.641803] env[63028]: DEBUG nova.virt.hardware [None req-541b8fc7-3b9c-4592-9ae6-6bc4eccb0a4c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 834.641873] env[63028]: DEBUG nova.virt.hardware [None req-541b8fc7-3b9c-4592-9ae6-6bc4eccb0a4c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 834.642047] env[63028]: DEBUG nova.virt.hardware [None req-541b8fc7-3b9c-4592-9ae6-6bc4eccb0a4c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 834.643321] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6ab79ce-b040-4a07-8f2b-45782d19910c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.650902] env[63028]: INFO nova.compute.manager [None req-81ee569f-ddd8-4590-ac59-e5b6b9865946 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7] Took 49.01 seconds to build instance. 
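The recurring "Waiting for the task: (returnval){ ... } to complete", "progress is N%", and "completed successfully" lines throughout this section are all produced by the same polling helper in oslo.vmware. A minimal sketch of that pattern, assuming `session` is an established VMwareAPISession and `task_ref` was returned by one of the "Invoking ...Task" calls above (both names are placeholders, not values from this log):

def run_task(session, task_ref):
    # wait_for_task() re-reads the task's state on every poll interval,
    # logging the "progress is N%" lines, and returns the final task info
    # once the task reaches a terminal state; the poller also logs the
    # "completed successfully" line with the task's duration.
    task_info = session.wait_for_task(task_ref)
    return task_info

If the task ends in an error state, wait_for_task() raises an oslo.vmware exception derived from the task's fault instead of returning, which is why failed tasks never reach the "completed successfully" message in logs like this one.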
[ 834.658130] env[63028]: INFO nova.compute.manager [None req-0af539be-ae50-4675-9cfd-f11dbcbf3335 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: ba57ed92-aaef-460c-bd45-d0cbe09e4615] Rebuilding instance [ 834.671089] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc43db2d-26d6-489f-a351-223f8467eb49 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.673273] env[63028]: DEBUG oslo_vmware.api [None req-b709110c-6e00-4eb2-85eb-129887ad428d tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5257c41f-984d-9ade-2f65-4edc88e28fab, 'name': SearchDatastore_Task, 'duration_secs': 0.010537} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.678221] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5603015a-b1a6-4258-a0f5-e78219080c65 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.691625] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-541b8fc7-3b9c-4592-9ae6-6bc4eccb0a4c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 514c83d1-4fb1-435c-8c25-aa112c744131] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:62:4e:4d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5446413d-c3b0-4cd2-a962-62240db178ac', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1e514afb-c9a6-43c6-bfec-ac1a32620674', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 834.699157] env[63028]: DEBUG oslo.service.loopingcall [None req-541b8fc7-3b9c-4592-9ae6-6bc4eccb0a4c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 834.706060] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 514c83d1-4fb1-435c-8c25-aa112c744131] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 834.706060] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-49e6354a-3b59-44db-b590-c082bcb790e5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.724344] env[63028]: DEBUG oslo_vmware.api [None req-b709110c-6e00-4eb2-85eb-129887ad428d tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Waiting for the task: (returnval){ [ 834.724344] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52672dac-d40b-2480-8b33-9aa222ac5916" [ 834.724344] env[63028]: _type = "Task" [ 834.724344] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.730048] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 834.730048] env[63028]: value = "task-2735599" [ 834.730048] env[63028]: _type = "Task" [ 834.730048] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.733874] env[63028]: DEBUG oslo_vmware.api [None req-b709110c-6e00-4eb2-85eb-129887ad428d tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52672dac-d40b-2480-8b33-9aa222ac5916, 'name': SearchDatastore_Task, 'duration_secs': 0.010328} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.738892] env[63028]: DEBUG oslo_concurrency.lockutils [None req-b709110c-6e00-4eb2-85eb-129887ad428d tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 834.739168] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-b709110c-6e00-4eb2-85eb-129887ad428d tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] 13e0ca05-3ab3-43e2-8b0d-8045e26d6723/13e0ca05-3ab3-43e2-8b0d-8045e26d6723.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 834.741819] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5d5c4604-c91f-4f0f-b656-0558583a2813 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.749413] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735599, 'name': CreateVM_Task} progress is 6%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.755504] env[63028]: DEBUG oslo_vmware.api [None req-b709110c-6e00-4eb2-85eb-129887ad428d tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Waiting for the task: (returnval){ [ 834.755504] env[63028]: value = "task-2735600" [ 834.755504] env[63028]: _type = "Task" [ 834.755504] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.755937] env[63028]: DEBUG nova.compute.manager [None req-0af539be-ae50-4675-9cfd-f11dbcbf3335 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: ba57ed92-aaef-460c-bd45-d0cbe09e4615] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 834.762205] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6eb9f74-831b-43b4-8e32-22c267ccac47 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.773428] env[63028]: DEBUG oslo_vmware.api [None req-db2d1a1b-bab5-4d65-92a7-d30b7a67907c tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2735598, 'name': PowerOnVM_Task, 'duration_secs': 0.458813} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.774025] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-db2d1a1b-bab5-4d65-92a7-d30b7a67907c tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 70888889-4965-47ab-ad47-59f1c1286bd8] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 834.774224] env[63028]: DEBUG nova.compute.manager [None req-db2d1a1b-bab5-4d65-92a7-d30b7a67907c tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 70888889-4965-47ab-ad47-59f1c1286bd8] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 834.778199] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64def440-8994-4304-a5e6-d2c4365a45ee {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.781179] env[63028]: DEBUG oslo_vmware.api [None req-dbd96db3-2cb6-4207-bf54-bf453d821f5b tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5295c4a1-7e6c-7281-e01f-9e4dfdb07b84, 'name': SearchDatastore_Task, 'duration_secs': 0.011614} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.788758] env[63028]: DEBUG oslo_vmware.api [None req-b709110c-6e00-4eb2-85eb-129887ad428d tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Task: {'id': task-2735600, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.793396] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-64845c02-a9bf-4237-9425-b180a17f75d4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.801660] env[63028]: DEBUG oslo_vmware.api [None req-dbd96db3-2cb6-4207-bf54-bf453d821f5b tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Waiting for the task: (returnval){ [ 834.801660] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52750635-2ff3-2f3d-2724-715b70dff3ac" [ 834.801660] env[63028]: _type = "Task" [ 834.801660] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.814740] env[63028]: DEBUG oslo_vmware.api [None req-dbd96db3-2cb6-4207-bf54-bf453d821f5b tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52750635-2ff3-2f3d-2724-715b70dff3ac, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.154315] env[63028]: DEBUG oslo_concurrency.lockutils [None req-81ee569f-ddd8-4590-ac59-e5b6b9865946 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Lock "c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 62.111s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 835.221818] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8649cb85-664b-4f4d-977a-708bb451b4ed {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.232181] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca8f05c2-d880-44e3-834c-579ac5ee060c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.243160] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735599, 'name': CreateVM_Task} progress is 99%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.271010] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1814b368-08b4-4904-9fb2-934e95caca29 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.277033] env[63028]: DEBUG oslo_vmware.api [None req-b709110c-6e00-4eb2-85eb-129887ad428d tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Task: {'id': task-2735600, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.511624} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.279094] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-b709110c-6e00-4eb2-85eb-129887ad428d tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] 13e0ca05-3ab3-43e2-8b0d-8045e26d6723/13e0ca05-3ab3-43e2-8b0d-8045e26d6723.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 835.279325] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-b709110c-6e00-4eb2-85eb-129887ad428d tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] [instance: 13e0ca05-3ab3-43e2-8b0d-8045e26d6723] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 835.279605] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b2fa9f69-a3e5-4045-8218-8ea27ae2142f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.282225] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34cca06e-105d-46ce-b320-29a43c97c673 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.298773] env[63028]: DEBUG nova.compute.provider_tree [None req-c28be328-46e8-4c9b-904e-d46ed583a88b tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 835.302085] env[63028]: INFO nova.compute.manager [None req-db2d1a1b-bab5-4d65-92a7-d30b7a67907c tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 70888889-4965-47ab-ad47-59f1c1286bd8] bringing vm to original state: 'stopped' [ 835.304454] env[63028]: DEBUG oslo_vmware.api [None req-b709110c-6e00-4eb2-85eb-129887ad428d tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Waiting for the task: (returnval){ [ 835.304454] env[63028]: value = "task-2735601" [ 835.304454] env[63028]: _type = "Task" [ 835.304454] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.319489] env[63028]: DEBUG oslo_vmware.api [None req-dbd96db3-2cb6-4207-bf54-bf453d821f5b tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52750635-2ff3-2f3d-2724-715b70dff3ac, 'name': SearchDatastore_Task, 'duration_secs': 0.018759} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.319703] env[63028]: DEBUG oslo_vmware.api [None req-b709110c-6e00-4eb2-85eb-129887ad428d tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Task: {'id': task-2735601, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.319928] env[63028]: DEBUG oslo_concurrency.lockutils [None req-dbd96db3-2cb6-4207-bf54-bf453d821f5b tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 835.320190] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-dbd96db3-2cb6-4207-bf54-bf453d821f5b tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] 56d39801-f3e7-4cfe-a038-6a5e762bfda8/56d39801-f3e7-4cfe-a038-6a5e762bfda8.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 835.320452] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ea76e635-1743-4e6e-9cc7-4fee70cee0f8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.325938] env[63028]: DEBUG oslo_vmware.api [None req-dbd96db3-2cb6-4207-bf54-bf453d821f5b tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Waiting for the task: (returnval){ [ 835.325938] env[63028]: value = "task-2735602" [ 835.325938] env[63028]: _type = "Task" [ 835.325938] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.333473] env[63028]: DEBUG oslo_vmware.api [None req-dbd96db3-2cb6-4207-bf54-bf453d821f5b tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Task: {'id': task-2735602, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.361391] env[63028]: DEBUG oslo_concurrency.lockutils [None req-354c0775-84f3-4df3-a8ea-2bc4027103d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Acquiring lock "c06813c4-472d-4bf9-84ec-0d01306bcd48" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 835.361692] env[63028]: DEBUG oslo_concurrency.lockutils [None req-354c0775-84f3-4df3-a8ea-2bc4027103d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Lock "c06813c4-472d-4bf9-84ec-0d01306bcd48" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 835.361877] env[63028]: DEBUG nova.compute.manager [None req-354c0775-84f3-4df3-a8ea-2bc4027103d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 835.362814] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61cb37cf-b047-4990-8d3a-bd755df0be78 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.370340] env[63028]: DEBUG nova.compute.manager [None req-354c0775-84f3-4df3-a8ea-2bc4027103d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=63028) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 835.370620] env[63028]: DEBUG nova.objects.instance [None req-354c0775-84f3-4df3-a8ea-2bc4027103d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Lazy-loading 'flavor' on Instance uuid c06813c4-472d-4bf9-84ec-0d01306bcd48 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 835.421264] env[63028]: DEBUG nova.compute.manager [req-3872f8f5-0daa-48b7-9455-62a09a51a92d req-7127f391-3752-4c65-9c88-474c7fe9b1d9 service nova] [instance: c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7] Received event network-changed-6ecb125b-389c-4dce-8446-368a7298e497 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 835.421264] env[63028]: DEBUG nova.compute.manager [req-3872f8f5-0daa-48b7-9455-62a09a51a92d req-7127f391-3752-4c65-9c88-474c7fe9b1d9 service nova] [instance: c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7] Refreshing instance network info cache due to event network-changed-6ecb125b-389c-4dce-8446-368a7298e497. 
{{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 835.421264] env[63028]: DEBUG oslo_concurrency.lockutils [req-3872f8f5-0daa-48b7-9455-62a09a51a92d req-7127f391-3752-4c65-9c88-474c7fe9b1d9 service nova] Acquiring lock "refresh_cache-c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 835.421264] env[63028]: DEBUG oslo_concurrency.lockutils [req-3872f8f5-0daa-48b7-9455-62a09a51a92d req-7127f391-3752-4c65-9c88-474c7fe9b1d9 service nova] Acquired lock "refresh_cache-c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 835.421264] env[63028]: DEBUG nova.network.neutron [req-3872f8f5-0daa-48b7-9455-62a09a51a92d req-7127f391-3752-4c65-9c88-474c7fe9b1d9 service nova] [instance: c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7] Refreshing network info cache for port 6ecb125b-389c-4dce-8446-368a7298e497 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 835.427512] env[63028]: DEBUG oslo_concurrency.lockutils [None req-0279905d-506f-46c0-b4c8-a468b6375aab tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Acquiring lock "6e0959ac-8fca-47eb-b501-b50a3e9f025a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 835.427953] env[63028]: DEBUG oslo_concurrency.lockutils [None req-0279905d-506f-46c0-b4c8-a468b6375aab tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Lock "6e0959ac-8fca-47eb-b501-b50a3e9f025a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 835.428229] env[63028]: DEBUG oslo_concurrency.lockutils [None req-0279905d-506f-46c0-b4c8-a468b6375aab tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Acquiring lock "6e0959ac-8fca-47eb-b501-b50a3e9f025a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 835.428423] env[63028]: DEBUG oslo_concurrency.lockutils [None req-0279905d-506f-46c0-b4c8-a468b6375aab tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Lock "6e0959ac-8fca-47eb-b501-b50a3e9f025a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 835.428596] env[63028]: DEBUG oslo_concurrency.lockutils [None req-0279905d-506f-46c0-b4c8-a468b6375aab tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Lock "6e0959ac-8fca-47eb-b501-b50a3e9f025a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 835.431105] env[63028]: INFO nova.compute.manager [None req-0279905d-506f-46c0-b4c8-a468b6375aab 
tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 6e0959ac-8fca-47eb-b501-b50a3e9f025a] Terminating instance [ 835.658178] env[63028]: DEBUG nova.compute.manager [None req-c1571098-c3e1-4a86-83aa-080ecc8a35f5 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: 3b90dbb8-66ce-435f-beae-5464720bfb3e] Starting instance... {{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 835.745244] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735599, 'name': CreateVM_Task} progress is 99%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.801047] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-0af539be-ae50-4675-9cfd-f11dbcbf3335 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: ba57ed92-aaef-460c-bd45-d0cbe09e4615] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 835.801332] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4b67eea6-03ec-4d64-b7cf-f6d1cde1fbaa {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.806477] env[63028]: DEBUG nova.scheduler.client.report [None req-c28be328-46e8-4c9b-904e-d46ed583a88b tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 835.817995] env[63028]: DEBUG oslo_vmware.api [None req-0af539be-ae50-4675-9cfd-f11dbcbf3335 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Waiting for the task: (returnval){ [ 835.817995] env[63028]: value = "task-2735603" [ 835.817995] env[63028]: _type = "Task" [ 835.817995] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.824901] env[63028]: DEBUG oslo_vmware.api [None req-b709110c-6e00-4eb2-85eb-129887ad428d tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Task: {'id': task-2735601, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.061782} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.826454] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-b709110c-6e00-4eb2-85eb-129887ad428d tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] [instance: 13e0ca05-3ab3-43e2-8b0d-8045e26d6723] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 835.827414] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62cf9347-348e-42ad-895c-249e89bf421c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.837177] env[63028]: DEBUG oslo_vmware.api [None req-0af539be-ae50-4675-9cfd-f11dbcbf3335 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': task-2735603, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.850789] env[63028]: DEBUG oslo_vmware.api [None req-dbd96db3-2cb6-4207-bf54-bf453d821f5b tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Task: {'id': task-2735602, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.504759} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.859408] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-b709110c-6e00-4eb2-85eb-129887ad428d tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] [instance: 13e0ca05-3ab3-43e2-8b0d-8045e26d6723] Reconfiguring VM instance instance-00000040 to attach disk [datastore1] 13e0ca05-3ab3-43e2-8b0d-8045e26d6723/13e0ca05-3ab3-43e2-8b0d-8045e26d6723.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 835.859716] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-dbd96db3-2cb6-4207-bf54-bf453d821f5b tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] 56d39801-f3e7-4cfe-a038-6a5e762bfda8/56d39801-f3e7-4cfe-a038-6a5e762bfda8.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 835.859920] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-dbd96db3-2cb6-4207-bf54-bf453d821f5b tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] [instance: 56d39801-f3e7-4cfe-a038-6a5e762bfda8] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 835.860433] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4c830861-9137-4303-8f18-7c889683de4f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.874152] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-57789cb6-2d7f-4542-ac4b-fc40eb1b25aa {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.883308] env[63028]: DEBUG oslo_vmware.api [None req-b709110c-6e00-4eb2-85eb-129887ad428d 
tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Waiting for the task: (returnval){ [ 835.883308] env[63028]: value = "task-2735604" [ 835.883308] env[63028]: _type = "Task" [ 835.883308] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.887415] env[63028]: DEBUG oslo_vmware.api [None req-dbd96db3-2cb6-4207-bf54-bf453d821f5b tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Waiting for the task: (returnval){ [ 835.887415] env[63028]: value = "task-2735605" [ 835.887415] env[63028]: _type = "Task" [ 835.887415] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.894698] env[63028]: DEBUG oslo_vmware.api [None req-b709110c-6e00-4eb2-85eb-129887ad428d tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Task: {'id': task-2735604, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.901593] env[63028]: DEBUG oslo_vmware.api [None req-dbd96db3-2cb6-4207-bf54-bf453d821f5b tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Task: {'id': task-2735605, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.934686] env[63028]: DEBUG nova.compute.manager [None req-0279905d-506f-46c0-b4c8-a468b6375aab tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 6e0959ac-8fca-47eb-b501-b50a3e9f025a] Start destroying the instance on the hypervisor. 
{{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 835.934930] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-0279905d-506f-46c0-b4c8-a468b6375aab tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 6e0959ac-8fca-47eb-b501-b50a3e9f025a] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 835.935825] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fae1498b-dfcf-4457-8390-8283805577c6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.943836] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-0279905d-506f-46c0-b4c8-a468b6375aab tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 6e0959ac-8fca-47eb-b501-b50a3e9f025a] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 835.944098] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f08e53e3-5d78-49af-94b7-b87b870082d5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.951183] env[63028]: DEBUG oslo_vmware.api [None req-0279905d-506f-46c0-b4c8-a468b6375aab tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Waiting for the task: (returnval){ [ 835.951183] env[63028]: value = "task-2735606" [ 835.951183] env[63028]: _type = "Task" [ 835.951183] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.959566] env[63028]: DEBUG oslo_vmware.api [None req-0279905d-506f-46c0-b4c8-a468b6375aab tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Task: {'id': task-2735606, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.189876] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c1571098-c3e1-4a86-83aa-080ecc8a35f5 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 836.211073] env[63028]: DEBUG nova.network.neutron [req-3872f8f5-0daa-48b7-9455-62a09a51a92d req-7127f391-3752-4c65-9c88-474c7fe9b1d9 service nova] [instance: c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7] Updated VIF entry in instance network info cache for port 6ecb125b-389c-4dce-8446-368a7298e497. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 836.211508] env[63028]: DEBUG nova.network.neutron [req-3872f8f5-0daa-48b7-9455-62a09a51a92d req-7127f391-3752-4c65-9c88-474c7fe9b1d9 service nova] [instance: c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7] Updating instance_info_cache with network_info: [{"id": "6ecb125b-389c-4dce-8446-368a7298e497", "address": "fa:16:3e:f4:06:c4", "network": {"id": "ec7daf90-9857-4c67-8588-1d8404499409", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-574413795-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.163", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e85128c5c889438bbb1df571b7756c6a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e1a5c1-4ae7-409b-8de7-d401684ef60d", "external-id": "nsx-vlan-transportzone-740", "segmentation_id": 740, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6ecb125b-38", "ovs_interfaceid": "6ecb125b-389c-4dce-8446-368a7298e497", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 836.246041] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735599, 'name': CreateVM_Task, 'duration_secs': 1.418253} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.246222] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 514c83d1-4fb1-435c-8c25-aa112c744131] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 836.246917] env[63028]: DEBUG oslo_concurrency.lockutils [None req-541b8fc7-3b9c-4592-9ae6-6bc4eccb0a4c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 836.250363] env[63028]: DEBUG oslo_concurrency.lockutils [None req-541b8fc7-3b9c-4592-9ae6-6bc4eccb0a4c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 836.250363] env[63028]: DEBUG oslo_concurrency.lockutils [None req-541b8fc7-3b9c-4592-9ae6-6bc4eccb0a4c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 836.250363] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-79d2dc8e-25eb-42d3-afe6-8de246002974 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.252707] env[63028]: DEBUG oslo_vmware.api [None req-541b8fc7-3b9c-4592-9ae6-6bc4eccb0a4c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Waiting for the task: (returnval){ [ 836.252707] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52039586-67f9-2bcf-7c73-ec7e21cebc08" [ 836.252707] env[63028]: _type = "Task" [ 836.252707] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.260617] env[63028]: DEBUG oslo_vmware.api [None req-541b8fc7-3b9c-4592-9ae6-6bc4eccb0a4c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52039586-67f9-2bcf-7c73-ec7e21cebc08, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.318025] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c28be328-46e8-4c9b-904e-d46ed583a88b tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.737s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 836.318025] env[63028]: DEBUG nova.compute.manager [None req-c28be328-46e8-4c9b-904e-d46ed583a88b tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] [instance: 022125c4-2b0c-4a2c-ae63-18968887316e] Start building networks asynchronously for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 836.319752] env[63028]: DEBUG oslo_concurrency.lockutils [None req-e1bd3373-3038-4133-a676-f1792a1f8e56 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 31.724s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 836.321598] env[63028]: DEBUG oslo_concurrency.lockutils [None req-db2d1a1b-bab5-4d65-92a7-d30b7a67907c tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Acquiring lock "70888889-4965-47ab-ad47-59f1c1286bd8" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 836.321921] env[63028]: DEBUG oslo_concurrency.lockutils [None req-db2d1a1b-bab5-4d65-92a7-d30b7a67907c tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Lock "70888889-4965-47ab-ad47-59f1c1286bd8" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 836.322206] env[63028]: DEBUG nova.compute.manager [None req-db2d1a1b-bab5-4d65-92a7-d30b7a67907c tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 70888889-4965-47ab-ad47-59f1c1286bd8] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 836.323552] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5a66469-8234-4f3b-aa98-ee58abcd0acd {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.335494] env[63028]: DEBUG oslo_vmware.api [None req-0af539be-ae50-4675-9cfd-f11dbcbf3335 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': task-2735603, 'name': PowerOffVM_Task, 'duration_secs': 0.516979} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.337172] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-0af539be-ae50-4675-9cfd-f11dbcbf3335 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: ba57ed92-aaef-460c-bd45-d0cbe09e4615] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 836.337410] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-0af539be-ae50-4675-9cfd-f11dbcbf3335 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: ba57ed92-aaef-460c-bd45-d0cbe09e4615] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 836.337723] env[63028]: DEBUG nova.compute.manager [None req-db2d1a1b-bab5-4d65-92a7-d30b7a67907c tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 70888889-4965-47ab-ad47-59f1c1286bd8] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=63028) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 836.340379] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c140701-c7e3-4838-b853-d68881f7aa57 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.348635] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-0af539be-ae50-4675-9cfd-f11dbcbf3335 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: ba57ed92-aaef-460c-bd45-d0cbe09e4615] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 836.348987] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6213cfd4-0602-41a3-8bc2-50ff90966ad3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.380833] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-354c0775-84f3-4df3-a8ea-2bc4027103d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 836.382012] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-67d1aa8d-8256-419d-a688-c0c665e4682f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.388504] env[63028]: DEBUG oslo_vmware.api [None req-354c0775-84f3-4df3-a8ea-2bc4027103d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Waiting for the task: (returnval){ [ 836.388504] env[63028]: value = "task-2735608" [ 836.388504] env[63028]: _type = "Task" [ 836.388504] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.397785] env[63028]: DEBUG oslo_vmware.api [None req-b709110c-6e00-4eb2-85eb-129887ad428d tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Task: {'id': task-2735604, 'name': ReconfigVM_Task, 'duration_secs': 0.39239} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.398448] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-b709110c-6e00-4eb2-85eb-129887ad428d tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] [instance: 13e0ca05-3ab3-43e2-8b0d-8045e26d6723] Reconfigured VM instance instance-00000040 to attach disk [datastore1] 13e0ca05-3ab3-43e2-8b0d-8045e26d6723/13e0ca05-3ab3-43e2-8b0d-8045e26d6723.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 836.399446] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-01814eae-540b-43fb-a3eb-f621c773c021 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.407917] env[63028]: DEBUG oslo_vmware.api [None req-dbd96db3-2cb6-4207-bf54-bf453d821f5b tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Task: {'id': task-2735605, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.090735} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.408167] env[63028]: DEBUG oslo_vmware.api [None req-354c0775-84f3-4df3-a8ea-2bc4027103d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2735608, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.408770] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-dbd96db3-2cb6-4207-bf54-bf453d821f5b tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] [instance: 56d39801-f3e7-4cfe-a038-6a5e762bfda8] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 836.410076] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de0e6355-2536-4cd6-8a5b-7757cb307cfb {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.414448] env[63028]: DEBUG oslo_vmware.api [None req-b709110c-6e00-4eb2-85eb-129887ad428d tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Waiting for the task: (returnval){ [ 836.414448] env[63028]: value = "task-2735609" [ 836.414448] env[63028]: _type = "Task" [ 836.414448] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.437630] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-dbd96db3-2cb6-4207-bf54-bf453d821f5b tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] [instance: 56d39801-f3e7-4cfe-a038-6a5e762bfda8] Reconfiguring VM instance instance-0000003f to attach disk [datastore2] 56d39801-f3e7-4cfe-a038-6a5e762bfda8/56d39801-f3e7-4cfe-a038-6a5e762bfda8.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 836.438780] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5bb3d96b-9494-49b5-9bc0-eff312987395 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.458061] env[63028]: DEBUG oslo_vmware.api [None req-b709110c-6e00-4eb2-85eb-129887ad428d tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Task: {'id': task-2735609, 'name': Rename_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.458387] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-0af539be-ae50-4675-9cfd-f11dbcbf3335 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: ba57ed92-aaef-460c-bd45-d0cbe09e4615] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 836.458556] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-0af539be-ae50-4675-9cfd-f11dbcbf3335 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: ba57ed92-aaef-460c-bd45-d0cbe09e4615] Deleting contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 836.458667] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-0af539be-ae50-4675-9cfd-f11dbcbf3335 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Deleting the datastore file [datastore2] ba57ed92-aaef-460c-bd45-d0cbe09e4615 {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 836.462011] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e526c93c-f06e-4994-af28-651e9f2a75ff {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.465689] env[63028]: DEBUG oslo_vmware.api [None req-dbd96db3-2cb6-4207-bf54-bf453d821f5b tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Waiting for the task: (returnval){ [ 836.465689] env[63028]: value = "task-2735610" [ 836.465689] env[63028]: _type = "Task" [ 836.465689] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.468996] env[63028]: DEBUG oslo_vmware.api [None req-0279905d-506f-46c0-b4c8-a468b6375aab tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Task: {'id': task-2735606, 'name': PowerOffVM_Task, 'duration_secs': 0.214523} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.472666] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-0279905d-506f-46c0-b4c8-a468b6375aab tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 6e0959ac-8fca-47eb-b501-b50a3e9f025a] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 836.472836] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-0279905d-506f-46c0-b4c8-a468b6375aab tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 6e0959ac-8fca-47eb-b501-b50a3e9f025a] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 836.473257] env[63028]: DEBUG oslo_vmware.api [None req-0af539be-ae50-4675-9cfd-f11dbcbf3335 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Waiting for the task: (returnval){ [ 836.473257] env[63028]: value = "task-2735611" [ 836.473257] env[63028]: _type = "Task" [ 836.473257] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.473471] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3e564f37-8dd9-4734-bc8d-404e3360cc85 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.481412] env[63028]: DEBUG oslo_vmware.api [None req-dbd96db3-2cb6-4207-bf54-bf453d821f5b tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Task: {'id': task-2735610, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.486903] env[63028]: DEBUG oslo_vmware.api [None req-0af539be-ae50-4675-9cfd-f11dbcbf3335 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': task-2735611, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.550848] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-0279905d-506f-46c0-b4c8-a468b6375aab tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 6e0959ac-8fca-47eb-b501-b50a3e9f025a] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 836.551134] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-0279905d-506f-46c0-b4c8-a468b6375aab tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 6e0959ac-8fca-47eb-b501-b50a3e9f025a] Deleting contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 836.551340] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-0279905d-506f-46c0-b4c8-a468b6375aab tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Deleting the datastore file [datastore1] 6e0959ac-8fca-47eb-b501-b50a3e9f025a {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 836.551646] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-752c0282-ea73-4493-b90e-fd560997fb43 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.557904] env[63028]: DEBUG oslo_vmware.api [None req-0279905d-506f-46c0-b4c8-a468b6375aab tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Waiting for the task: (returnval){ [ 836.557904] env[63028]: value = "task-2735613" [ 836.557904] env[63028]: _type = "Task" [ 836.557904] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.566165] env[63028]: DEBUG oslo_vmware.api [None req-0279905d-506f-46c0-b4c8-a468b6375aab tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Task: {'id': task-2735613, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.714653] env[63028]: DEBUG oslo_concurrency.lockutils [req-3872f8f5-0daa-48b7-9455-62a09a51a92d req-7127f391-3752-4c65-9c88-474c7fe9b1d9 service nova] Releasing lock "refresh_cache-c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 836.762646] env[63028]: DEBUG oslo_vmware.api [None req-541b8fc7-3b9c-4592-9ae6-6bc4eccb0a4c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52039586-67f9-2bcf-7c73-ec7e21cebc08, 'name': SearchDatastore_Task, 'duration_secs': 0.01221} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.762953] env[63028]: DEBUG oslo_concurrency.lockutils [None req-541b8fc7-3b9c-4592-9ae6-6bc4eccb0a4c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 836.763196] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-541b8fc7-3b9c-4592-9ae6-6bc4eccb0a4c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 514c83d1-4fb1-435c-8c25-aa112c744131] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 836.763425] env[63028]: DEBUG oslo_concurrency.lockutils [None req-541b8fc7-3b9c-4592-9ae6-6bc4eccb0a4c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 836.763574] env[63028]: DEBUG oslo_concurrency.lockutils [None req-541b8fc7-3b9c-4592-9ae6-6bc4eccb0a4c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 836.763766] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-541b8fc7-3b9c-4592-9ae6-6bc4eccb0a4c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 836.764028] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-efa37f3c-4133-4e5f-ab31-2f2a25678dbb {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.772485] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-541b8fc7-3b9c-4592-9ae6-6bc4eccb0a4c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 836.772705] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-541b8fc7-3b9c-4592-9ae6-6bc4eccb0a4c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 836.773454] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9264bdf8-4754-4f26-8be4-e867d24cb3f3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.778666] env[63028]: DEBUG oslo_vmware.api [None req-541b8fc7-3b9c-4592-9ae6-6bc4eccb0a4c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Waiting for the task: (returnval){ [ 836.778666] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52223a6b-860e-e91c-d288-808d6e690332" [ 836.778666] env[63028]: _type = "Task" [ 836.778666] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.785966] env[63028]: DEBUG oslo_vmware.api [None req-541b8fc7-3b9c-4592-9ae6-6bc4eccb0a4c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52223a6b-860e-e91c-d288-808d6e690332, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.824659] env[63028]: DEBUG nova.compute.utils [None req-c28be328-46e8-4c9b-904e-d46ed583a88b tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 836.826072] env[63028]: DEBUG nova.compute.manager [None req-c28be328-46e8-4c9b-904e-d46ed583a88b tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] [instance: 022125c4-2b0c-4a2c-ae63-18968887316e] Allocating IP information in the background. 
{{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 836.826249] env[63028]: DEBUG nova.network.neutron [None req-c28be328-46e8-4c9b-904e-d46ed583a88b tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] [instance: 022125c4-2b0c-4a2c-ae63-18968887316e] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 836.833614] env[63028]: INFO nova.compute.claims [None req-e1bd3373-3038-4133-a676-f1792a1f8e56 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: 52b19182-a7e2-4461-b4eb-e6cd8a30024e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 836.845080] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-db2d1a1b-bab5-4d65-92a7-d30b7a67907c tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 70888889-4965-47ab-ad47-59f1c1286bd8] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 836.845376] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6cc838a6-2c2c-4eee-b4ed-df6b98cb3bd0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.852666] env[63028]: DEBUG oslo_vmware.api [None req-db2d1a1b-bab5-4d65-92a7-d30b7a67907c tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Waiting for the task: (returnval){ [ 836.852666] env[63028]: value = "task-2735614" [ 836.852666] env[63028]: _type = "Task" [ 836.852666] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.862019] env[63028]: DEBUG oslo_vmware.api [None req-db2d1a1b-bab5-4d65-92a7-d30b7a67907c tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2735614, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.902100] env[63028]: DEBUG oslo_vmware.api [None req-354c0775-84f3-4df3-a8ea-2bc4027103d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2735608, 'name': PowerOffVM_Task, 'duration_secs': 0.23613} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.902351] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-354c0775-84f3-4df3-a8ea-2bc4027103d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 836.903045] env[63028]: DEBUG nova.compute.manager [None req-354c0775-84f3-4df3-a8ea-2bc4027103d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 836.903585] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3443b365-af76-454d-be57-cbbe2e7faef6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.908019] env[63028]: DEBUG nova.policy [None req-c28be328-46e8-4c9b-904e-d46ed583a88b tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c1b35dc6f0b14b528c2690fdf57410c9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9ca23bbd50b041859820261db200b1af', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 836.925149] env[63028]: DEBUG oslo_vmware.api [None req-b709110c-6e00-4eb2-85eb-129887ad428d tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Task: {'id': task-2735609, 'name': Rename_Task, 'duration_secs': 0.153176} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.925149] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-b709110c-6e00-4eb2-85eb-129887ad428d tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] [instance: 13e0ca05-3ab3-43e2-8b0d-8045e26d6723] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 836.925149] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-be85437f-8ed9-4b35-9418-b8420a6c5625 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.931535] env[63028]: DEBUG oslo_vmware.api [None req-b709110c-6e00-4eb2-85eb-129887ad428d tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Waiting for the task: (returnval){ [ 836.931535] env[63028]: value = "task-2735615" [ 836.931535] env[63028]: _type = "Task" [ 836.931535] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.939867] env[63028]: DEBUG oslo_vmware.api [None req-b709110c-6e00-4eb2-85eb-129887ad428d tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Task: {'id': task-2735615, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.983301] env[63028]: DEBUG oslo_vmware.api [None req-dbd96db3-2cb6-4207-bf54-bf453d821f5b tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Task: {'id': task-2735610, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.988477] env[63028]: DEBUG oslo_vmware.api [None req-0af539be-ae50-4675-9cfd-f11dbcbf3335 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': task-2735611, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.191925} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.988738] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-0af539be-ae50-4675-9cfd-f11dbcbf3335 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 836.988923] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-0af539be-ae50-4675-9cfd-f11dbcbf3335 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: ba57ed92-aaef-460c-bd45-d0cbe09e4615] Deleted contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 836.989179] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-0af539be-ae50-4675-9cfd-f11dbcbf3335 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: ba57ed92-aaef-460c-bd45-d0cbe09e4615] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 837.067590] env[63028]: DEBUG oslo_vmware.api [None req-0279905d-506f-46c0-b4c8-a468b6375aab tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Task: {'id': task-2735613, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.158984} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.067913] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-0279905d-506f-46c0-b4c8-a468b6375aab tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 837.068140] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-0279905d-506f-46c0-b4c8-a468b6375aab tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 6e0959ac-8fca-47eb-b501-b50a3e9f025a] Deleted contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 837.068391] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-0279905d-506f-46c0-b4c8-a468b6375aab tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 6e0959ac-8fca-47eb-b501-b50a3e9f025a] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 837.068594] env[63028]: INFO nova.compute.manager [None req-0279905d-506f-46c0-b4c8-a468b6375aab tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] [instance: 6e0959ac-8fca-47eb-b501-b50a3e9f025a] Took 1.13 seconds to destroy the instance on the hypervisor. [ 837.068880] env[63028]: DEBUG oslo.service.loopingcall [None req-0279905d-506f-46c0-b4c8-a468b6375aab tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 837.069114] env[63028]: DEBUG nova.compute.manager [-] [instance: 6e0959ac-8fca-47eb-b501-b50a3e9f025a] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 837.069214] env[63028]: DEBUG nova.network.neutron [-] [instance: 6e0959ac-8fca-47eb-b501-b50a3e9f025a] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 837.215137] env[63028]: DEBUG nova.network.neutron [None req-c28be328-46e8-4c9b-904e-d46ed583a88b tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] [instance: 022125c4-2b0c-4a2c-ae63-18968887316e] Successfully created port: 9c71a142-583d-4c67-95dc-f09f116bf3c5 {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 837.292915] env[63028]: DEBUG oslo_vmware.api [None req-541b8fc7-3b9c-4592-9ae6-6bc4eccb0a4c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52223a6b-860e-e91c-d288-808d6e690332, 'name': SearchDatastore_Task, 'duration_secs': 0.009207} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.294694] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c997fb5b-cd73-4130-be46-03661e8921ce {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.304769] env[63028]: DEBUG oslo_vmware.api [None req-541b8fc7-3b9c-4592-9ae6-6bc4eccb0a4c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Waiting for the task: (returnval){ [ 837.304769] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5211ae61-f6a4-aac8-0500-8b57b7759291" [ 837.304769] env[63028]: _type = "Task" [ 837.304769] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.316562] env[63028]: DEBUG oslo_vmware.api [None req-541b8fc7-3b9c-4592-9ae6-6bc4eccb0a4c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5211ae61-f6a4-aac8-0500-8b57b7759291, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.331644] env[63028]: DEBUG nova.compute.manager [None req-c28be328-46e8-4c9b-904e-d46ed583a88b tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] [instance: 022125c4-2b0c-4a2c-ae63-18968887316e] Start building block device mappings for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 837.339712] env[63028]: INFO nova.compute.resource_tracker [None req-e1bd3373-3038-4133-a676-f1792a1f8e56 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: 52b19182-a7e2-4461-b4eb-e6cd8a30024e] Updating resource usage from migration d7f275c5-0c12-4e9a-baa7-f99e7b616c4f [ 837.365889] env[63028]: DEBUG oslo_vmware.api [None req-db2d1a1b-bab5-4d65-92a7-d30b7a67907c tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2735614, 'name': PowerOffVM_Task, 'duration_secs': 0.287177} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.366161] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-db2d1a1b-bab5-4d65-92a7-d30b7a67907c tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 70888889-4965-47ab-ad47-59f1c1286bd8] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 837.366377] env[63028]: DEBUG nova.compute.manager [None req-db2d1a1b-bab5-4d65-92a7-d30b7a67907c tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 70888889-4965-47ab-ad47-59f1c1286bd8] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 837.367306] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dc8097a-ff62-4720-bb96-99713f6bc3e3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.420885] env[63028]: DEBUG oslo_concurrency.lockutils [None req-354c0775-84f3-4df3-a8ea-2bc4027103d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Lock "c06813c4-472d-4bf9-84ec-0d01306bcd48" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.059s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 837.441958] env[63028]: DEBUG oslo_vmware.api [None req-b709110c-6e00-4eb2-85eb-129887ad428d tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Task: {'id': task-2735615, 'name': PowerOnVM_Task} progress is 78%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.472336] env[63028]: DEBUG nova.compute.manager [req-0fadbf83-4067-4ef1-9e78-af1abe7f708e req-ac5be78a-ee21-48fd-bbd9-9d50e1ce9d8e service nova] [instance: 6e0959ac-8fca-47eb-b501-b50a3e9f025a] Received event network-vif-deleted-8f389aaf-a460-47ed-862a-e45d83b3d9e3 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 837.472552] env[63028]: INFO nova.compute.manager [req-0fadbf83-4067-4ef1-9e78-af1abe7f708e req-ac5be78a-ee21-48fd-bbd9-9d50e1ce9d8e service nova] [instance: 6e0959ac-8fca-47eb-b501-b50a3e9f025a] Neutron deleted interface 8f389aaf-a460-47ed-862a-e45d83b3d9e3; detaching it from the instance and deleting it from the info cache [ 837.472755] env[63028]: DEBUG nova.network.neutron [req-0fadbf83-4067-4ef1-9e78-af1abe7f708e req-ac5be78a-ee21-48fd-bbd9-9d50e1ce9d8e service nova] [instance: 6e0959ac-8fca-47eb-b501-b50a3e9f025a] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 837.487085] env[63028]: DEBUG oslo_vmware.api [None req-dbd96db3-2cb6-4207-bf54-bf453d821f5b tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Task: {'id': task-2735610, 'name': ReconfigVM_Task, 'duration_secs': 0.696917} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.487746] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-dbd96db3-2cb6-4207-bf54-bf453d821f5b tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] [instance: 56d39801-f3e7-4cfe-a038-6a5e762bfda8] Reconfigured VM instance instance-0000003f to attach disk [datastore2] 56d39801-f3e7-4cfe-a038-6a5e762bfda8/56d39801-f3e7-4cfe-a038-6a5e762bfda8.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 837.488103] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b6327e15-3480-41c0-893b-17edd8265271 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.500222] env[63028]: DEBUG oslo_vmware.api [None req-dbd96db3-2cb6-4207-bf54-bf453d821f5b tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Waiting for the task: (returnval){ [ 837.500222] env[63028]: value = "task-2735616" [ 837.500222] env[63028]: _type = "Task" [ 837.500222] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.509088] env[63028]: DEBUG oslo_vmware.api [None req-dbd96db3-2cb6-4207-bf54-bf453d821f5b tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Task: {'id': task-2735616, 'name': Rename_Task} progress is 6%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.825244] env[63028]: DEBUG oslo_vmware.api [None req-541b8fc7-3b9c-4592-9ae6-6bc4eccb0a4c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5211ae61-f6a4-aac8-0500-8b57b7759291, 'name': SearchDatastore_Task, 'duration_secs': 0.038603} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.825542] env[63028]: DEBUG oslo_concurrency.lockutils [None req-541b8fc7-3b9c-4592-9ae6-6bc4eccb0a4c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 837.825917] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-541b8fc7-3b9c-4592-9ae6-6bc4eccb0a4c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] 514c83d1-4fb1-435c-8c25-aa112c744131/514c83d1-4fb1-435c-8c25-aa112c744131.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 837.826288] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-24d8aca0-29fb-4aa4-b71a-c4a51b2fb905 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.833046] env[63028]: DEBUG nova.network.neutron [-] [instance: 6e0959ac-8fca-47eb-b501-b50a3e9f025a] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 837.844142] env[63028]: DEBUG oslo_vmware.api [None req-541b8fc7-3b9c-4592-9ae6-6bc4eccb0a4c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Waiting for the task: (returnval){ [ 837.844142] env[63028]: value = "task-2735617" [ 837.844142] env[63028]: _type = "Task" [ 837.844142] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.852143] env[63028]: DEBUG oslo_vmware.api [None req-541b8fc7-3b9c-4592-9ae6-6bc4eccb0a4c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2735617, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.885815] env[63028]: DEBUG oslo_concurrency.lockutils [None req-db2d1a1b-bab5-4d65-92a7-d30b7a67907c tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Lock "70888889-4965-47ab-ad47-59f1c1286bd8" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.564s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 837.943251] env[63028]: DEBUG oslo_vmware.api [None req-b709110c-6e00-4eb2-85eb-129887ad428d tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Task: {'id': task-2735615, 'name': PowerOnVM_Task, 'duration_secs': 0.915587} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.944032] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-b709110c-6e00-4eb2-85eb-129887ad428d tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] [instance: 13e0ca05-3ab3-43e2-8b0d-8045e26d6723] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 837.944032] env[63028]: INFO nova.compute.manager [None req-b709110c-6e00-4eb2-85eb-129887ad428d tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] [instance: 13e0ca05-3ab3-43e2-8b0d-8045e26d6723] Took 4.48 seconds to spawn the instance on the hypervisor. [ 837.944032] env[63028]: DEBUG nova.compute.manager [None req-b709110c-6e00-4eb2-85eb-129887ad428d tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] [instance: 13e0ca05-3ab3-43e2-8b0d-8045e26d6723] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 837.946981] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a7938e9-888e-456e-afb1-3b914dc3a2c5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.974354] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5757482f-7995-45e3-925d-b18d167e2f76 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.982174] env[63028]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-50cde121-d556-4aa8-b158-c8ec14adb6d4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.986079] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77fc2d5f-faed-4e80-97de-66f94ba3b635 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.023659] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f7dc8a2-d126-46c8-9d06-dcd722f416a6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.041092] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f528ffe4-a61a-4144-af79-4fb53012569e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.058797] env[63028]: DEBUG oslo_vmware.api [None req-dbd96db3-2cb6-4207-bf54-bf453d821f5b tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Task: {'id': task-2735616, 'name': Rename_Task} progress is 99%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.077426] env[63028]: DEBUG nova.compute.manager [req-0fadbf83-4067-4ef1-9e78-af1abe7f708e req-ac5be78a-ee21-48fd-bbd9-9d50e1ce9d8e service nova] [instance: 6e0959ac-8fca-47eb-b501-b50a3e9f025a] Detach interface failed, port_id=8f389aaf-a460-47ed-862a-e45d83b3d9e3, reason: Instance 6e0959ac-8fca-47eb-b501-b50a3e9f025a could not be found. 
{{(pid=63028) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 838.080233] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0515777-6f5d-495b-a9aa-e8e5388250ec {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.095315] env[63028]: DEBUG nova.compute.provider_tree [None req-e1bd3373-3038-4133-a676-f1792a1f8e56 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 838.098640] env[63028]: DEBUG nova.virt.hardware [None req-0af539be-ae50-4675-9cfd-f11dbcbf3335 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 838.098848] env[63028]: DEBUG nova.virt.hardware [None req-0af539be-ae50-4675-9cfd-f11dbcbf3335 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 838.099514] env[63028]: DEBUG nova.virt.hardware [None req-0af539be-ae50-4675-9cfd-f11dbcbf3335 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 838.099514] env[63028]: DEBUG nova.virt.hardware [None req-0af539be-ae50-4675-9cfd-f11dbcbf3335 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 838.099514] env[63028]: DEBUG nova.virt.hardware [None req-0af539be-ae50-4675-9cfd-f11dbcbf3335 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 838.099514] env[63028]: DEBUG nova.virt.hardware [None req-0af539be-ae50-4675-9cfd-f11dbcbf3335 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 838.099764] env[63028]: DEBUG nova.virt.hardware [None req-0af539be-ae50-4675-9cfd-f11dbcbf3335 tempest-ServersAdminTestJSON-462896532 
tempest-ServersAdminTestJSON-462896532-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 838.099857] env[63028]: DEBUG nova.virt.hardware [None req-0af539be-ae50-4675-9cfd-f11dbcbf3335 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 838.100124] env[63028]: DEBUG nova.virt.hardware [None req-0af539be-ae50-4675-9cfd-f11dbcbf3335 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 838.100274] env[63028]: DEBUG nova.virt.hardware [None req-0af539be-ae50-4675-9cfd-f11dbcbf3335 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 838.100660] env[63028]: DEBUG nova.virt.hardware [None req-0af539be-ae50-4675-9cfd-f11dbcbf3335 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 838.101236] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1febd5cb-2456-44d4-9b9a-467663680531 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.109150] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0998f5fe-b350-42ca-ab18-d2fa76869948 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.125877] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-0af539be-ae50-4675-9cfd-f11dbcbf3335 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: ba57ed92-aaef-460c-bd45-d0cbe09e4615] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:78:3c:3d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4576b9d4-535c-40aa-b078-246f671f216e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '66421979-b107-4dd5-9bc4-40bdefa3a5d0', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 838.133638] env[63028]: DEBUG oslo.service.loopingcall [None req-0af539be-ae50-4675-9cfd-f11dbcbf3335 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 838.134055] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ba57ed92-aaef-460c-bd45-d0cbe09e4615] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 838.134664] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6bf124d6-92ba-48a4-be8e-ac9b0d1df213 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.153672] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 838.153672] env[63028]: value = "task-2735618" [ 838.153672] env[63028]: _type = "Task" [ 838.153672] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.161052] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735618, 'name': CreateVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.340899] env[63028]: INFO nova.compute.manager [-] [instance: 6e0959ac-8fca-47eb-b501-b50a3e9f025a] Took 1.27 seconds to deallocate network for instance. [ 838.342022] env[63028]: DEBUG nova.compute.manager [None req-c28be328-46e8-4c9b-904e-d46ed583a88b tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] [instance: 022125c4-2b0c-4a2c-ae63-18968887316e] Start spawning the instance on the hypervisor. {{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 838.358157] env[63028]: DEBUG oslo_vmware.api [None req-541b8fc7-3b9c-4592-9ae6-6bc4eccb0a4c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2735617, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.369274] env[63028]: DEBUG nova.virt.hardware [None req-c28be328-46e8-4c9b-904e-d46ed583a88b tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 838.369612] env[63028]: DEBUG nova.virt.hardware [None req-c28be328-46e8-4c9b-904e-d46ed583a88b tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 838.369776] env[63028]: DEBUG nova.virt.hardware [None req-c28be328-46e8-4c9b-904e-d46ed583a88b tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 838.370192] env[63028]: DEBUG nova.virt.hardware [None req-c28be328-46e8-4c9b-904e-d46ed583a88b tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 838.370192] env[63028]: DEBUG nova.virt.hardware [None req-c28be328-46e8-4c9b-904e-d46ed583a88b tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 838.370340] env[63028]: DEBUG nova.virt.hardware [None req-c28be328-46e8-4c9b-904e-d46ed583a88b tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 838.370515] env[63028]: DEBUG nova.virt.hardware [None req-c28be328-46e8-4c9b-904e-d46ed583a88b tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 838.370787] env[63028]: DEBUG nova.virt.hardware [None req-c28be328-46e8-4c9b-904e-d46ed583a88b tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 838.370842] env[63028]: DEBUG nova.virt.hardware [None req-c28be328-46e8-4c9b-904e-d46ed583a88b tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 838.370995] env[63028]: DEBUG nova.virt.hardware [None req-c28be328-46e8-4c9b-904e-d46ed583a88b tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 838.371182] env[63028]: DEBUG nova.virt.hardware [None req-c28be328-46e8-4c9b-904e-d46ed583a88b tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 838.372152] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6319afb3-0903-466f-98f1-01d116581e41 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.381029] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95e06eef-d2d8-4aa5-a624-5c08bf4a54fd {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.402471] env[63028]: DEBUG oslo_concurrency.lockutils [None req-db2d1a1b-bab5-4d65-92a7-d30b7a67907c tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 838.465081] env[63028]: INFO nova.compute.manager [None req-b709110c-6e00-4eb2-85eb-129887ad428d tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] [instance: 13e0ca05-3ab3-43e2-8b0d-8045e26d6723] Took 44.29 seconds to build instance. [ 838.547821] env[63028]: DEBUG oslo_vmware.api [None req-dbd96db3-2cb6-4207-bf54-bf453d821f5b tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Task: {'id': task-2735616, 'name': Rename_Task} progress is 99%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.605705] env[63028]: DEBUG nova.scheduler.client.report [None req-e1bd3373-3038-4133-a676-f1792a1f8e56 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 838.663551] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735618, 'name': CreateVM_Task} progress is 25%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.745486] env[63028]: DEBUG nova.compute.manager [req-2795a54a-2ec2-45c6-8eda-787a1beecefb req-42f015f4-8b5b-40d5-aabf-d6b10a908a3b service nova] [instance: 022125c4-2b0c-4a2c-ae63-18968887316e] Received event network-vif-plugged-9c71a142-583d-4c67-95dc-f09f116bf3c5 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 838.745640] env[63028]: DEBUG oslo_concurrency.lockutils [req-2795a54a-2ec2-45c6-8eda-787a1beecefb req-42f015f4-8b5b-40d5-aabf-d6b10a908a3b service nova] Acquiring lock "022125c4-2b0c-4a2c-ae63-18968887316e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 838.745904] env[63028]: DEBUG oslo_concurrency.lockutils [req-2795a54a-2ec2-45c6-8eda-787a1beecefb req-42f015f4-8b5b-40d5-aabf-d6b10a908a3b service nova] Lock "022125c4-2b0c-4a2c-ae63-18968887316e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 838.746127] env[63028]: DEBUG oslo_concurrency.lockutils [req-2795a54a-2ec2-45c6-8eda-787a1beecefb req-42f015f4-8b5b-40d5-aabf-d6b10a908a3b service nova] Lock "022125c4-2b0c-4a2c-ae63-18968887316e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 838.746313] env[63028]: DEBUG nova.compute.manager [req-2795a54a-2ec2-45c6-8eda-787a1beecefb req-42f015f4-8b5b-40d5-aabf-d6b10a908a3b service nova] [instance: 022125c4-2b0c-4a2c-ae63-18968887316e] No waiting events found dispatching network-vif-plugged-9c71a142-583d-4c67-95dc-f09f116bf3c5 {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 838.746535] env[63028]: WARNING nova.compute.manager [req-2795a54a-2ec2-45c6-8eda-787a1beecefb req-42f015f4-8b5b-40d5-aabf-d6b10a908a3b service nova] [instance: 022125c4-2b0c-4a2c-ae63-18968887316e] Received unexpected event network-vif-plugged-9c71a142-583d-4c67-95dc-f09f116bf3c5 for instance with vm_state building and task_state spawning. 
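[Editor's note] The scheduler report record above lists the unchanged inventory for resource provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2. As a minimal illustrative sketch only (not Nova's or Placement's actual code; the dict layout and function name below are assumptions for illustration), the schedulable capacity implied by that record follows the usual Placement rule capacity = (total - reserved) * allocation_ratio:

    # Illustrative sketch: derive usable capacity from the inventory values
    # logged above. Assumes the standard Placement capacity formula; the
    # structure and names here are hypothetical, not Nova internals.
    inventory = {
        "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
        "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
    }

    def usable_capacity(inv: dict) -> dict:
        """Return the schedulable capacity per resource class."""
        return {
            rc: (v["total"] - v["reserved"]) * v["allocation_ratio"]
            for rc, v in inv.items()
        }

    if __name__ == "__main__":
        for rc, cap in usable_capacity(inventory).items():
            print(f"{rc}: {cap}")  # VCPU: 192.0, MEMORY_MB: 196078.0, DISK_GB: 400.0

With the values logged for this provider that works out to 192 schedulable VCPUs, 196078 MB of RAM and 400 GB of disk, which is consistent with the resize claim for instance 52b19182-a7e2-4461-b4eb-e6cd8a30024e succeeding earlier in this section.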
[ 838.815075] env[63028]: DEBUG nova.compute.manager [None req-c4971efa-4f64-4aab-bb96-ae0781bd593b tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Stashing vm_state: stopped {{(pid=63028) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 838.857026] env[63028]: DEBUG oslo_concurrency.lockutils [None req-0279905d-506f-46c0-b4c8-a468b6375aab tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 838.857343] env[63028]: DEBUG oslo_vmware.api [None req-541b8fc7-3b9c-4592-9ae6-6bc4eccb0a4c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2735617, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.776229} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.857586] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-541b8fc7-3b9c-4592-9ae6-6bc4eccb0a4c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] 514c83d1-4fb1-435c-8c25-aa112c744131/514c83d1-4fb1-435c-8c25-aa112c744131.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 838.857791] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-541b8fc7-3b9c-4592-9ae6-6bc4eccb0a4c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 514c83d1-4fb1-435c-8c25-aa112c744131] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 838.858052] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c753d5cc-12ca-4505-b57b-5aa7b2f0f3c5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.865045] env[63028]: DEBUG oslo_vmware.api [None req-541b8fc7-3b9c-4592-9ae6-6bc4eccb0a4c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Waiting for the task: (returnval){ [ 838.865045] env[63028]: value = "task-2735619" [ 838.865045] env[63028]: _type = "Task" [ 838.865045] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.873482] env[63028]: DEBUG oslo_vmware.api [None req-541b8fc7-3b9c-4592-9ae6-6bc4eccb0a4c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2735619, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.883491] env[63028]: DEBUG nova.network.neutron [None req-c28be328-46e8-4c9b-904e-d46ed583a88b tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] [instance: 022125c4-2b0c-4a2c-ae63-18968887316e] Successfully updated port: 9c71a142-583d-4c67-95dc-f09f116bf3c5 {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 838.906989] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c1f44d9d-ffc6-445a-8ea2-fd14b45f2d79 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Acquiring lock "70888889-4965-47ab-ad47-59f1c1286bd8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 838.907385] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c1f44d9d-ffc6-445a-8ea2-fd14b45f2d79 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Lock "70888889-4965-47ab-ad47-59f1c1286bd8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 838.907666] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c1f44d9d-ffc6-445a-8ea2-fd14b45f2d79 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Acquiring lock "70888889-4965-47ab-ad47-59f1c1286bd8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 838.907912] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c1f44d9d-ffc6-445a-8ea2-fd14b45f2d79 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Lock "70888889-4965-47ab-ad47-59f1c1286bd8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 838.908164] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c1f44d9d-ffc6-445a-8ea2-fd14b45f2d79 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Lock "70888889-4965-47ab-ad47-59f1c1286bd8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 838.910241] env[63028]: INFO nova.compute.manager [None req-c1f44d9d-ffc6-445a-8ea2-fd14b45f2d79 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 70888889-4965-47ab-ad47-59f1c1286bd8] Terminating instance [ 838.966640] env[63028]: DEBUG oslo_concurrency.lockutils [None req-b709110c-6e00-4eb2-85eb-129887ad428d tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Lock "13e0ca05-3ab3-43e2-8b0d-8045e26d6723" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 53.677s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 
839.048251] env[63028]: DEBUG oslo_vmware.api [None req-dbd96db3-2cb6-4207-bf54-bf453d821f5b tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Task: {'id': task-2735616, 'name': Rename_Task, 'duration_secs': 1.328951} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.048543] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-dbd96db3-2cb6-4207-bf54-bf453d821f5b tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] [instance: 56d39801-f3e7-4cfe-a038-6a5e762bfda8] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 839.048783] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9fe5c531-6607-4e73-8874-3f782c99a261 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.055697] env[63028]: DEBUG oslo_vmware.api [None req-dbd96db3-2cb6-4207-bf54-bf453d821f5b tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Waiting for the task: (returnval){ [ 839.055697] env[63028]: value = "task-2735620" [ 839.055697] env[63028]: _type = "Task" [ 839.055697] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.759031] env[63028]: DEBUG oslo_concurrency.lockutils [None req-e1bd3373-3038-4133-a676-f1792a1f8e56 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 3.439s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 839.759267] env[63028]: INFO nova.compute.manager [None req-e1bd3373-3038-4133-a676-f1792a1f8e56 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: 52b19182-a7e2-4461-b4eb-e6cd8a30024e] Migrating [ 839.769087] env[63028]: INFO nova.compute.manager [None req-e3c5b66a-8429-42f3-9cb6-fcb4bd521431 tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] [instance: 13e0ca05-3ab3-43e2-8b0d-8045e26d6723] Rebuilding instance [ 839.771320] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c28be328-46e8-4c9b-904e-d46ed583a88b tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Acquiring lock "refresh_cache-022125c4-2b0c-4a2c-ae63-18968887316e" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 839.771476] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c28be328-46e8-4c9b-904e-d46ed583a88b tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Acquired lock "refresh_cache-022125c4-2b0c-4a2c-ae63-18968887316e" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 839.771781] env[63028]: DEBUG nova.network.neutron [None req-c28be328-46e8-4c9b-904e-d46ed583a88b tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] [instance: 022125c4-2b0c-4a2c-ae63-18968887316e] Building network info cache for instance {{(pid=63028) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2067}} [ 839.772898] env[63028]: DEBUG nova.compute.manager [None req-c1f44d9d-ffc6-445a-8ea2-fd14b45f2d79 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 70888889-4965-47ab-ad47-59f1c1286bd8] Start destroying the instance on the hypervisor. {{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 839.773111] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c1f44d9d-ffc6-445a-8ea2-fd14b45f2d79 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 70888889-4965-47ab-ad47-59f1c1286bd8] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 839.773463] env[63028]: DEBUG nova.compute.manager [None req-05595fcf-c280-4b38-9166-e6728566c5d0 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: a2f7d7c6-7931-4b21-a29c-bb9965577210] Starting instance... {{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 839.775731] env[63028]: DEBUG oslo_vmware.api [None req-dbd96db3-2cb6-4207-bf54-bf453d821f5b tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Task: {'id': task-2735620, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.775956] env[63028]: WARNING oslo_vmware.common.loopingcall [None req-dbd96db3-2cb6-4207-bf54-bf453d821f5b tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] task run outlasted interval by 0.220005 sec [ 839.781142] env[63028]: DEBUG oslo_concurrency.lockutils [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 28.368s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 839.790268] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c46dae72-5ad9-46fc-8b0b-c56ba4059b21 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.802620] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c4971efa-4f64-4aab-bb96-ae0781bd593b tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 839.817599] env[63028]: DEBUG oslo_vmware.api [None req-dbd96db3-2cb6-4207-bf54-bf453d821f5b tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Task: {'id': task-2735620, 'name': PowerOnVM_Task} progress is 78%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.817599] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735618, 'name': CreateVM_Task, 'duration_secs': 0.719838} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.826758] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ba57ed92-aaef-460c-bd45-d0cbe09e4615] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 839.826758] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c1f44d9d-ffc6-445a-8ea2-fd14b45f2d79 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 70888889-4965-47ab-ad47-59f1c1286bd8] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 839.826758] env[63028]: DEBUG oslo_vmware.api [None req-541b8fc7-3b9c-4592-9ae6-6bc4eccb0a4c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2735619, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070149} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.826758] env[63028]: DEBUG oslo_concurrency.lockutils [None req-0af539be-ae50-4675-9cfd-f11dbcbf3335 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 839.826758] env[63028]: DEBUG oslo_concurrency.lockutils [None req-0af539be-ae50-4675-9cfd-f11dbcbf3335 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 839.826758] env[63028]: DEBUG oslo_concurrency.lockutils [None req-0af539be-ae50-4675-9cfd-f11dbcbf3335 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 839.827190] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-75eea7f8-fb69-46f5-a022-f2a3c6223440 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.829569] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-541b8fc7-3b9c-4592-9ae6-6bc4eccb0a4c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 514c83d1-4fb1-435c-8c25-aa112c744131] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 839.829569] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a055e4a9-668e-49c4-aff7-9a1fa6f79e7f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.832045] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3336e485-db1c-4505-a418-495ce8d096ec {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.854385] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None 
req-541b8fc7-3b9c-4592-9ae6-6bc4eccb0a4c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 514c83d1-4fb1-435c-8c25-aa112c744131] Reconfiguring VM instance instance-0000003c to attach disk [datastore1] 514c83d1-4fb1-435c-8c25-aa112c744131/514c83d1-4fb1-435c-8c25-aa112c744131.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 839.857999] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3cafd45c-cb05-4c6d-a171-c0b166c23c0d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.872608] env[63028]: DEBUG oslo_vmware.api [None req-0af539be-ae50-4675-9cfd-f11dbcbf3335 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Waiting for the task: (returnval){ [ 839.872608] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]523a480a-2f85-280b-6e4e-5205520016da" [ 839.872608] env[63028]: _type = "Task" [ 839.872608] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.875147] env[63028]: DEBUG nova.compute.manager [None req-e3c5b66a-8429-42f3-9cb6-fcb4bd521431 tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] [instance: 13e0ca05-3ab3-43e2-8b0d-8045e26d6723] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 839.879591] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55110757-4e94-4e79-ac3f-7fc1b2e5dbdd {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.883578] env[63028]: DEBUG oslo_vmware.api [None req-541b8fc7-3b9c-4592-9ae6-6bc4eccb0a4c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Waiting for the task: (returnval){ [ 839.883578] env[63028]: value = "task-2735622" [ 839.883578] env[63028]: _type = "Task" [ 839.883578] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.897023] env[63028]: DEBUG oslo_vmware.api [None req-0af539be-ae50-4675-9cfd-f11dbcbf3335 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]523a480a-2f85-280b-6e4e-5205520016da, 'name': SearchDatastore_Task, 'duration_secs': 0.010974} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.898721] env[63028]: DEBUG oslo_concurrency.lockutils [None req-0af539be-ae50-4675-9cfd-f11dbcbf3335 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 839.899050] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-0af539be-ae50-4675-9cfd-f11dbcbf3335 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: ba57ed92-aaef-460c-bd45-d0cbe09e4615] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 839.899302] env[63028]: DEBUG oslo_concurrency.lockutils [None req-0af539be-ae50-4675-9cfd-f11dbcbf3335 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 839.899454] env[63028]: DEBUG oslo_concurrency.lockutils [None req-0af539be-ae50-4675-9cfd-f11dbcbf3335 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 839.899633] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-0af539be-ae50-4675-9cfd-f11dbcbf3335 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 839.899904] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c1f44d9d-ffc6-445a-8ea2-fd14b45f2d79 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 70888889-4965-47ab-ad47-59f1c1286bd8] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 839.900091] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c1f44d9d-ffc6-445a-8ea2-fd14b45f2d79 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 70888889-4965-47ab-ad47-59f1c1286bd8] Deleting contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 839.900260] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-c1f44d9d-ffc6-445a-8ea2-fd14b45f2d79 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Deleting the datastore file [datastore2] 70888889-4965-47ab-ad47-59f1c1286bd8 {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 839.900797] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a7d445a3-6186-4b89-9d1f-857492f757d3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.902754] env[63028]: DEBUG oslo_vmware.service [-] Invoking 
FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d06ba7db-870b-47ff-acec-bb4718262829 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.907700] env[63028]: DEBUG oslo_vmware.api [None req-541b8fc7-3b9c-4592-9ae6-6bc4eccb0a4c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2735622, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.916227] env[63028]: DEBUG oslo_vmware.api [None req-c1f44d9d-ffc6-445a-8ea2-fd14b45f2d79 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Waiting for the task: (returnval){ [ 839.916227] env[63028]: value = "task-2735623" [ 839.916227] env[63028]: _type = "Task" [ 839.916227] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.916661] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-0af539be-ae50-4675-9cfd-f11dbcbf3335 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 839.916937] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-0af539be-ae50-4675-9cfd-f11dbcbf3335 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 839.920675] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ef68c65b-3b81-43cc-bb2c-03c790079b65 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.930447] env[63028]: DEBUG oslo_vmware.api [None req-c1f44d9d-ffc6-445a-8ea2-fd14b45f2d79 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2735623, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.932459] env[63028]: DEBUG oslo_vmware.api [None req-0af539be-ae50-4675-9cfd-f11dbcbf3335 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Waiting for the task: (returnval){ [ 839.932459] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52b3cd64-dea7-bb71-1f1d-e68000a6bffe" [ 839.932459] env[63028]: _type = "Task" [ 839.932459] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.941878] env[63028]: DEBUG oslo_vmware.api [None req-0af539be-ae50-4675-9cfd-f11dbcbf3335 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52b3cd64-dea7-bb71-1f1d-e68000a6bffe, 'name': SearchDatastore_Task, 'duration_secs': 0.008228} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.942694] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7fa8eca4-c62e-4151-8fc3-f691c874ae13 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.947351] env[63028]: DEBUG oslo_vmware.api [None req-0af539be-ae50-4675-9cfd-f11dbcbf3335 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Waiting for the task: (returnval){ [ 839.947351] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5259c9a6-b1e7-51df-d49e-eb0f40819e98" [ 839.947351] env[63028]: _type = "Task" [ 839.947351] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.956115] env[63028]: DEBUG oslo_vmware.api [None req-0af539be-ae50-4675-9cfd-f11dbcbf3335 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5259c9a6-b1e7-51df-d49e-eb0f40819e98, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.294736] env[63028]: DEBUG oslo_concurrency.lockutils [None req-05595fcf-c280-4b38-9166-e6728566c5d0 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 840.299111] env[63028]: DEBUG oslo_concurrency.lockutils [None req-e1bd3373-3038-4133-a676-f1792a1f8e56 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Acquiring lock "refresh_cache-52b19182-a7e2-4461-b4eb-e6cd8a30024e" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 840.299284] env[63028]: DEBUG oslo_concurrency.lockutils [None req-e1bd3373-3038-4133-a676-f1792a1f8e56 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Acquired lock "refresh_cache-52b19182-a7e2-4461-b4eb-e6cd8a30024e" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 840.299453] env[63028]: DEBUG nova.network.neutron [None req-e1bd3373-3038-4133-a676-f1792a1f8e56 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: 52b19182-a7e2-4461-b4eb-e6cd8a30024e] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 840.314221] env[63028]: DEBUG oslo_vmware.api [None req-dbd96db3-2cb6-4207-bf54-bf453d821f5b tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Task: {'id': task-2735620, 'name': PowerOnVM_Task, 'duration_secs': 1.150978} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.317512] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-dbd96db3-2cb6-4207-bf54-bf453d821f5b tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] [instance: 56d39801-f3e7-4cfe-a038-6a5e762bfda8] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 840.317883] env[63028]: INFO nova.compute.manager [None req-dbd96db3-2cb6-4207-bf54-bf453d821f5b tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] [instance: 56d39801-f3e7-4cfe-a038-6a5e762bfda8] Took 9.50 seconds to spawn the instance on the hypervisor. [ 840.317942] env[63028]: DEBUG nova.compute.manager [None req-dbd96db3-2cb6-4207-bf54-bf453d821f5b tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] [instance: 56d39801-f3e7-4cfe-a038-6a5e762bfda8] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 840.319130] env[63028]: DEBUG nova.network.neutron [None req-c28be328-46e8-4c9b-904e-d46ed583a88b tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] [instance: 022125c4-2b0c-4a2c-ae63-18968887316e] Instance cache missing network info. {{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 840.321384] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-532d5388-34cb-470d-b556-c68599619605 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.393775] env[63028]: DEBUG oslo_vmware.api [None req-541b8fc7-3b9c-4592-9ae6-6bc4eccb0a4c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2735622, 'name': ReconfigVM_Task, 'duration_secs': 0.314405} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.394189] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-541b8fc7-3b9c-4592-9ae6-6bc4eccb0a4c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 514c83d1-4fb1-435c-8c25-aa112c744131] Reconfigured VM instance instance-0000003c to attach disk [datastore1] 514c83d1-4fb1-435c-8c25-aa112c744131/514c83d1-4fb1-435c-8c25-aa112c744131.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 840.394843] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a019227d-25c8-4bb9-b2b1-cb49a948b9e8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.404814] env[63028]: DEBUG oslo_vmware.api [None req-541b8fc7-3b9c-4592-9ae6-6bc4eccb0a4c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Waiting for the task: (returnval){ [ 840.404814] env[63028]: value = "task-2735624" [ 840.404814] env[63028]: _type = "Task" [ 840.404814] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.415980] env[63028]: DEBUG oslo_vmware.api [None req-541b8fc7-3b9c-4592-9ae6-6bc4eccb0a4c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2735624, 'name': Rename_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.423549] env[63028]: DEBUG oslo_vmware.api [None req-c1f44d9d-ffc6-445a-8ea2-fd14b45f2d79 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2735623, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.18043} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.423621] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-c1f44d9d-ffc6-445a-8ea2-fd14b45f2d79 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 840.423833] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c1f44d9d-ffc6-445a-8ea2-fd14b45f2d79 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 70888889-4965-47ab-ad47-59f1c1286bd8] Deleted contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 840.424019] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c1f44d9d-ffc6-445a-8ea2-fd14b45f2d79 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 70888889-4965-47ab-ad47-59f1c1286bd8] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 840.424198] env[63028]: INFO nova.compute.manager [None req-c1f44d9d-ffc6-445a-8ea2-fd14b45f2d79 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 70888889-4965-47ab-ad47-59f1c1286bd8] Took 0.65 seconds to destroy the instance on the hypervisor. [ 840.424441] env[63028]: DEBUG oslo.service.loopingcall [None req-c1f44d9d-ffc6-445a-8ea2-fd14b45f2d79 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 840.424702] env[63028]: DEBUG nova.compute.manager [-] [instance: 70888889-4965-47ab-ad47-59f1c1286bd8] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 840.424702] env[63028]: DEBUG nova.network.neutron [-] [instance: 70888889-4965-47ab-ad47-59f1c1286bd8] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 840.457359] env[63028]: DEBUG oslo_vmware.api [None req-0af539be-ae50-4675-9cfd-f11dbcbf3335 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5259c9a6-b1e7-51df-d49e-eb0f40819e98, 'name': SearchDatastore_Task, 'duration_secs': 0.008165} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.457634] env[63028]: DEBUG oslo_concurrency.lockutils [None req-0af539be-ae50-4675-9cfd-f11dbcbf3335 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 840.457885] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-0af539be-ae50-4675-9cfd-f11dbcbf3335 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] ba57ed92-aaef-460c-bd45-d0cbe09e4615/ba57ed92-aaef-460c-bd45-d0cbe09e4615.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 840.458162] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4dfbc54e-24f8-4e1e-8a71-eef58c35757c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.466698] env[63028]: DEBUG oslo_vmware.api [None req-0af539be-ae50-4675-9cfd-f11dbcbf3335 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Waiting for the task: (returnval){ [ 840.466698] env[63028]: value = "task-2735625" [ 840.466698] env[63028]: _type = "Task" [ 840.466698] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.467780] env[63028]: DEBUG nova.network.neutron [None req-c28be328-46e8-4c9b-904e-d46ed583a88b tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] [instance: 022125c4-2b0c-4a2c-ae63-18968887316e] Updating instance_info_cache with network_info: [{"id": "9c71a142-583d-4c67-95dc-f09f116bf3c5", "address": "fa:16:3e:c3:2b:82", "network": {"id": "47c482bc-2ff1-431d-8910-0bf36def79a2", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.150", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "96661ac8d4f04d6e97eea4809b444133", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56136ef6-99d7-4562-9a9f-d66fec951c5c", "external-id": "nsx-vlan-transportzone-32", "segmentation_id": 32, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9c71a142-58", "ovs_interfaceid": "9c71a142-583d-4c67-95dc-f09f116bf3c5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 840.476454] env[63028]: DEBUG oslo_vmware.api [None req-0af539be-ae50-4675-9cfd-f11dbcbf3335 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': task-2735625, 
'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.665432] env[63028]: DEBUG nova.network.neutron [None req-e1bd3373-3038-4133-a676-f1792a1f8e56 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: 52b19182-a7e2-4461-b4eb-e6cd8a30024e] Updating instance_info_cache with network_info: [{"id": "7dd5be5d-a88d-4dcd-a42d-7842895207f7", "address": "fa:16:3e:45:d9:fe", "network": {"id": "47c482bc-2ff1-431d-8910-0bf36def79a2", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.36", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "96661ac8d4f04d6e97eea4809b444133", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56136ef6-99d7-4562-9a9f-d66fec951c5c", "external-id": "nsx-vlan-transportzone-32", "segmentation_id": 32, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7dd5be5d-a8", "ovs_interfaceid": "7dd5be5d-a88d-4dcd-a42d-7842895207f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 840.798348] env[63028]: DEBUG nova.compute.manager [req-2e540bd9-7a6c-449f-bdd5-b8d55b665a93 req-d72ba021-2352-460f-bcd1-0f058f097a70 service nova] [instance: 022125c4-2b0c-4a2c-ae63-18968887316e] Received event network-changed-9c71a142-583d-4c67-95dc-f09f116bf3c5 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 840.798348] env[63028]: DEBUG nova.compute.manager [req-2e540bd9-7a6c-449f-bdd5-b8d55b665a93 req-d72ba021-2352-460f-bcd1-0f058f097a70 service nova] [instance: 022125c4-2b0c-4a2c-ae63-18968887316e] Refreshing instance network info cache due to event network-changed-9c71a142-583d-4c67-95dc-f09f116bf3c5. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 840.798348] env[63028]: DEBUG oslo_concurrency.lockutils [req-2e540bd9-7a6c-449f-bdd5-b8d55b665a93 req-d72ba021-2352-460f-bcd1-0f058f097a70 service nova] Acquiring lock "refresh_cache-022125c4-2b0c-4a2c-ae63-18968887316e" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 840.816930] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Applying migration context for instance 52b19182-a7e2-4461-b4eb-e6cd8a30024e as it has an incoming, in-progress migration d7f275c5-0c12-4e9a-baa7-f99e7b616c4f. 
Migration status is pre-migrating {{(pid=63028) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 840.820087] env[63028]: INFO nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: 52b19182-a7e2-4461-b4eb-e6cd8a30024e] Updating resource usage from migration d7f275c5-0c12-4e9a-baa7-f99e7b616c4f [ 840.821257] env[63028]: INFO nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Updating resource usage from migration b2428275-4704-4b98-81dd-222c963eb311 [ 840.825314] env[63028]: DEBUG nova.compute.manager [req-a3382bff-b515-4af4-9334-38785e98fe27 req-30e84971-1cf4-4a00-8646-399fbedd5f3d service nova] [instance: 70888889-4965-47ab-ad47-59f1c1286bd8] Received event network-vif-deleted-391ce9f8-949e-4d9c-a74f-8c48a5dd3dd0 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 840.825314] env[63028]: INFO nova.compute.manager [req-a3382bff-b515-4af4-9334-38785e98fe27 req-30e84971-1cf4-4a00-8646-399fbedd5f3d service nova] [instance: 70888889-4965-47ab-ad47-59f1c1286bd8] Neutron deleted interface 391ce9f8-949e-4d9c-a74f-8c48a5dd3dd0; detaching it from the instance and deleting it from the info cache [ 840.825498] env[63028]: DEBUG nova.network.neutron [req-a3382bff-b515-4af4-9334-38785e98fe27 req-30e84971-1cf4-4a00-8646-399fbedd5f3d service nova] [instance: 70888889-4965-47ab-ad47-59f1c1286bd8] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 840.846025] env[63028]: INFO nova.compute.manager [None req-dbd96db3-2cb6-4207-bf54-bf453d821f5b tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] [instance: 56d39801-f3e7-4cfe-a038-6a5e762bfda8] Took 53.14 seconds to build instance. [ 840.857911] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance d663c2df-ae54-4c50-a70f-e2180700c700 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 840.858089] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance ba57ed92-aaef-460c-bd45-d0cbe09e4615 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 840.858345] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance b9d9fe4e-438c-4f68-b011-9eb9e10a381c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 840.858345] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance f3277886-4498-45c6-be68-e71d8293dc00 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 840.858475] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance d6137c80-0c09-4655-b264-472753b4fa9c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 840.859047] env[63028]: WARNING nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance 6e0959ac-8fca-47eb-b501-b50a3e9f025a is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 840.859047] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance 600195de-ceb4-41a6-9ade-dda8b898e4db actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 840.859047] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance 1316318e-8dcf-4ac2-b40a-6a3ab6964997 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 840.859047] env[63028]: WARNING nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance 5982cd5d-abf1-42d4-bb44-8d79de599f11 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 840.859214] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance 3e45e7f3-a34f-4eab-9fff-1c874c832e2a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 840.859214] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance da88308f-ce62-40af-adae-e38aa506bdd9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 840.859324] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance cd11b318-9158-4f1d-8aa8-1c9d565bb5d5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 840.859434] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance 7e914e49-0d70-4024-940b-ad2a15e9dff7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 840.859543] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance 15326f55-2db8-47c3-b1fd-ce8ba1174c79 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 840.859650] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance 85aafadb-81d6-4687-aed1-fbe829e5f95f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 840.859760] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance a1d00736-1a8d-46e0-9358-46e848b94797 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 2, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 840.860117] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance 63524cd8-81de-419f-bb07-0326f3cb393f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 840.860920] env[63028]: WARNING nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance e346c31b-ef1b-4f75-8564-cefe26bd672f is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 840.861101] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance 70888889-4965-47ab-ad47-59f1c1286bd8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 840.861222] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance 514c83d1-4fb1-435c-8c25-aa112c744131 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 840.861347] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance f4718363-73b2-4016-8849-f75e98259023 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 840.861493] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 840.861605] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance 56d39801-f3e7-4cfe-a038-6a5e762bfda8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 840.861713] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance 13e0ca05-3ab3-43e2-8b0d-8045e26d6723 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 840.861820] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance 022125c4-2b0c-4a2c-ae63-18968887316e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 840.904121] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3c5b66a-8429-42f3-9cb6-fcb4bd521431 tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] [instance: 13e0ca05-3ab3-43e2-8b0d-8045e26d6723] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 840.904523] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bd0680c6-9143-4f5d-85c5-783b3f08db63 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.919175] env[63028]: DEBUG oslo_vmware.api [None req-541b8fc7-3b9c-4592-9ae6-6bc4eccb0a4c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2735624, 'name': Rename_Task, 'duration_secs': 0.138945} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.921101] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-541b8fc7-3b9c-4592-9ae6-6bc4eccb0a4c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 514c83d1-4fb1-435c-8c25-aa112c744131] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 840.921476] env[63028]: DEBUG oslo_vmware.api [None req-e3c5b66a-8429-42f3-9cb6-fcb4bd521431 tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Waiting for the task: (returnval){ [ 840.921476] env[63028]: value = "task-2735626" [ 840.921476] env[63028]: _type = "Task" [ 840.921476] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.921697] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b94a4009-011a-4a69-9d80-1e8ff4020104 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.933128] env[63028]: DEBUG oslo_vmware.api [None req-541b8fc7-3b9c-4592-9ae6-6bc4eccb0a4c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Waiting for the task: (returnval){ [ 840.933128] env[63028]: value = "task-2735627" [ 840.933128] env[63028]: _type = "Task" [ 840.933128] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.936947] env[63028]: DEBUG oslo_vmware.api [None req-e3c5b66a-8429-42f3-9cb6-fcb4bd521431 tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Task: {'id': task-2735626, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.947481] env[63028]: DEBUG oslo_vmware.api [None req-541b8fc7-3b9c-4592-9ae6-6bc4eccb0a4c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2735627, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.973493] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c28be328-46e8-4c9b-904e-d46ed583a88b tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Releasing lock "refresh_cache-022125c4-2b0c-4a2c-ae63-18968887316e" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 840.973841] env[63028]: DEBUG nova.compute.manager [None req-c28be328-46e8-4c9b-904e-d46ed583a88b tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] [instance: 022125c4-2b0c-4a2c-ae63-18968887316e] Instance network_info: |[{"id": "9c71a142-583d-4c67-95dc-f09f116bf3c5", "address": "fa:16:3e:c3:2b:82", "network": {"id": "47c482bc-2ff1-431d-8910-0bf36def79a2", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.150", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "96661ac8d4f04d6e97eea4809b444133", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56136ef6-99d7-4562-9a9f-d66fec951c5c", "external-id": "nsx-vlan-transportzone-32", "segmentation_id": 32, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9c71a142-58", "ovs_interfaceid": "9c71a142-583d-4c67-95dc-f09f116bf3c5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 840.974171] env[63028]: DEBUG oslo_concurrency.lockutils [req-2e540bd9-7a6c-449f-bdd5-b8d55b665a93 req-d72ba021-2352-460f-bcd1-0f058f097a70 service nova] Acquired lock "refresh_cache-022125c4-2b0c-4a2c-ae63-18968887316e" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 840.974848] env[63028]: DEBUG nova.network.neutron [req-2e540bd9-7a6c-449f-bdd5-b8d55b665a93 req-d72ba021-2352-460f-bcd1-0f058f097a70 service nova] [instance: 022125c4-2b0c-4a2c-ae63-18968887316e] Refreshing network info cache for port 9c71a142-583d-4c67-95dc-f09f116bf3c5 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 840.975911] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c28be328-46e8-4c9b-904e-d46ed583a88b tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] [instance: 022125c4-2b0c-4a2c-ae63-18968887316e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c3:2b:82', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '56136ef6-99d7-4562-9a9f-d66fec951c5c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9c71a142-583d-4c67-95dc-f09f116bf3c5', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 840.983383] env[63028]: DEBUG oslo.service.loopingcall [None req-c28be328-46e8-4c9b-904e-d46ed583a88b tempest-ServersAdminNegativeTestJSON-1478593092 
tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 840.984902] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 022125c4-2b0c-4a2c-ae63-18968887316e] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 840.985145] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4bf2942a-3f2f-40cb-a4cb-71a2bfd8c47b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.003310] env[63028]: DEBUG oslo_vmware.api [None req-0af539be-ae50-4675-9cfd-f11dbcbf3335 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': task-2735625, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.513838} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.003967] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-0af539be-ae50-4675-9cfd-f11dbcbf3335 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] ba57ed92-aaef-460c-bd45-d0cbe09e4615/ba57ed92-aaef-460c-bd45-d0cbe09e4615.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 841.004228] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-0af539be-ae50-4675-9cfd-f11dbcbf3335 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: ba57ed92-aaef-460c-bd45-d0cbe09e4615] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 841.004479] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1f1e7bd0-e57e-4d3e-b522-9b94b3f010fd {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.007845] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 841.007845] env[63028]: value = "task-2735628" [ 841.007845] env[63028]: _type = "Task" [ 841.007845] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.012073] env[63028]: DEBUG oslo_vmware.api [None req-0af539be-ae50-4675-9cfd-f11dbcbf3335 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Waiting for the task: (returnval){ [ 841.012073] env[63028]: value = "task-2735629" [ 841.012073] env[63028]: _type = "Task" [ 841.012073] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.018018] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735628, 'name': CreateVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.023308] env[63028]: DEBUG oslo_vmware.api [None req-0af539be-ae50-4675-9cfd-f11dbcbf3335 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': task-2735629, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.169220] env[63028]: DEBUG oslo_concurrency.lockutils [None req-e1bd3373-3038-4133-a676-f1792a1f8e56 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Releasing lock "refresh_cache-52b19182-a7e2-4461-b4eb-e6cd8a30024e" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 841.282649] env[63028]: DEBUG nova.network.neutron [-] [instance: 70888889-4965-47ab-ad47-59f1c1286bd8] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 841.327982] env[63028]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1b8af9ff-126d-4099-83fc-adbd87061158 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.337561] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6af5ebf9-09f0-406f-a8af-324e36b13b35 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.349610] env[63028]: DEBUG oslo_concurrency.lockutils [None req-dbd96db3-2cb6-4207-bf54-bf453d821f5b tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Lock "56d39801-f3e7-4cfe-a038-6a5e762bfda8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 56.282s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 841.382312] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance 4e859327-ccd3-440e-b884-67f6cdadf97f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 841.382539] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Migration d7f275c5-0c12-4e9a-baa7-f99e7b616c4f is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 841.384205] env[63028]: DEBUG nova.compute.manager [req-a3382bff-b515-4af4-9334-38785e98fe27 req-30e84971-1cf4-4a00-8646-399fbedd5f3d service nova] [instance: 70888889-4965-47ab-ad47-59f1c1286bd8] Detach interface failed, port_id=391ce9f8-949e-4d9c-a74f-8c48a5dd3dd0, reason: Instance 70888889-4965-47ab-ad47-59f1c1286bd8 could not be found. {{(pid=63028) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 841.433799] env[63028]: DEBUG oslo_vmware.api [None req-e3c5b66a-8429-42f3-9cb6-fcb4bd521431 tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Task: {'id': task-2735626, 'name': PowerOffVM_Task, 'duration_secs': 0.169792} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.434034] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3c5b66a-8429-42f3-9cb6-fcb4bd521431 tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] [instance: 13e0ca05-3ab3-43e2-8b0d-8045e26d6723] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 841.434288] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-e3c5b66a-8429-42f3-9cb6-fcb4bd521431 tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] [instance: 13e0ca05-3ab3-43e2-8b0d-8045e26d6723] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 841.435090] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41f4cf86-04aa-4aae-8b40-a8436a86c61d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.445959] env[63028]: DEBUG oslo_vmware.api [None req-541b8fc7-3b9c-4592-9ae6-6bc4eccb0a4c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2735627, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.448096] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-e3c5b66a-8429-42f3-9cb6-fcb4bd521431 tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] [instance: 13e0ca05-3ab3-43e2-8b0d-8045e26d6723] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 841.448350] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ca66fc81-fbaa-42ba-a9e1-ba674c23c614 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.487768] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-e3c5b66a-8429-42f3-9cb6-fcb4bd521431 tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] [instance: 13e0ca05-3ab3-43e2-8b0d-8045e26d6723] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 841.487987] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-e3c5b66a-8429-42f3-9cb6-fcb4bd521431 tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] [instance: 13e0ca05-3ab3-43e2-8b0d-8045e26d6723] Deleting contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 841.488195] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-e3c5b66a-8429-42f3-9cb6-fcb4bd521431 tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Deleting the datastore file [datastore1] 13e0ca05-3ab3-43e2-8b0d-8045e26d6723 {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 841.488522] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bfd86c5a-fc6a-4555-95ec-0486a678714d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.494968] env[63028]: DEBUG oslo_vmware.api [None req-e3c5b66a-8429-42f3-9cb6-fcb4bd521431 tempest-ServerShowV254Test-799272594 
tempest-ServerShowV254Test-799272594-project-member] Waiting for the task: (returnval){ [ 841.494968] env[63028]: value = "task-2735631" [ 841.494968] env[63028]: _type = "Task" [ 841.494968] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.503770] env[63028]: DEBUG oslo_vmware.api [None req-e3c5b66a-8429-42f3-9cb6-fcb4bd521431 tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Task: {'id': task-2735631, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.519284] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735628, 'name': CreateVM_Task, 'duration_secs': 0.354803} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.519797] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 022125c4-2b0c-4a2c-ae63-18968887316e] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 841.520544] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c28be328-46e8-4c9b-904e-d46ed583a88b tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 841.520682] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c28be328-46e8-4c9b-904e-d46ed583a88b tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 841.521023] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c28be328-46e8-4c9b-904e-d46ed583a88b tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 841.523880] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-07821ffa-4d1f-46b8-a448-e0837966dc39 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.525459] env[63028]: DEBUG oslo_vmware.api [None req-0af539be-ae50-4675-9cfd-f11dbcbf3335 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': task-2735629, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077853} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.525760] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-0af539be-ae50-4675-9cfd-f11dbcbf3335 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: ba57ed92-aaef-460c-bd45-d0cbe09e4615] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 841.526708] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ebe37ce-89df-4fd9-9b12-352afcf5a132 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.530233] env[63028]: DEBUG oslo_vmware.api [None req-c28be328-46e8-4c9b-904e-d46ed583a88b tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Waiting for the task: (returnval){ [ 841.530233] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]522f6ca2-cbeb-ed6d-241a-0e02c821a40e" [ 841.530233] env[63028]: _type = "Task" [ 841.530233] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.550657] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-0af539be-ae50-4675-9cfd-f11dbcbf3335 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: ba57ed92-aaef-460c-bd45-d0cbe09e4615] Reconfiguring VM instance instance-0000001c to attach disk [datastore1] ba57ed92-aaef-460c-bd45-d0cbe09e4615/ba57ed92-aaef-460c-bd45-d0cbe09e4615.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 841.553551] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c3ced708-cfa6-4037-b63c-a8251aa74860 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.572283] env[63028]: DEBUG oslo_vmware.api [None req-c28be328-46e8-4c9b-904e-d46ed583a88b tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]522f6ca2-cbeb-ed6d-241a-0e02c821a40e, 'name': SearchDatastore_Task, 'duration_secs': 0.009266} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.573300] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c28be328-46e8-4c9b-904e-d46ed583a88b tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 841.573300] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c28be328-46e8-4c9b-904e-d46ed583a88b tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] [instance: 022125c4-2b0c-4a2c-ae63-18968887316e] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 841.573300] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c28be328-46e8-4c9b-904e-d46ed583a88b tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 841.573457] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c28be328-46e8-4c9b-904e-d46ed583a88b tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 841.573529] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-c28be328-46e8-4c9b-904e-d46ed583a88b tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 841.573793] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-49e10dc8-f69c-47da-a3e5-0916b6d3ebc6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.577422] env[63028]: DEBUG oslo_vmware.api [None req-0af539be-ae50-4675-9cfd-f11dbcbf3335 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Waiting for the task: (returnval){ [ 841.577422] env[63028]: value = "task-2735632" [ 841.577422] env[63028]: _type = "Task" [ 841.577422] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.585316] env[63028]: DEBUG oslo_vmware.api [None req-0af539be-ae50-4675-9cfd-f11dbcbf3335 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': task-2735632, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.591069] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-c28be328-46e8-4c9b-904e-d46ed583a88b tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 841.591287] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c28be328-46e8-4c9b-904e-d46ed583a88b tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 841.592246] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5248b373-0bdc-484a-8d07-592d949c6c0c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.598194] env[63028]: DEBUG oslo_vmware.api [None req-c28be328-46e8-4c9b-904e-d46ed583a88b tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Waiting for the task: (returnval){ [ 841.598194] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]527bd982-9a3d-2ee2-11e3-25899caed708" [ 841.598194] env[63028]: _type = "Task" [ 841.598194] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.609960] env[63028]: DEBUG oslo_vmware.api [None req-c28be328-46e8-4c9b-904e-d46ed583a88b tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]527bd982-9a3d-2ee2-11e3-25899caed708, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.785680] env[63028]: INFO nova.compute.manager [-] [instance: 70888889-4965-47ab-ad47-59f1c1286bd8] Took 1.36 seconds to deallocate network for instance. [ 841.808864] env[63028]: DEBUG nova.network.neutron [req-2e540bd9-7a6c-449f-bdd5-b8d55b665a93 req-d72ba021-2352-460f-bcd1-0f058f097a70 service nova] [instance: 022125c4-2b0c-4a2c-ae63-18968887316e] Updated VIF entry in instance network info cache for port 9c71a142-583d-4c67-95dc-f09f116bf3c5. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 841.809157] env[63028]: DEBUG nova.network.neutron [req-2e540bd9-7a6c-449f-bdd5-b8d55b665a93 req-d72ba021-2352-460f-bcd1-0f058f097a70 service nova] [instance: 022125c4-2b0c-4a2c-ae63-18968887316e] Updating instance_info_cache with network_info: [{"id": "9c71a142-583d-4c67-95dc-f09f116bf3c5", "address": "fa:16:3e:c3:2b:82", "network": {"id": "47c482bc-2ff1-431d-8910-0bf36def79a2", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.150", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "96661ac8d4f04d6e97eea4809b444133", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56136ef6-99d7-4562-9a9f-d66fec951c5c", "external-id": "nsx-vlan-transportzone-32", "segmentation_id": 32, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9c71a142-58", "ovs_interfaceid": "9c71a142-583d-4c67-95dc-f09f116bf3c5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 841.852337] env[63028]: DEBUG nova.compute.manager [None req-c2c8e9d3-c866-44bd-b81e-c77e80e04316 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 70147f2f-0b5e-4343-84e4-8bc195a5485d] Starting instance... {{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 841.885724] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance 9773ad95-1894-471d-8020-c7952eac4be4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 841.886089] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance 52b19182-a7e2-4461-b4eb-e6cd8a30024e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 841.946553] env[63028]: DEBUG oslo_vmware.api [None req-541b8fc7-3b9c-4592-9ae6-6bc4eccb0a4c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2735627, 'name': PowerOnVM_Task, 'duration_secs': 0.565469} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.946833] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-541b8fc7-3b9c-4592-9ae6-6bc4eccb0a4c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 514c83d1-4fb1-435c-8c25-aa112c744131] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 841.947060] env[63028]: DEBUG nova.compute.manager [None req-541b8fc7-3b9c-4592-9ae6-6bc4eccb0a4c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 514c83d1-4fb1-435c-8c25-aa112c744131] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 841.947826] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73e7f530-3c91-4567-b81d-ab531558ca60 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.004470] env[63028]: DEBUG oslo_vmware.api [None req-e3c5b66a-8429-42f3-9cb6-fcb4bd521431 tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Task: {'id': task-2735631, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.125322} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.004736] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-e3c5b66a-8429-42f3-9cb6-fcb4bd521431 tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 842.004920] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-e3c5b66a-8429-42f3-9cb6-fcb4bd521431 tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] [instance: 13e0ca05-3ab3-43e2-8b0d-8045e26d6723] Deleted contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 842.005111] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-e3c5b66a-8429-42f3-9cb6-fcb4bd521431 tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] [instance: 13e0ca05-3ab3-43e2-8b0d-8045e26d6723] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 842.087417] env[63028]: DEBUG oslo_vmware.api [None req-0af539be-ae50-4675-9cfd-f11dbcbf3335 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': task-2735632, 'name': ReconfigVM_Task, 'duration_secs': 0.289651} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.087623] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-0af539be-ae50-4675-9cfd-f11dbcbf3335 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: ba57ed92-aaef-460c-bd45-d0cbe09e4615] Reconfigured VM instance instance-0000001c to attach disk [datastore1] ba57ed92-aaef-460c-bd45-d0cbe09e4615/ba57ed92-aaef-460c-bd45-d0cbe09e4615.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 842.088501] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9f8dda60-6b1b-4b58-b88c-958f1561ef27 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.094559] env[63028]: DEBUG oslo_vmware.api [None req-0af539be-ae50-4675-9cfd-f11dbcbf3335 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Waiting for the task: (returnval){ [ 842.094559] env[63028]: value = "task-2735633" [ 842.094559] env[63028]: _type = "Task" [ 842.094559] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.104725] env[63028]: DEBUG oslo_vmware.api [None req-0af539be-ae50-4675-9cfd-f11dbcbf3335 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': task-2735633, 'name': Rename_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.109638] env[63028]: DEBUG oslo_vmware.api [None req-c28be328-46e8-4c9b-904e-d46ed583a88b tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]527bd982-9a3d-2ee2-11e3-25899caed708, 'name': SearchDatastore_Task, 'duration_secs': 0.020449} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.110383] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-76325706-a7c5-45c0-83e2-f1e3f3c1597b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.114957] env[63028]: DEBUG oslo_vmware.api [None req-c28be328-46e8-4c9b-904e-d46ed583a88b tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Waiting for the task: (returnval){ [ 842.114957] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5273ad62-6513-830f-ea59-e556f0758753" [ 842.114957] env[63028]: _type = "Task" [ 842.114957] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.122598] env[63028]: DEBUG oslo_vmware.api [None req-c28be328-46e8-4c9b-904e-d46ed583a88b tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5273ad62-6513-830f-ea59-e556f0758753, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.294159] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c1f44d9d-ffc6-445a-8ea2-fd14b45f2d79 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 842.312037] env[63028]: DEBUG oslo_concurrency.lockutils [req-2e540bd9-7a6c-449f-bdd5-b8d55b665a93 req-d72ba021-2352-460f-bcd1-0f058f097a70 service nova] Releasing lock "refresh_cache-022125c4-2b0c-4a2c-ae63-18968887316e" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 842.374874] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c2c8e9d3-c866-44bd-b81e-c77e80e04316 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 842.391797] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance 79f4ef22-a589-4d5c-8832-5d5dcdd55561 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 842.465962] env[63028]: DEBUG oslo_concurrency.lockutils [None req-541b8fc7-3b9c-4592-9ae6-6bc4eccb0a4c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 842.604266] env[63028]: DEBUG oslo_vmware.api [None req-0af539be-ae50-4675-9cfd-f11dbcbf3335 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': task-2735633, 'name': Rename_Task, 'duration_secs': 0.145783} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.604594] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-0af539be-ae50-4675-9cfd-f11dbcbf3335 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: ba57ed92-aaef-460c-bd45-d0cbe09e4615] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 842.604873] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1a4eb71c-4703-4c55-bf9e-416757a23970 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.611293] env[63028]: DEBUG oslo_vmware.api [None req-0af539be-ae50-4675-9cfd-f11dbcbf3335 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Waiting for the task: (returnval){ [ 842.611293] env[63028]: value = "task-2735634" [ 842.611293] env[63028]: _type = "Task" [ 842.611293] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.620989] env[63028]: DEBUG oslo_vmware.api [None req-0af539be-ae50-4675-9cfd-f11dbcbf3335 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': task-2735634, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.627772] env[63028]: DEBUG oslo_vmware.api [None req-c28be328-46e8-4c9b-904e-d46ed583a88b tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5273ad62-6513-830f-ea59-e556f0758753, 'name': SearchDatastore_Task, 'duration_secs': 0.011259} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.628012] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c28be328-46e8-4c9b-904e-d46ed583a88b tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 842.628270] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-c28be328-46e8-4c9b-904e-d46ed583a88b tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] 022125c4-2b0c-4a2c-ae63-18968887316e/022125c4-2b0c-4a2c-ae63-18968887316e.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 842.628511] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-04e739bd-c1ad-484a-9297-e01fca9c2280 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.633677] env[63028]: DEBUG oslo_vmware.api [None req-c28be328-46e8-4c9b-904e-d46ed583a88b tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Waiting for the task: (returnval){ [ 842.633677] env[63028]: value = "task-2735635" [ 842.633677] env[63028]: _type = "Task" [ 842.633677] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.640412] env[63028]: DEBUG oslo_vmware.api [None req-c28be328-46e8-4c9b-904e-d46ed583a88b tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Task: {'id': task-2735635, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.682851] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50643b9c-8caa-4cba-adf0-3bd9ac7ec4c2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.701836] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-e1bd3373-3038-4133-a676-f1792a1f8e56 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: 52b19182-a7e2-4461-b4eb-e6cd8a30024e] Updating instance '52b19182-a7e2-4461-b4eb-e6cd8a30024e' progress to 0 {{(pid=63028) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 842.894475] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance a50e1167-d8ed-4099-83c3-a5066ab0be1f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 843.043213] env[63028]: DEBUG nova.virt.hardware [None req-e3c5b66a-8429-42f3-9cb6-fcb4bd521431 tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 843.043400] env[63028]: DEBUG nova.virt.hardware [None req-e3c5b66a-8429-42f3-9cb6-fcb4bd521431 tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 843.043613] env[63028]: DEBUG nova.virt.hardware [None req-e3c5b66a-8429-42f3-9cb6-fcb4bd521431 tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 843.043872] env[63028]: DEBUG nova.virt.hardware [None req-e3c5b66a-8429-42f3-9cb6-fcb4bd521431 tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 843.044131] env[63028]: DEBUG nova.virt.hardware [None req-e3c5b66a-8429-42f3-9cb6-fcb4bd521431 tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 843.044378] 
env[63028]: DEBUG nova.virt.hardware [None req-e3c5b66a-8429-42f3-9cb6-fcb4bd521431 tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 843.044685] env[63028]: DEBUG nova.virt.hardware [None req-e3c5b66a-8429-42f3-9cb6-fcb4bd521431 tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 843.044952] env[63028]: DEBUG nova.virt.hardware [None req-e3c5b66a-8429-42f3-9cb6-fcb4bd521431 tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 843.045256] env[63028]: DEBUG nova.virt.hardware [None req-e3c5b66a-8429-42f3-9cb6-fcb4bd521431 tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 843.045490] env[63028]: DEBUG nova.virt.hardware [None req-e3c5b66a-8429-42f3-9cb6-fcb4bd521431 tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 843.045741] env[63028]: DEBUG nova.virt.hardware [None req-e3c5b66a-8429-42f3-9cb6-fcb4bd521431 tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 843.049028] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5d4c457-d021-4543-9cbc-2de2b96312cc {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.058095] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04caea19-421d-407f-85ce-c88d2de975bf {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.074117] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-e3c5b66a-8429-42f3-9cb6-fcb4bd521431 tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] [instance: 13e0ca05-3ab3-43e2-8b0d-8045e26d6723] Instance VIF info [] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 843.079926] env[63028]: DEBUG oslo.service.loopingcall [None req-e3c5b66a-8429-42f3-9cb6-fcb4bd521431 tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 843.080275] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 13e0ca05-3ab3-43e2-8b0d-8045e26d6723] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 843.080543] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-04822a67-bba0-4555-9bb1-7e90605d5714 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.100790] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 843.100790] env[63028]: value = "task-2735636" [ 843.100790] env[63028]: _type = "Task" [ 843.100790] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.110072] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735636, 'name': CreateVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.121588] env[63028]: DEBUG oslo_vmware.api [None req-0af539be-ae50-4675-9cfd-f11dbcbf3335 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': task-2735634, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.144242] env[63028]: DEBUG oslo_vmware.api [None req-c28be328-46e8-4c9b-904e-d46ed583a88b tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Task: {'id': task-2735635, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.210286] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1bd3373-3038-4133-a676-f1792a1f8e56 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: 52b19182-a7e2-4461-b4eb-e6cd8a30024e] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 843.213709] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fbf15011-26ed-4730-a9d8-1a316da2b66e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.221608] env[63028]: DEBUG oslo_vmware.api [None req-e1bd3373-3038-4133-a676-f1792a1f8e56 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Waiting for the task: (returnval){ [ 843.221608] env[63028]: value = "task-2735637" [ 843.221608] env[63028]: _type = "Task" [ 843.221608] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.234431] env[63028]: DEBUG oslo_vmware.api [None req-e1bd3373-3038-4133-a676-f1792a1f8e56 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Task: {'id': task-2735637, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.397541] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance 1d008794-3c1a-46c6-b4eb-3d5441efdb22 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 843.521580] env[63028]: DEBUG oslo_concurrency.lockutils [None req-e73039fe-033f-4adc-a8b7-958d7706819e tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Acquiring lock "514c83d1-4fb1-435c-8c25-aa112c744131" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 843.522385] env[63028]: DEBUG oslo_concurrency.lockutils [None req-e73039fe-033f-4adc-a8b7-958d7706819e tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Lock "514c83d1-4fb1-435c-8c25-aa112c744131" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 843.522385] env[63028]: DEBUG oslo_concurrency.lockutils [None req-e73039fe-033f-4adc-a8b7-958d7706819e tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Acquiring lock "514c83d1-4fb1-435c-8c25-aa112c744131-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 843.522530] env[63028]: DEBUG oslo_concurrency.lockutils [None req-e73039fe-033f-4adc-a8b7-958d7706819e tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Lock "514c83d1-4fb1-435c-8c25-aa112c744131-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 843.522752] env[63028]: DEBUG oslo_concurrency.lockutils [None req-e73039fe-033f-4adc-a8b7-958d7706819e tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Lock "514c83d1-4fb1-435c-8c25-aa112c744131-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 843.525096] env[63028]: INFO nova.compute.manager [None req-e73039fe-033f-4adc-a8b7-958d7706819e tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 514c83d1-4fb1-435c-8c25-aa112c744131] Terminating instance [ 843.609765] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735636, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.621066] env[63028]: DEBUG oslo_vmware.api [None req-0af539be-ae50-4675-9cfd-f11dbcbf3335 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': task-2735634, 'name': PowerOnVM_Task, 'duration_secs': 0.778464} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.621173] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-0af539be-ae50-4675-9cfd-f11dbcbf3335 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: ba57ed92-aaef-460c-bd45-d0cbe09e4615] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 843.621407] env[63028]: DEBUG nova.compute.manager [None req-0af539be-ae50-4675-9cfd-f11dbcbf3335 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: ba57ed92-aaef-460c-bd45-d0cbe09e4615] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 843.622279] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4f23f47-9a63-402b-98ec-7396ae6c1711 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.642697] env[63028]: DEBUG oslo_vmware.api [None req-c28be328-46e8-4c9b-904e-d46ed583a88b tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Task: {'id': task-2735635, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.557127} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.642955] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-c28be328-46e8-4c9b-904e-d46ed583a88b tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] 022125c4-2b0c-4a2c-ae63-18968887316e/022125c4-2b0c-4a2c-ae63-18968887316e.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 843.643180] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c28be328-46e8-4c9b-904e-d46ed583a88b tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] [instance: 022125c4-2b0c-4a2c-ae63-18968887316e] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 843.643421] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-35fbf357-0103-4d06-921f-540d30c72e07 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.650324] env[63028]: DEBUG oslo_vmware.api [None req-c28be328-46e8-4c9b-904e-d46ed583a88b tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Waiting for the task: (returnval){ [ 843.650324] env[63028]: value = "task-2735638" [ 843.650324] env[63028]: _type = "Task" [ 843.650324] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.658342] env[63028]: DEBUG oslo_vmware.api [None req-c28be328-46e8-4c9b-904e-d46ed583a88b tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Task: {'id': task-2735638, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.716232] env[63028]: DEBUG oslo_concurrency.lockutils [None req-376e64d1-6feb-42d8-98f2-d6ae07252d2d tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Acquiring lock "56d39801-f3e7-4cfe-a038-6a5e762bfda8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 843.716468] env[63028]: DEBUG oslo_concurrency.lockutils [None req-376e64d1-6feb-42d8-98f2-d6ae07252d2d tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Lock "56d39801-f3e7-4cfe-a038-6a5e762bfda8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 843.716679] env[63028]: DEBUG oslo_concurrency.lockutils [None req-376e64d1-6feb-42d8-98f2-d6ae07252d2d tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Acquiring lock "56d39801-f3e7-4cfe-a038-6a5e762bfda8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 843.716859] env[63028]: DEBUG oslo_concurrency.lockutils [None req-376e64d1-6feb-42d8-98f2-d6ae07252d2d tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Lock "56d39801-f3e7-4cfe-a038-6a5e762bfda8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 843.717040] env[63028]: DEBUG oslo_concurrency.lockutils [None req-376e64d1-6feb-42d8-98f2-d6ae07252d2d tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Lock "56d39801-f3e7-4cfe-a038-6a5e762bfda8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 843.719164] env[63028]: INFO nova.compute.manager [None req-376e64d1-6feb-42d8-98f2-d6ae07252d2d tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] [instance: 56d39801-f3e7-4cfe-a038-6a5e762bfda8] Terminating instance [ 843.733159] env[63028]: DEBUG oslo_vmware.api [None req-e1bd3373-3038-4133-a676-f1792a1f8e56 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Task: {'id': task-2735637, 'name': PowerOffVM_Task, 'duration_secs': 0.254148} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.733456] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1bd3373-3038-4133-a676-f1792a1f8e56 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: 52b19182-a7e2-4461-b4eb-e6cd8a30024e] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 843.733756] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-e1bd3373-3038-4133-a676-f1792a1f8e56 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: 52b19182-a7e2-4461-b4eb-e6cd8a30024e] Updating instance '52b19182-a7e2-4461-b4eb-e6cd8a30024e' progress to 17 {{(pid=63028) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 843.900913] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance f0ca0d73-d428-4b8c-acac-a80b7b7dd793 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 844.029222] env[63028]: DEBUG nova.compute.manager [None req-e73039fe-033f-4adc-a8b7-958d7706819e tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 514c83d1-4fb1-435c-8c25-aa112c744131] Start destroying the instance on the hypervisor. {{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 844.029470] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-e73039fe-033f-4adc-a8b7-958d7706819e tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 514c83d1-4fb1-435c-8c25-aa112c744131] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 844.030438] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e4d0648-731d-4f3f-acfb-78c646e2ed3b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.038208] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-e73039fe-033f-4adc-a8b7-958d7706819e tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 514c83d1-4fb1-435c-8c25-aa112c744131] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 844.038552] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-274df4a0-17e8-437f-854a-75e9d5aa9089 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.045641] env[63028]: DEBUG oslo_vmware.api [None req-e73039fe-033f-4adc-a8b7-958d7706819e tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Waiting for the task: (returnval){ [ 844.045641] env[63028]: value = "task-2735639" [ 844.045641] env[63028]: _type = "Task" [ 844.045641] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.054118] env[63028]: DEBUG oslo_vmware.api [None req-e73039fe-033f-4adc-a8b7-958d7706819e tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2735639, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.078786] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9c87809b-970a-44ae-a0c9-ae8da38668e0 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Acquiring lock "ed872f21-c2c4-4597-8c9e-9f8d2202b707" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 844.078786] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9c87809b-970a-44ae-a0c9-ae8da38668e0 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Lock "ed872f21-c2c4-4597-8c9e-9f8d2202b707" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 844.111672] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735636, 'name': CreateVM_Task, 'duration_secs': 0.566018} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.111844] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 13e0ca05-3ab3-43e2-8b0d-8045e26d6723] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 844.112496] env[63028]: DEBUG oslo_concurrency.lockutils [None req-e3c5b66a-8429-42f3-9cb6-fcb4bd521431 tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 844.112496] env[63028]: DEBUG oslo_concurrency.lockutils [None req-e3c5b66a-8429-42f3-9cb6-fcb4bd521431 tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 844.112759] env[63028]: DEBUG oslo_concurrency.lockutils [None req-e3c5b66a-8429-42f3-9cb6-fcb4bd521431 tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 844.113044] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-455f18a5-c75d-48e4-b730-84e36de13804 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.118364] env[63028]: DEBUG oslo_vmware.api [None req-e3c5b66a-8429-42f3-9cb6-fcb4bd521431 tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] 
Waiting for the task: (returnval){ [ 844.118364] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5232f5d4-68ff-eab1-46e6-80a3d2c4035d" [ 844.118364] env[63028]: _type = "Task" [ 844.118364] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.127541] env[63028]: DEBUG oslo_vmware.api [None req-e3c5b66a-8429-42f3-9cb6-fcb4bd521431 tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5232f5d4-68ff-eab1-46e6-80a3d2c4035d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.137732] env[63028]: DEBUG oslo_concurrency.lockutils [None req-0af539be-ae50-4675-9cfd-f11dbcbf3335 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 844.160706] env[63028]: DEBUG oslo_vmware.api [None req-c28be328-46e8-4c9b-904e-d46ed583a88b tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Task: {'id': task-2735638, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.146511} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.160974] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c28be328-46e8-4c9b-904e-d46ed583a88b tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] [instance: 022125c4-2b0c-4a2c-ae63-18968887316e] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 844.161804] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30b2a909-20c8-474b-b3b1-a1829780f41f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.184622] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-c28be328-46e8-4c9b-904e-d46ed583a88b tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] [instance: 022125c4-2b0c-4a2c-ae63-18968887316e] Reconfiguring VM instance instance-00000041 to attach disk [datastore1] 022125c4-2b0c-4a2c-ae63-18968887316e/022125c4-2b0c-4a2c-ae63-18968887316e.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 844.184931] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f3aef4db-5ba1-4336-88db-60edac6380ec {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.204995] env[63028]: DEBUG oslo_vmware.api [None req-c28be328-46e8-4c9b-904e-d46ed583a88b tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Waiting for the task: (returnval){ [ 844.204995] env[63028]: value = "task-2735640" [ 844.204995] env[63028]: _type = "Task" [ 844.204995] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.214145] env[63028]: DEBUG oslo_vmware.api [None req-c28be328-46e8-4c9b-904e-d46ed583a88b tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Task: {'id': task-2735640, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.223035] env[63028]: DEBUG nova.compute.manager [None req-376e64d1-6feb-42d8-98f2-d6ae07252d2d tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] [instance: 56d39801-f3e7-4cfe-a038-6a5e762bfda8] Start destroying the instance on the hypervisor. {{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 844.223320] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-376e64d1-6feb-42d8-98f2-d6ae07252d2d tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] [instance: 56d39801-f3e7-4cfe-a038-6a5e762bfda8] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 844.224148] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8151f27-1fa2-4a54-a726-0c8f11860501 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.231320] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-376e64d1-6feb-42d8-98f2-d6ae07252d2d tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] [instance: 56d39801-f3e7-4cfe-a038-6a5e762bfda8] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 844.231624] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-77ff5992-0c41-4649-99f6-dbdf9c4877b2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.240439] env[63028]: DEBUG nova.virt.hardware [None req-e1bd3373-3038-4133-a676-f1792a1f8e56 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 844.240830] env[63028]: DEBUG nova.virt.hardware [None req-e1bd3373-3038-4133-a676-f1792a1f8e56 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 844.241121] env[63028]: DEBUG nova.virt.hardware [None req-e1bd3373-3038-4133-a676-f1792a1f8e56 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Image limits 0:0:0 {{(pid=63028) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 844.241448] env[63028]: DEBUG nova.virt.hardware [None req-e1bd3373-3038-4133-a676-f1792a1f8e56 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 844.241738] env[63028]: DEBUG nova.virt.hardware [None req-e1bd3373-3038-4133-a676-f1792a1f8e56 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 844.242019] env[63028]: DEBUG nova.virt.hardware [None req-e1bd3373-3038-4133-a676-f1792a1f8e56 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 844.242417] env[63028]: DEBUG nova.virt.hardware [None req-e1bd3373-3038-4133-a676-f1792a1f8e56 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 844.242732] env[63028]: DEBUG nova.virt.hardware [None req-e1bd3373-3038-4133-a676-f1792a1f8e56 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 844.243182] env[63028]: DEBUG nova.virt.hardware [None req-e1bd3373-3038-4133-a676-f1792a1f8e56 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 844.243383] env[63028]: DEBUG nova.virt.hardware [None req-e1bd3373-3038-4133-a676-f1792a1f8e56 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 844.243766] env[63028]: DEBUG nova.virt.hardware [None req-e1bd3373-3038-4133-a676-f1792a1f8e56 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 844.249325] env[63028]: DEBUG oslo_vmware.api [None req-376e64d1-6feb-42d8-98f2-d6ae07252d2d tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Waiting for the task: (returnval){ [ 844.249325] env[63028]: value = "task-2735641" [ 844.249325] env[63028]: _type = "Task" [ 844.249325] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.249559] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fa36844a-281a-4354-98b6-c5848e1d2f01 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.269843] env[63028]: DEBUG oslo_vmware.api [None req-376e64d1-6feb-42d8-98f2-d6ae07252d2d tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Task: {'id': task-2735641, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.271406] env[63028]: DEBUG oslo_vmware.api [None req-e1bd3373-3038-4133-a676-f1792a1f8e56 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Waiting for the task: (returnval){ [ 844.271406] env[63028]: value = "task-2735642" [ 844.271406] env[63028]: _type = "Task" [ 844.271406] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.282153] env[63028]: DEBUG oslo_vmware.api [None req-e1bd3373-3038-4133-a676-f1792a1f8e56 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Task: {'id': task-2735642, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.403814] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance 3b90dbb8-66ce-435f-beae-5464720bfb3e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 844.556270] env[63028]: DEBUG oslo_vmware.api [None req-e73039fe-033f-4adc-a8b7-958d7706819e tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2735639, 'name': PowerOffVM_Task, 'duration_secs': 0.205738} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.556545] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-e73039fe-033f-4adc-a8b7-958d7706819e tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 514c83d1-4fb1-435c-8c25-aa112c744131] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 844.556718] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-e73039fe-033f-4adc-a8b7-958d7706819e tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 514c83d1-4fb1-435c-8c25-aa112c744131] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 844.556993] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a2e474c4-8deb-4527-b6ca-4692f3b84e43 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.629640] env[63028]: DEBUG oslo_vmware.api [None req-e3c5b66a-8429-42f3-9cb6-fcb4bd521431 tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5232f5d4-68ff-eab1-46e6-80a3d2c4035d, 'name': SearchDatastore_Task, 'duration_secs': 0.026485} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.630876] env[63028]: DEBUG oslo_concurrency.lockutils [None req-e3c5b66a-8429-42f3-9cb6-fcb4bd521431 tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 844.631160] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-e3c5b66a-8429-42f3-9cb6-fcb4bd521431 tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] [instance: 13e0ca05-3ab3-43e2-8b0d-8045e26d6723] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 844.631403] env[63028]: DEBUG oslo_concurrency.lockutils [None req-e3c5b66a-8429-42f3-9cb6-fcb4bd521431 tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 844.631831] env[63028]: DEBUG oslo_concurrency.lockutils [None req-e3c5b66a-8429-42f3-9cb6-fcb4bd521431 tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 844.631831] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-e3c5b66a-8429-42f3-9cb6-fcb4bd521431 tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 
844.632042] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-e73039fe-033f-4adc-a8b7-958d7706819e tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 514c83d1-4fb1-435c-8c25-aa112c744131] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 844.632223] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-e73039fe-033f-4adc-a8b7-958d7706819e tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 514c83d1-4fb1-435c-8c25-aa112c744131] Deleting contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 844.632387] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-e73039fe-033f-4adc-a8b7-958d7706819e tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Deleting the datastore file [datastore1] 514c83d1-4fb1-435c-8c25-aa112c744131 {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 844.632606] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-138c793e-6963-4ef5-a1e1-0f7b2a628a02 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.635131] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-61169be5-bb79-40d0-83f1-5a4d08929f99 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.641466] env[63028]: DEBUG oslo_vmware.api [None req-e73039fe-033f-4adc-a8b7-958d7706819e tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Waiting for the task: (returnval){ [ 844.641466] env[63028]: value = "task-2735644" [ 844.641466] env[63028]: _type = "Task" [ 844.641466] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.649183] env[63028]: DEBUG oslo_vmware.api [None req-e73039fe-033f-4adc-a8b7-958d7706819e tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2735644, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.649976] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-e3c5b66a-8429-42f3-9cb6-fcb4bd521431 tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 844.650162] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-e3c5b66a-8429-42f3-9cb6-fcb4bd521431 tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 844.650900] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0b688e7e-b452-4a19-8403-854271e2992b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.655472] env[63028]: DEBUG oslo_vmware.api [None req-e3c5b66a-8429-42f3-9cb6-fcb4bd521431 tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Waiting for the task: (returnval){ [ 844.655472] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52f5cdfe-a165-8b35-c692-922fc9fb99fe" [ 844.655472] env[63028]: _type = "Task" [ 844.655472] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.662571] env[63028]: DEBUG oslo_vmware.api [None req-e3c5b66a-8429-42f3-9cb6-fcb4bd521431 tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52f5cdfe-a165-8b35-c692-922fc9fb99fe, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.716197] env[63028]: DEBUG oslo_vmware.api [None req-c28be328-46e8-4c9b-904e-d46ed583a88b tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Task: {'id': task-2735640, 'name': ReconfigVM_Task, 'duration_secs': 0.394412} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.716597] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-c28be328-46e8-4c9b-904e-d46ed583a88b tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] [instance: 022125c4-2b0c-4a2c-ae63-18968887316e] Reconfigured VM instance instance-00000041 to attach disk [datastore1] 022125c4-2b0c-4a2c-ae63-18968887316e/022125c4-2b0c-4a2c-ae63-18968887316e.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 844.717456] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1312df39-8d0f-4518-b379-1a301363a85f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.727033] env[63028]: DEBUG oslo_vmware.api [None req-c28be328-46e8-4c9b-904e-d46ed583a88b tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Waiting for the task: (returnval){ [ 844.727033] env[63028]: value = "task-2735645" [ 844.727033] env[63028]: _type = "Task" [ 844.727033] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.737714] env[63028]: DEBUG oslo_vmware.api [None req-c28be328-46e8-4c9b-904e-d46ed583a88b tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Task: {'id': task-2735645, 'name': Rename_Task} progress is 5%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.770802] env[63028]: DEBUG oslo_vmware.api [None req-376e64d1-6feb-42d8-98f2-d6ae07252d2d tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Task: {'id': task-2735641, 'name': PowerOffVM_Task, 'duration_secs': 0.241142} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.771109] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-376e64d1-6feb-42d8-98f2-d6ae07252d2d tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] [instance: 56d39801-f3e7-4cfe-a038-6a5e762bfda8] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 844.771287] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-376e64d1-6feb-42d8-98f2-d6ae07252d2d tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] [instance: 56d39801-f3e7-4cfe-a038-6a5e762bfda8] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 844.771599] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-625b0bd3-680b-43e8-9cc5-9925fad4eec8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.783462] env[63028]: DEBUG oslo_vmware.api [None req-e1bd3373-3038-4133-a676-f1792a1f8e56 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Task: {'id': task-2735642, 'name': ReconfigVM_Task, 'duration_secs': 0.318931} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.783773] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-e1bd3373-3038-4133-a676-f1792a1f8e56 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: 52b19182-a7e2-4461-b4eb-e6cd8a30024e] Updating instance '52b19182-a7e2-4461-b4eb-e6cd8a30024e' progress to 33 {{(pid=63028) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 844.840816] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-376e64d1-6feb-42d8-98f2-d6ae07252d2d tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] [instance: 56d39801-f3e7-4cfe-a038-6a5e762bfda8] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 844.840816] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-376e64d1-6feb-42d8-98f2-d6ae07252d2d tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] [instance: 56d39801-f3e7-4cfe-a038-6a5e762bfda8] Deleting contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 844.841011] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-376e64d1-6feb-42d8-98f2-d6ae07252d2d tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Deleting the datastore file [datastore2] 56d39801-f3e7-4cfe-a038-6a5e762bfda8 {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 844.841283] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with 
opID=oslo.vmware-7fbff112-f5e3-4344-b8b1-c8319bbb7567 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.847316] env[63028]: DEBUG oslo_vmware.api [None req-376e64d1-6feb-42d8-98f2-d6ae07252d2d tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Waiting for the task: (returnval){ [ 844.847316] env[63028]: value = "task-2735647" [ 844.847316] env[63028]: _type = "Task" [ 844.847316] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.855947] env[63028]: DEBUG oslo_vmware.api [None req-376e64d1-6feb-42d8-98f2-d6ae07252d2d tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Task: {'id': task-2735647, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.907056] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance a2f7d7c6-7931-4b21-a29c-bb9965577210 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 845.152315] env[63028]: DEBUG oslo_vmware.api [None req-e73039fe-033f-4adc-a8b7-958d7706819e tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2735644, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.32079} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.154021] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-e73039fe-033f-4adc-a8b7-958d7706819e tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 845.154021] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-e73039fe-033f-4adc-a8b7-958d7706819e tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 514c83d1-4fb1-435c-8c25-aa112c744131] Deleted contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 845.154021] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-e73039fe-033f-4adc-a8b7-958d7706819e tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 514c83d1-4fb1-435c-8c25-aa112c744131] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 845.154021] env[63028]: INFO nova.compute.manager [None req-e73039fe-033f-4adc-a8b7-958d7706819e tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 514c83d1-4fb1-435c-8c25-aa112c744131] Took 1.12 seconds to destroy the instance on the hypervisor. 
[ 845.154547] env[63028]: DEBUG oslo.service.loopingcall [None req-e73039fe-033f-4adc-a8b7-958d7706819e tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 845.154763] env[63028]: DEBUG nova.compute.manager [-] [instance: 514c83d1-4fb1-435c-8c25-aa112c744131] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 845.154875] env[63028]: DEBUG nova.network.neutron [-] [instance: 514c83d1-4fb1-435c-8c25-aa112c744131] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 845.165990] env[63028]: DEBUG oslo_vmware.api [None req-e3c5b66a-8429-42f3-9cb6-fcb4bd521431 tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52f5cdfe-a165-8b35-c692-922fc9fb99fe, 'name': SearchDatastore_Task, 'duration_secs': 0.042599} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.168060] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-194e1041-9c99-4b7e-935f-d2d6d7f43769 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.175029] env[63028]: DEBUG oslo_vmware.api [None req-e3c5b66a-8429-42f3-9cb6-fcb4bd521431 tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Waiting for the task: (returnval){ [ 845.175029] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5204b58a-da29-9285-9808-50537cb9830c" [ 845.175029] env[63028]: _type = "Task" [ 845.175029] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.182690] env[63028]: DEBUG oslo_vmware.api [None req-e3c5b66a-8429-42f3-9cb6-fcb4bd521431 tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5204b58a-da29-9285-9808-50537cb9830c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.236291] env[63028]: DEBUG oslo_vmware.api [None req-c28be328-46e8-4c9b-904e-d46ed583a88b tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Task: {'id': task-2735645, 'name': Rename_Task, 'duration_secs': 0.163185} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.236839] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-c28be328-46e8-4c9b-904e-d46ed583a88b tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] [instance: 022125c4-2b0c-4a2c-ae63-18968887316e] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 845.237137] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9cd0f2fb-7a9c-47b9-9c53-de46711a8e8f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.243363] env[63028]: DEBUG oslo_vmware.api [None req-c28be328-46e8-4c9b-904e-d46ed583a88b tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Waiting for the task: (returnval){ [ 845.243363] env[63028]: value = "task-2735648" [ 845.243363] env[63028]: _type = "Task" [ 845.243363] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.252478] env[63028]: DEBUG oslo_vmware.api [None req-c28be328-46e8-4c9b-904e-d46ed583a88b tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Task: {'id': task-2735648, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.290606] env[63028]: DEBUG nova.virt.hardware [None req-e1bd3373-3038-4133-a676-f1792a1f8e56 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:57:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='bd41cb6c-4f59-47c8-83b3-1b15a3032d86',id=38,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-149043815',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 845.290866] env[63028]: DEBUG nova.virt.hardware [None req-e1bd3373-3038-4133-a676-f1792a1f8e56 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 845.291037] env[63028]: DEBUG nova.virt.hardware [None req-e1bd3373-3038-4133-a676-f1792a1f8e56 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 845.291228] env[63028]: DEBUG nova.virt.hardware [None req-e1bd3373-3038-4133-a676-f1792a1f8e56 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 845.291379] env[63028]: DEBUG nova.virt.hardware [None req-e1bd3373-3038-4133-a676-f1792a1f8e56 
tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 845.291613] env[63028]: DEBUG nova.virt.hardware [None req-e1bd3373-3038-4133-a676-f1792a1f8e56 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 845.291847] env[63028]: DEBUG nova.virt.hardware [None req-e1bd3373-3038-4133-a676-f1792a1f8e56 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 845.292015] env[63028]: DEBUG nova.virt.hardware [None req-e1bd3373-3038-4133-a676-f1792a1f8e56 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 845.292187] env[63028]: DEBUG nova.virt.hardware [None req-e1bd3373-3038-4133-a676-f1792a1f8e56 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 845.292351] env[63028]: DEBUG nova.virt.hardware [None req-e1bd3373-3038-4133-a676-f1792a1f8e56 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 845.292528] env[63028]: DEBUG nova.virt.hardware [None req-e1bd3373-3038-4133-a676-f1792a1f8e56 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 845.297947] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-e1bd3373-3038-4133-a676-f1792a1f8e56 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: 52b19182-a7e2-4461-b4eb-e6cd8a30024e] Reconfiguring VM instance instance-00000037 to detach disk 2000 {{(pid=63028) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 845.298269] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-58ad0dea-4da5-4e9a-a229-87d6be8a6b29 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.316691] env[63028]: DEBUG oslo_vmware.api [None req-e1bd3373-3038-4133-a676-f1792a1f8e56 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Waiting for the task: (returnval){ [ 845.316691] env[63028]: value = "task-2735649" [ 845.316691] env[63028]: _type = "Task" [ 845.316691] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.325356] env[63028]: DEBUG oslo_vmware.api [None req-e1bd3373-3038-4133-a676-f1792a1f8e56 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Task: {'id': task-2735649, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.356360] env[63028]: DEBUG oslo_vmware.api [None req-376e64d1-6feb-42d8-98f2-d6ae07252d2d tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Task: {'id': task-2735647, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.426039} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.356615] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-376e64d1-6feb-42d8-98f2-d6ae07252d2d tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 845.356807] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-376e64d1-6feb-42d8-98f2-d6ae07252d2d tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] [instance: 56d39801-f3e7-4cfe-a038-6a5e762bfda8] Deleted contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 845.356975] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-376e64d1-6feb-42d8-98f2-d6ae07252d2d tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] [instance: 56d39801-f3e7-4cfe-a038-6a5e762bfda8] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 845.357169] env[63028]: INFO nova.compute.manager [None req-376e64d1-6feb-42d8-98f2-d6ae07252d2d tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] [instance: 56d39801-f3e7-4cfe-a038-6a5e762bfda8] Took 1.13 seconds to destroy the instance on the hypervisor. [ 845.357417] env[63028]: DEBUG oslo.service.loopingcall [None req-376e64d1-6feb-42d8-98f2-d6ae07252d2d tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 845.357604] env[63028]: DEBUG nova.compute.manager [-] [instance: 56d39801-f3e7-4cfe-a038-6a5e762bfda8] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 845.357695] env[63028]: DEBUG nova.network.neutron [-] [instance: 56d39801-f3e7-4cfe-a038-6a5e762bfda8] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 845.413052] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance 70147f2f-0b5e-4343-84e4-8bc195a5485d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 845.551611] env[63028]: DEBUG nova.compute.manager [req-e8e5d454-fd18-4811-b7b1-8b10974aebad req-f0f387a9-73b9-429d-a5e6-bc2fe8c0bed6 service nova] [instance: 514c83d1-4fb1-435c-8c25-aa112c744131] Received event network-vif-deleted-1e514afb-c9a6-43c6-bfec-ac1a32620674 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 845.551815] env[63028]: INFO nova.compute.manager [req-e8e5d454-fd18-4811-b7b1-8b10974aebad req-f0f387a9-73b9-429d-a5e6-bc2fe8c0bed6 service nova] [instance: 514c83d1-4fb1-435c-8c25-aa112c744131] Neutron deleted interface 1e514afb-c9a6-43c6-bfec-ac1a32620674; detaching it from the instance and deleting it from the info cache [ 845.551986] env[63028]: DEBUG nova.network.neutron [req-e8e5d454-fd18-4811-b7b1-8b10974aebad req-f0f387a9-73b9-429d-a5e6-bc2fe8c0bed6 service nova] [instance: 514c83d1-4fb1-435c-8c25-aa112c744131] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 845.643068] env[63028]: INFO nova.compute.manager [None req-93dc5eed-51ca-4a79-934e-c11281bb8abc tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: ba57ed92-aaef-460c-bd45-d0cbe09e4615] Rebuilding instance [ 845.689292] env[63028]: DEBUG oslo_vmware.api [None req-e3c5b66a-8429-42f3-9cb6-fcb4bd521431 tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5204b58a-da29-9285-9808-50537cb9830c, 'name': SearchDatastore_Task, 'duration_secs': 0.013165} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.693087] env[63028]: DEBUG oslo_concurrency.lockutils [None req-e3c5b66a-8429-42f3-9cb6-fcb4bd521431 tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 845.693523] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3c5b66a-8429-42f3-9cb6-fcb4bd521431 tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] 13e0ca05-3ab3-43e2-8b0d-8045e26d6723/13e0ca05-3ab3-43e2-8b0d-8045e26d6723.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 845.696532] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-84b6e0d5-6ae9-49d2-a87e-12c7fd8f5064 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.699029] env[63028]: DEBUG nova.compute.manager [None req-93dc5eed-51ca-4a79-934e-c11281bb8abc tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: ba57ed92-aaef-460c-bd45-d0cbe09e4615] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 845.699865] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-ab8d0020-a8c2-4cde-b89b-176be133b2e3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.711024] env[63028]: DEBUG oslo_vmware.api [None req-e3c5b66a-8429-42f3-9cb6-fcb4bd521431 tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Waiting for the task: (returnval){ [ 845.711024] env[63028]: value = "task-2735650" [ 845.711024] env[63028]: _type = "Task" [ 845.711024] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.724881] env[63028]: DEBUG oslo_vmware.api [None req-e3c5b66a-8429-42f3-9cb6-fcb4bd521431 tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Task: {'id': task-2735650, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.753756] env[63028]: DEBUG oslo_vmware.api [None req-c28be328-46e8-4c9b-904e-d46ed583a88b tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Task: {'id': task-2735648, 'name': PowerOnVM_Task, 'duration_secs': 0.47705} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.754010] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-c28be328-46e8-4c9b-904e-d46ed583a88b tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] [instance: 022125c4-2b0c-4a2c-ae63-18968887316e] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 845.754222] env[63028]: INFO nova.compute.manager [None req-c28be328-46e8-4c9b-904e-d46ed583a88b tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] [instance: 022125c4-2b0c-4a2c-ae63-18968887316e] Took 7.41 seconds to spawn the instance on the hypervisor. [ 845.754396] env[63028]: DEBUG nova.compute.manager [None req-c28be328-46e8-4c9b-904e-d46ed583a88b tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] [instance: 022125c4-2b0c-4a2c-ae63-18968887316e] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 845.755163] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-259c3394-b689-4909-827d-bbfab7881ed0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.827471] env[63028]: DEBUG oslo_vmware.api [None req-e1bd3373-3038-4133-a676-f1792a1f8e56 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Task: {'id': task-2735649, 'name': ReconfigVM_Task, 'duration_secs': 0.230575} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.827765] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-e1bd3373-3038-4133-a676-f1792a1f8e56 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: 52b19182-a7e2-4461-b4eb-e6cd8a30024e] Reconfigured VM instance instance-00000037 to detach disk 2000 {{(pid=63028) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 845.828601] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3aadce11-8ecd-4983-84ad-4e10e76b1fa3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.850678] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-e1bd3373-3038-4133-a676-f1792a1f8e56 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: 52b19182-a7e2-4461-b4eb-e6cd8a30024e] Reconfiguring VM instance instance-00000037 to attach disk [datastore2] 52b19182-a7e2-4461-b4eb-e6cd8a30024e/52b19182-a7e2-4461-b4eb-e6cd8a30024e.vmdk or device None with type thin {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 845.850957] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-25ea7f5a-0312-4d17-9213-8eb88486f64f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.868192] env[63028]: DEBUG oslo_vmware.api [None req-e1bd3373-3038-4133-a676-f1792a1f8e56 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Waiting for the task: (returnval){ [ 845.868192] env[63028]: value = "task-2735651" [ 845.868192] env[63028]: _type = "Task" [ 845.868192] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.875798] env[63028]: DEBUG oslo_vmware.api [None req-e1bd3373-3038-4133-a676-f1792a1f8e56 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Task: {'id': task-2735651, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.919150] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance b77ba7d6-305e-4b60-a4b7-9353c12c3920 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 845.919628] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Migration b2428275-4704-4b98-81dd-222c963eb311 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 845.919628] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance c06813c4-472d-4bf9-84ec-0d01306bcd48 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 845.919909] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Total usable vcpus: 48, total allocated vcpus: 25 {{(pid=63028) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 845.919909] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=5440MB phys_disk=200GB used_disk=25GB total_vcpus=48 used_vcpus=25 pci_stats=[] {{(pid=63028) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 846.038558] env[63028]: DEBUG nova.network.neutron [-] [instance: 514c83d1-4fb1-435c-8c25-aa112c744131] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 846.055042] env[63028]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f7a699bb-3920-4317-9f59-50227b4fbdc7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.068912] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f8b1a20-776d-4b52-96f1-0c53752f76d6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.117365] env[63028]: DEBUG nova.compute.manager [req-e8e5d454-fd18-4811-b7b1-8b10974aebad req-f0f387a9-73b9-429d-a5e6-bc2fe8c0bed6 service nova] [instance: 514c83d1-4fb1-435c-8c25-aa112c744131] Detach interface failed, port_id=1e514afb-c9a6-43c6-bfec-ac1a32620674, reason: Instance 514c83d1-4fb1-435c-8c25-aa112c744131 could not be found. {{(pid=63028) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 846.171652] env[63028]: DEBUG nova.network.neutron [-] [instance: 56d39801-f3e7-4cfe-a038-6a5e762bfda8] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 846.226124] env[63028]: DEBUG oslo_vmware.api [None req-e3c5b66a-8429-42f3-9cb6-fcb4bd521431 tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Task: {'id': task-2735650, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.276046] env[63028]: INFO nova.compute.manager [None req-c28be328-46e8-4c9b-904e-d46ed583a88b tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] [instance: 022125c4-2b0c-4a2c-ae63-18968887316e] Took 43.17 seconds to build instance. 
[ 846.378867] env[63028]: DEBUG oslo_vmware.api [None req-e1bd3373-3038-4133-a676-f1792a1f8e56 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Task: {'id': task-2735651, 'name': ReconfigVM_Task, 'duration_secs': 0.411405} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.381494] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-e1bd3373-3038-4133-a676-f1792a1f8e56 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: 52b19182-a7e2-4461-b4eb-e6cd8a30024e] Reconfigured VM instance instance-00000037 to attach disk [datastore2] 52b19182-a7e2-4461-b4eb-e6cd8a30024e/52b19182-a7e2-4461-b4eb-e6cd8a30024e.vmdk or device None with type thin {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 846.382203] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-e1bd3373-3038-4133-a676-f1792a1f8e56 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: 52b19182-a7e2-4461-b4eb-e6cd8a30024e] Updating instance '52b19182-a7e2-4461-b4eb-e6cd8a30024e' progress to 50 {{(pid=63028) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 846.497624] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e933a258-acdc-4e61-9f9c-8fc8902fc736 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.505876] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-726e544f-657c-410a-9d26-7685cfbd181a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.539404] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3f38955-fdff-49ed-8fc5-976e56e81865 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.542228] env[63028]: INFO nova.compute.manager [-] [instance: 514c83d1-4fb1-435c-8c25-aa112c744131] Took 1.39 seconds to deallocate network for instance. [ 846.550289] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-453e626a-5cb8-4249-aee9-6c02f8c41ddc {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.564482] env[63028]: DEBUG nova.compute.provider_tree [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 846.674629] env[63028]: INFO nova.compute.manager [-] [instance: 56d39801-f3e7-4cfe-a038-6a5e762bfda8] Took 1.32 seconds to deallocate network for instance. [ 846.725340] env[63028]: DEBUG oslo_vmware.api [None req-e3c5b66a-8429-42f3-9cb6-fcb4bd521431 tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Task: {'id': task-2735650, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.634699} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.725599] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3c5b66a-8429-42f3-9cb6-fcb4bd521431 tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] 13e0ca05-3ab3-43e2-8b0d-8045e26d6723/13e0ca05-3ab3-43e2-8b0d-8045e26d6723.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 846.725811] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-e3c5b66a-8429-42f3-9cb6-fcb4bd521431 tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] [instance: 13e0ca05-3ab3-43e2-8b0d-8045e26d6723] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 846.726066] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e6598ddd-52da-4f11-bc06-b7630b3dd886 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.730564] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-93dc5eed-51ca-4a79-934e-c11281bb8abc tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: ba57ed92-aaef-460c-bd45-d0cbe09e4615] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 846.730811] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e06fb732-bb31-44f2-beb3-c834b3b6bd38 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.733525] env[63028]: DEBUG oslo_vmware.api [None req-e3c5b66a-8429-42f3-9cb6-fcb4bd521431 tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Waiting for the task: (returnval){ [ 846.733525] env[63028]: value = "task-2735652" [ 846.733525] env[63028]: _type = "Task" [ 846.733525] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.738677] env[63028]: DEBUG oslo_vmware.api [None req-93dc5eed-51ca-4a79-934e-c11281bb8abc tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Waiting for the task: (returnval){ [ 846.738677] env[63028]: value = "task-2735653" [ 846.738677] env[63028]: _type = "Task" [ 846.738677] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.745625] env[63028]: DEBUG oslo_vmware.api [None req-e3c5b66a-8429-42f3-9cb6-fcb4bd521431 tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Task: {'id': task-2735652, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.750772] env[63028]: DEBUG oslo_vmware.api [None req-93dc5eed-51ca-4a79-934e-c11281bb8abc tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': task-2735653, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.778576] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c28be328-46e8-4c9b-904e-d46ed583a88b tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Lock "022125c4-2b0c-4a2c-ae63-18968887316e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 59.285s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 846.889015] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b851c18-a07c-49eb-96a7-7ae0fa712d62 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.908691] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c60ce6a5-502b-48ad-988a-c02461fee9b4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.928125] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-e1bd3373-3038-4133-a676-f1792a1f8e56 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: 52b19182-a7e2-4461-b4eb-e6cd8a30024e] Updating instance '52b19182-a7e2-4461-b4eb-e6cd8a30024e' progress to 67 {{(pid=63028) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 846.940568] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99771d85-8b0a-426a-bfff-4e3cf19226a2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.946538] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-976d1ce9-3f0f-449f-a82b-9aa77fe13f34 tempest-ServersAdminNegativeTestJSON-15191423 tempest-ServersAdminNegativeTestJSON-15191423-project-admin] [instance: 022125c4-2b0c-4a2c-ae63-18968887316e] Suspending the VM {{(pid=63028) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 846.946763] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-ea94e14b-8ee5-44e1-ab51-3cd4c99b57c9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.952481] env[63028]: DEBUG oslo_vmware.api [None req-976d1ce9-3f0f-449f-a82b-9aa77fe13f34 tempest-ServersAdminNegativeTestJSON-15191423 tempest-ServersAdminNegativeTestJSON-15191423-project-admin] Waiting for the task: (returnval){ [ 846.952481] env[63028]: value = "task-2735654" [ 846.952481] env[63028]: _type = "Task" [ 846.952481] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.960796] env[63028]: DEBUG oslo_vmware.api [None req-976d1ce9-3f0f-449f-a82b-9aa77fe13f34 tempest-ServersAdminNegativeTestJSON-15191423 tempest-ServersAdminNegativeTestJSON-15191423-project-admin] Task: {'id': task-2735654, 'name': SuspendVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.050685] env[63028]: DEBUG oslo_concurrency.lockutils [None req-e73039fe-033f-4adc-a8b7-958d7706819e tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 847.070980] env[63028]: DEBUG nova.scheduler.client.report [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 847.181197] env[63028]: DEBUG oslo_concurrency.lockutils [None req-376e64d1-6feb-42d8-98f2-d6ae07252d2d tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 847.246450] env[63028]: DEBUG oslo_vmware.api [None req-e3c5b66a-8429-42f3-9cb6-fcb4bd521431 tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Task: {'id': task-2735652, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.085017} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.247095] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-e3c5b66a-8429-42f3-9cb6-fcb4bd521431 tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] [instance: 13e0ca05-3ab3-43e2-8b0d-8045e26d6723] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 847.247913] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-221f7c6e-5eda-43a9-b8d6-e1eabe82a845 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.254331] env[63028]: DEBUG oslo_vmware.api [None req-93dc5eed-51ca-4a79-934e-c11281bb8abc tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': task-2735653, 'name': PowerOffVM_Task, 'duration_secs': 0.182292} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.254993] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-93dc5eed-51ca-4a79-934e-c11281bb8abc tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: ba57ed92-aaef-460c-bd45-d0cbe09e4615] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 847.255256] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-93dc5eed-51ca-4a79-934e-c11281bb8abc tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: ba57ed92-aaef-460c-bd45-d0cbe09e4615] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 847.256055] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d94b0360-59d1-45b0-807c-a152e9d96c0e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.275041] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-e3c5b66a-8429-42f3-9cb6-fcb4bd521431 tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] [instance: 13e0ca05-3ab3-43e2-8b0d-8045e26d6723] Reconfiguring VM instance instance-00000040 to attach disk [datastore1] 13e0ca05-3ab3-43e2-8b0d-8045e26d6723/13e0ca05-3ab3-43e2-8b0d-8045e26d6723.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 847.275887] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-316306eb-37a3-40f4-97a7-9e82e7d2054f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.293021] env[63028]: DEBUG nova.compute.manager [None req-f456b00f-8e30-4002-90c9-4a456674e99d tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: b77ba7d6-305e-4b60-a4b7-9353c12c3920] Starting instance... {{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 847.295714] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-93dc5eed-51ca-4a79-934e-c11281bb8abc tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: ba57ed92-aaef-460c-bd45-d0cbe09e4615] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 847.296395] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1af06328-edb5-4f40-a4b8-4e4a4dbdaac2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.301895] env[63028]: DEBUG oslo_vmware.api [None req-e3c5b66a-8429-42f3-9cb6-fcb4bd521431 tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Waiting for the task: (returnval){ [ 847.301895] env[63028]: value = "task-2735655" [ 847.301895] env[63028]: _type = "Task" [ 847.301895] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.310020] env[63028]: DEBUG oslo_vmware.api [None req-e3c5b66a-8429-42f3-9cb6-fcb4bd521431 tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Task: {'id': task-2735655, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.463356] env[63028]: DEBUG oslo_vmware.api [None req-976d1ce9-3f0f-449f-a82b-9aa77fe13f34 tempest-ServersAdminNegativeTestJSON-15191423 tempest-ServersAdminNegativeTestJSON-15191423-project-admin] Task: {'id': task-2735654, 'name': SuspendVM_Task} progress is 66%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.474808] env[63028]: DEBUG nova.network.neutron [None req-e1bd3373-3038-4133-a676-f1792a1f8e56 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: 52b19182-a7e2-4461-b4eb-e6cd8a30024e] Port 7dd5be5d-a88d-4dcd-a42d-7842895207f7 binding to destination host cpu-1 is already ACTIVE {{(pid=63028) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 847.576969] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63028) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 847.577287] env[63028]: DEBUG oslo_concurrency.lockutils [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 7.796s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 847.577587] env[63028]: DEBUG oslo_concurrency.lockutils [None req-81b1346f-eb21-48c7-8ffd-fb557f6b5405 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.384s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 847.579679] env[63028]: INFO nova.compute.claims [None req-81b1346f-eb21-48c7-8ffd-fb557f6b5405 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] [instance: 4e859327-ccd3-440e-b884-67f6cdadf97f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 847.582472] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 847.582624] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Cleaning up deleted instances {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11745}} [ 847.629765] env[63028]: DEBUG nova.compute.manager [req-a6d0666a-02ea-4e00-83f9-9d03de4eec56 req-716792c8-b69e-4e43-80e0-5c2ad9c136be service nova] [instance: 56d39801-f3e7-4cfe-a038-6a5e762bfda8] Received event network-vif-deleted-333fcbb4-150d-4ee3-bd25-8ef1d049b40d {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 847.813518] env[63028]: DEBUG oslo_vmware.api [None req-e3c5b66a-8429-42f3-9cb6-fcb4bd521431 tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Task: {'id': task-2735655, 'name': ReconfigVM_Task, 'duration_secs': 0.385275} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.813880] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-e3c5b66a-8429-42f3-9cb6-fcb4bd521431 tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] [instance: 13e0ca05-3ab3-43e2-8b0d-8045e26d6723] Reconfigured VM instance instance-00000040 to attach disk [datastore1] 13e0ca05-3ab3-43e2-8b0d-8045e26d6723/13e0ca05-3ab3-43e2-8b0d-8045e26d6723.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 847.814550] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-261c7f0d-d4b0-402f-bf1e-1a536aeabf8f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.820970] env[63028]: DEBUG oslo_vmware.api [None req-e3c5b66a-8429-42f3-9cb6-fcb4bd521431 tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Waiting for the task: (returnval){ [ 847.820970] env[63028]: value = "task-2735657" [ 847.820970] env[63028]: _type = "Task" [ 847.820970] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.824646] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f456b00f-8e30-4002-90c9-4a456674e99d tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 847.829394] env[63028]: DEBUG oslo_vmware.api [None req-e3c5b66a-8429-42f3-9cb6-fcb4bd521431 tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Task: {'id': task-2735657, 'name': Rename_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.966773] env[63028]: DEBUG oslo_vmware.api [None req-976d1ce9-3f0f-449f-a82b-9aa77fe13f34 tempest-ServersAdminNegativeTestJSON-15191423 tempest-ServersAdminNegativeTestJSON-15191423-project-admin] Task: {'id': task-2735654, 'name': SuspendVM_Task, 'duration_secs': 0.668455} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.967201] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-976d1ce9-3f0f-449f-a82b-9aa77fe13f34 tempest-ServersAdminNegativeTestJSON-15191423 tempest-ServersAdminNegativeTestJSON-15191423-project-admin] [instance: 022125c4-2b0c-4a2c-ae63-18968887316e] Suspended the VM {{(pid=63028) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 847.967491] env[63028]: DEBUG nova.compute.manager [None req-976d1ce9-3f0f-449f-a82b-9aa77fe13f34 tempest-ServersAdminNegativeTestJSON-15191423 tempest-ServersAdminNegativeTestJSON-15191423-project-admin] [instance: 022125c4-2b0c-4a2c-ae63-18968887316e] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 847.968686] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8668b5df-699b-4dfd-8c79-e31757d2e090 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.098064] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] There are 38 instances to clean {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11754}} [ 848.098064] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: af87f1a5-b413-4b26-be91-474ad1f73df8] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 848.333521] env[63028]: DEBUG oslo_vmware.api [None req-e3c5b66a-8429-42f3-9cb6-fcb4bd521431 tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Task: {'id': task-2735657, 'name': Rename_Task} progress is 14%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.497378] env[63028]: DEBUG oslo_concurrency.lockutils [None req-e1bd3373-3038-4133-a676-f1792a1f8e56 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Acquiring lock "52b19182-a7e2-4461-b4eb-e6cd8a30024e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 848.497693] env[63028]: DEBUG oslo_concurrency.lockutils [None req-e1bd3373-3038-4133-a676-f1792a1f8e56 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Lock "52b19182-a7e2-4461-b4eb-e6cd8a30024e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 848.497929] env[63028]: DEBUG oslo_concurrency.lockutils [None req-e1bd3373-3038-4133-a676-f1792a1f8e56 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Lock "52b19182-a7e2-4461-b4eb-e6cd8a30024e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 848.603059] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: a4b0d948-d950-414a-b23f-faefa5ab038c] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 848.753640] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-93dc5eed-51ca-4a79-934e-c11281bb8abc tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: ba57ed92-aaef-460c-bd45-d0cbe09e4615] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 848.754386] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-93dc5eed-51ca-4a79-934e-c11281bb8abc tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: ba57ed92-aaef-460c-bd45-d0cbe09e4615] Deleting contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 848.754386] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-93dc5eed-51ca-4a79-934e-c11281bb8abc tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Deleting the datastore file [datastore1] ba57ed92-aaef-460c-bd45-d0cbe09e4615 {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 848.754386] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6511cdfd-c621-4e72-a202-0c58131ae920 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.761995] env[63028]: DEBUG oslo_vmware.api [None req-93dc5eed-51ca-4a79-934e-c11281bb8abc tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Waiting for the task: (returnval){ [ 848.761995] env[63028]: value = "task-2735658" [ 848.761995] env[63028]: _type = "Task" [ 848.761995] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.769833] env[63028]: DEBUG oslo_vmware.api [None req-93dc5eed-51ca-4a79-934e-c11281bb8abc tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': task-2735658, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.839742] env[63028]: DEBUG oslo_vmware.api [None req-e3c5b66a-8429-42f3-9cb6-fcb4bd521431 tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Task: {'id': task-2735657, 'name': Rename_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.105817] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: 50e4934b-b9b1-4887-b5d1-95a37fbf4c41] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 849.114239] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b603d50-a965-4a08-a6af-fd292cd06944 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.124179] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43e9ae69-707a-4fa8-88e1-b96f7251dc40 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.166750] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31a3dd4f-b9bf-4d3a-b2b4-881df40de643 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.175474] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3cc3be8-8630-4a46-8156-694c350ab7e8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.190365] env[63028]: DEBUG nova.compute.provider_tree [None req-81b1346f-eb21-48c7-8ffd-fb557f6b5405 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 849.274561] env[63028]: DEBUG oslo_vmware.api [None req-93dc5eed-51ca-4a79-934e-c11281bb8abc tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': task-2735658, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.224818} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.274878] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-93dc5eed-51ca-4a79-934e-c11281bb8abc tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 849.275088] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-93dc5eed-51ca-4a79-934e-c11281bb8abc tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: ba57ed92-aaef-460c-bd45-d0cbe09e4615] Deleted contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 849.275272] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-93dc5eed-51ca-4a79-934e-c11281bb8abc tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: ba57ed92-aaef-460c-bd45-d0cbe09e4615] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 849.334603] env[63028]: DEBUG oslo_vmware.api [None req-e3c5b66a-8429-42f3-9cb6-fcb4bd521431 tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Task: {'id': task-2735657, 'name': Rename_Task, 'duration_secs': 1.174434} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.334603] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3c5b66a-8429-42f3-9cb6-fcb4bd521431 tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] [instance: 13e0ca05-3ab3-43e2-8b0d-8045e26d6723] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 849.334603] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-11ba2b92-aec9-475e-afd8-d7d82811418d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.340920] env[63028]: DEBUG oslo_vmware.api [None req-e3c5b66a-8429-42f3-9cb6-fcb4bd521431 tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Waiting for the task: (returnval){ [ 849.340920] env[63028]: value = "task-2735659" [ 849.340920] env[63028]: _type = "Task" [ 849.340920] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.351298] env[63028]: DEBUG oslo_vmware.api [None req-e3c5b66a-8429-42f3-9cb6-fcb4bd521431 tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Task: {'id': task-2735659, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.565341] env[63028]: DEBUG oslo_concurrency.lockutils [None req-e1bd3373-3038-4133-a676-f1792a1f8e56 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Acquiring lock "refresh_cache-52b19182-a7e2-4461-b4eb-e6cd8a30024e" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 849.565601] env[63028]: DEBUG oslo_concurrency.lockutils [None req-e1bd3373-3038-4133-a676-f1792a1f8e56 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Acquired lock "refresh_cache-52b19182-a7e2-4461-b4eb-e6cd8a30024e" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 849.565830] env[63028]: DEBUG nova.network.neutron [None req-e1bd3373-3038-4133-a676-f1792a1f8e56 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: 52b19182-a7e2-4461-b4eb-e6cd8a30024e] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 849.615183] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: 352ac7c3-17a8-4d7e-a66f-47ea7614892c] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 849.694178] env[63028]: DEBUG nova.scheduler.client.report [None req-81b1346f-eb21-48c7-8ffd-fb557f6b5405 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 849.851538] env[63028]: DEBUG oslo_vmware.api [None req-e3c5b66a-8429-42f3-9cb6-fcb4bd521431 tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Task: {'id': task-2735659, 'name': PowerOnVM_Task} progress is 94%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.105879] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c70c58a8-01fc-41f4-ab97-625b838707cf tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Acquiring lock "022125c4-2b0c-4a2c-ae63-18968887316e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 850.106191] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c70c58a8-01fc-41f4-ab97-625b838707cf tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Lock "022125c4-2b0c-4a2c-ae63-18968887316e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 850.106354] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c70c58a8-01fc-41f4-ab97-625b838707cf tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Acquiring lock "022125c4-2b0c-4a2c-ae63-18968887316e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 850.106534] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c70c58a8-01fc-41f4-ab97-625b838707cf tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Lock "022125c4-2b0c-4a2c-ae63-18968887316e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 850.106707] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c70c58a8-01fc-41f4-ab97-625b838707cf tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Lock "022125c4-2b0c-4a2c-ae63-18968887316e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 850.108716] env[63028]: INFO nova.compute.manager [None req-c70c58a8-01fc-41f4-ab97-625b838707cf tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] [instance: 022125c4-2b0c-4a2c-ae63-18968887316e] Terminating instance [ 850.118945] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: e2d39c43-6666-4fda-b8e2-485399c59e46] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 850.199422] env[63028]: DEBUG oslo_concurrency.lockutils [None req-81b1346f-eb21-48c7-8ffd-fb557f6b5405 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.622s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 850.200065] env[63028]: DEBUG nova.compute.manager [None 
req-81b1346f-eb21-48c7-8ffd-fb557f6b5405 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] [instance: 4e859327-ccd3-440e-b884-67f6cdadf97f] Start building networks asynchronously for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 850.202676] env[63028]: DEBUG oslo_concurrency.lockutils [None req-587ce72e-e9e6-43e2-b1d2-881ecb2cd93a tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.130s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 850.202897] env[63028]: DEBUG oslo_concurrency.lockutils [None req-587ce72e-e9e6-43e2-b1d2-881ecb2cd93a tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 850.205270] env[63028]: DEBUG oslo_concurrency.lockutils [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.111s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 850.206728] env[63028]: INFO nova.compute.claims [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: 9773ad95-1894-471d-8020-c7952eac4be4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 850.232088] env[63028]: INFO nova.scheduler.client.report [None req-587ce72e-e9e6-43e2-b1d2-881ecb2cd93a tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Deleted allocations for instance 5982cd5d-abf1-42d4-bb44-8d79de599f11 [ 850.284194] env[63028]: DEBUG nova.network.neutron [None req-e1bd3373-3038-4133-a676-f1792a1f8e56 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: 52b19182-a7e2-4461-b4eb-e6cd8a30024e] Updating instance_info_cache with network_info: [{"id": "7dd5be5d-a88d-4dcd-a42d-7842895207f7", "address": "fa:16:3e:45:d9:fe", "network": {"id": "47c482bc-2ff1-431d-8910-0bf36def79a2", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.36", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "96661ac8d4f04d6e97eea4809b444133", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56136ef6-99d7-4562-9a9f-d66fec951c5c", "external-id": "nsx-vlan-transportzone-32", "segmentation_id": 32, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7dd5be5d-a8", "ovs_interfaceid": "7dd5be5d-a88d-4dcd-a42d-7842895207f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, 
"delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 850.318839] env[63028]: DEBUG nova.virt.hardware [None req-93dc5eed-51ca-4a79-934e-c11281bb8abc tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 850.319109] env[63028]: DEBUG nova.virt.hardware [None req-93dc5eed-51ca-4a79-934e-c11281bb8abc tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 850.319270] env[63028]: DEBUG nova.virt.hardware [None req-93dc5eed-51ca-4a79-934e-c11281bb8abc tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 850.319451] env[63028]: DEBUG nova.virt.hardware [None req-93dc5eed-51ca-4a79-934e-c11281bb8abc tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 850.319599] env[63028]: DEBUG nova.virt.hardware [None req-93dc5eed-51ca-4a79-934e-c11281bb8abc tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 850.319746] env[63028]: DEBUG nova.virt.hardware [None req-93dc5eed-51ca-4a79-934e-c11281bb8abc tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 850.320067] env[63028]: DEBUG nova.virt.hardware [None req-93dc5eed-51ca-4a79-934e-c11281bb8abc tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 850.320266] env[63028]: DEBUG nova.virt.hardware [None req-93dc5eed-51ca-4a79-934e-c11281bb8abc tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 850.320363] env[63028]: DEBUG nova.virt.hardware [None 
req-93dc5eed-51ca-4a79-934e-c11281bb8abc tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 850.320558] env[63028]: DEBUG nova.virt.hardware [None req-93dc5eed-51ca-4a79-934e-c11281bb8abc tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 850.320722] env[63028]: DEBUG nova.virt.hardware [None req-93dc5eed-51ca-4a79-934e-c11281bb8abc tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 850.321603] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62c02f85-64d0-477b-877e-c777bf1b256f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.330117] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e966e7a-bdf1-4164-8692-86ff3c52091f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.344301] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-93dc5eed-51ca-4a79-934e-c11281bb8abc tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: ba57ed92-aaef-460c-bd45-d0cbe09e4615] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:78:3c:3d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4576b9d4-535c-40aa-b078-246f671f216e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '66421979-b107-4dd5-9bc4-40bdefa3a5d0', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 850.355779] env[63028]: DEBUG oslo.service.loopingcall [None req-93dc5eed-51ca-4a79-934e-c11281bb8abc tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 850.360442] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ba57ed92-aaef-460c-bd45-d0cbe09e4615] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 850.360800] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b2ecb7b9-f2a6-4171-aa4d-ac4a82704fd4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.390855] env[63028]: DEBUG oslo_vmware.api [None req-e3c5b66a-8429-42f3-9cb6-fcb4bd521431 tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Task: {'id': task-2735659, 'name': PowerOnVM_Task} progress is 94%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.392269] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 850.392269] env[63028]: value = "task-2735660" [ 850.392269] env[63028]: _type = "Task" [ 850.392269] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.402094] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735660, 'name': CreateVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.612528] env[63028]: DEBUG nova.compute.manager [None req-c70c58a8-01fc-41f4-ab97-625b838707cf tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] [instance: 022125c4-2b0c-4a2c-ae63-18968887316e] Start destroying the instance on the hypervisor. {{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 850.613451] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c70c58a8-01fc-41f4-ab97-625b838707cf tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] [instance: 022125c4-2b0c-4a2c-ae63-18968887316e] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 850.613773] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6132599-6afc-4369-8174-b18e34df325c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.621022] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c70c58a8-01fc-41f4-ab97-625b838707cf tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] [instance: 022125c4-2b0c-4a2c-ae63-18968887316e] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 850.621427] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: 0d96ba8e-b46b-48ae-957c-cdc49762c395] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 850.623109] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c38fcd83-4e8c-4e56-a1f0-d606afe243fc {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.709471] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c70c58a8-01fc-41f4-ab97-625b838707cf tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] [instance: 022125c4-2b0c-4a2c-ae63-18968887316e] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 850.709703] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c70c58a8-01fc-41f4-ab97-625b838707cf tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] [instance: 022125c4-2b0c-4a2c-ae63-18968887316e] Deleting contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 850.709895] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-c70c58a8-01fc-41f4-ab97-625b838707cf tempest-ServersAdminNegativeTestJSON-1478593092 
tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Deleting the datastore file [datastore1] 022125c4-2b0c-4a2c-ae63-18968887316e {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 850.711306] env[63028]: DEBUG nova.compute.utils [None req-81b1346f-eb21-48c7-8ffd-fb557f6b5405 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 850.712662] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9b7e53d4-ca48-4939-a5fe-745290c3c54d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.716503] env[63028]: DEBUG nova.compute.manager [None req-81b1346f-eb21-48c7-8ffd-fb557f6b5405 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] [instance: 4e859327-ccd3-440e-b884-67f6cdadf97f] Allocating IP information in the background. {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 850.716669] env[63028]: DEBUG nova.network.neutron [None req-81b1346f-eb21-48c7-8ffd-fb557f6b5405 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] [instance: 4e859327-ccd3-440e-b884-67f6cdadf97f] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 850.724117] env[63028]: DEBUG oslo_vmware.api [None req-c70c58a8-01fc-41f4-ab97-625b838707cf tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Waiting for the task: (returnval){ [ 850.724117] env[63028]: value = "task-2735662" [ 850.724117] env[63028]: _type = "Task" [ 850.724117] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.733444] env[63028]: DEBUG oslo_vmware.api [None req-c70c58a8-01fc-41f4-ab97-625b838707cf tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Task: {'id': task-2735662, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.740594] env[63028]: DEBUG oslo_concurrency.lockutils [None req-587ce72e-e9e6-43e2-b1d2-881ecb2cd93a tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Lock "5982cd5d-abf1-42d4-bb44-8d79de599f11" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.209s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 850.762290] env[63028]: DEBUG nova.policy [None req-81b1346f-eb21-48c7-8ffd-fb557f6b5405 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0a5a0263e379494aa906d35f2fe1202f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '97060d5fb7e8454eadaf5dc9b426a248', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 850.789189] env[63028]: DEBUG oslo_concurrency.lockutils [None req-e1bd3373-3038-4133-a676-f1792a1f8e56 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Releasing lock "refresh_cache-52b19182-a7e2-4461-b4eb-e6cd8a30024e" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 850.854521] env[63028]: DEBUG oslo_vmware.api [None req-e3c5b66a-8429-42f3-9cb6-fcb4bd521431 tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Task: {'id': task-2735659, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.902776] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735660, 'name': CreateVM_Task, 'duration_secs': 0.315128} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.903357] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ba57ed92-aaef-460c-bd45-d0cbe09e4615] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 850.904821] env[63028]: DEBUG oslo_concurrency.lockutils [None req-93dc5eed-51ca-4a79-934e-c11281bb8abc tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 850.904821] env[63028]: DEBUG oslo_concurrency.lockutils [None req-93dc5eed-51ca-4a79-934e-c11281bb8abc tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 850.904989] env[63028]: DEBUG oslo_concurrency.lockutils [None req-93dc5eed-51ca-4a79-934e-c11281bb8abc tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 850.905291] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-42bd6c4f-9111-44a1-aefc-a881e36e13a6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.913702] env[63028]: DEBUG oslo_vmware.api [None req-93dc5eed-51ca-4a79-934e-c11281bb8abc tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Waiting for the task: (returnval){ [ 850.913702] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52e8de99-8651-c714-9f46-9e93fb3db7fd" [ 850.913702] env[63028]: _type = "Task" [ 850.913702] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.925085] env[63028]: DEBUG oslo_vmware.api [None req-93dc5eed-51ca-4a79-934e-c11281bb8abc tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52e8de99-8651-c714-9f46-9e93fb3db7fd, 'name': SearchDatastore_Task, 'duration_secs': 0.009764} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.925402] env[63028]: DEBUG oslo_concurrency.lockutils [None req-93dc5eed-51ca-4a79-934e-c11281bb8abc tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 850.925637] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-93dc5eed-51ca-4a79-934e-c11281bb8abc tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: ba57ed92-aaef-460c-bd45-d0cbe09e4615] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 850.926107] env[63028]: DEBUG oslo_concurrency.lockutils [None req-93dc5eed-51ca-4a79-934e-c11281bb8abc tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 850.926265] env[63028]: DEBUG oslo_concurrency.lockutils [None req-93dc5eed-51ca-4a79-934e-c11281bb8abc tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 850.926450] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-93dc5eed-51ca-4a79-934e-c11281bb8abc tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 850.926887] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6c609e87-7866-4adc-80f9-46bd596fb0b9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.936376] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-93dc5eed-51ca-4a79-934e-c11281bb8abc tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 850.936835] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-93dc5eed-51ca-4a79-934e-c11281bb8abc tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 850.938462] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e75a3ab3-6a71-4e99-87f6-e575fc97d808 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.944244] env[63028]: DEBUG oslo_vmware.api [None req-93dc5eed-51ca-4a79-934e-c11281bb8abc tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Waiting for the task: (returnval){ [ 850.944244] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52f7cc20-6a47-0613-4035-8ef6bf8f136b" [ 850.944244] env[63028]: _type = "Task" [ 850.944244] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.953293] env[63028]: DEBUG oslo_vmware.api [None req-93dc5eed-51ca-4a79-934e-c11281bb8abc tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52f7cc20-6a47-0613-4035-8ef6bf8f136b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.129022] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: 22713da1-ae53-4bbe-ae55-2490440cbd87] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 851.160996] env[63028]: DEBUG nova.network.neutron [None req-81b1346f-eb21-48c7-8ffd-fb557f6b5405 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] [instance: 4e859327-ccd3-440e-b884-67f6cdadf97f] Successfully created port: e7cad309-0a34-4148-9d0b-e47549d8689d {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 851.215535] env[63028]: DEBUG nova.compute.manager [None req-81b1346f-eb21-48c7-8ffd-fb557f6b5405 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] [instance: 4e859327-ccd3-440e-b884-67f6cdadf97f] Start building block device mappings for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 851.235930] env[63028]: DEBUG oslo_vmware.api [None req-c70c58a8-01fc-41f4-ab97-625b838707cf tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Task: {'id': task-2735662, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.158587} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.238843] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-c70c58a8-01fc-41f4-ab97-625b838707cf tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 851.238843] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c70c58a8-01fc-41f4-ab97-625b838707cf tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] [instance: 022125c4-2b0c-4a2c-ae63-18968887316e] Deleted contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 851.238843] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c70c58a8-01fc-41f4-ab97-625b838707cf tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] [instance: 022125c4-2b0c-4a2c-ae63-18968887316e] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 851.238843] env[63028]: INFO nova.compute.manager [None req-c70c58a8-01fc-41f4-ab97-625b838707cf tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] [instance: 022125c4-2b0c-4a2c-ae63-18968887316e] Took 0.62 seconds to destroy the instance on the hypervisor. [ 851.238843] env[63028]: DEBUG oslo.service.loopingcall [None req-c70c58a8-01fc-41f4-ab97-625b838707cf tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 851.238843] env[63028]: DEBUG nova.compute.manager [-] [instance: 022125c4-2b0c-4a2c-ae63-18968887316e] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 851.238843] env[63028]: DEBUG nova.network.neutron [-] [instance: 022125c4-2b0c-4a2c-ae63-18968887316e] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 851.317895] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f97a4997-45e9-4093-8eaf-f95777c85fb6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.342446] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc6ab186-c3c0-4818-8b9c-674c673c6dca {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.353589] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-e1bd3373-3038-4133-a676-f1792a1f8e56 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: 52b19182-a7e2-4461-b4eb-e6cd8a30024e] Updating instance '52b19182-a7e2-4461-b4eb-e6cd8a30024e' progress to 83 {{(pid=63028) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 851.362019] env[63028]: DEBUG oslo_vmware.api [None req-e3c5b66a-8429-42f3-9cb6-fcb4bd521431 tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Task: {'id': task-2735659, 'name': PowerOnVM_Task, 'duration_secs': 1.521197} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.363248] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3c5b66a-8429-42f3-9cb6-fcb4bd521431 tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] [instance: 13e0ca05-3ab3-43e2-8b0d-8045e26d6723] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 851.363485] env[63028]: DEBUG nova.compute.manager [None req-e3c5b66a-8429-42f3-9cb6-fcb4bd521431 tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] [instance: 13e0ca05-3ab3-43e2-8b0d-8045e26d6723] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 851.366010] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b07d36a3-5831-4810-90a6-4c307e6d016d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.456489] env[63028]: DEBUG oslo_vmware.api [None req-93dc5eed-51ca-4a79-934e-c11281bb8abc tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52f7cc20-6a47-0613-4035-8ef6bf8f136b, 'name': SearchDatastore_Task, 'duration_secs': 0.008508} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.457289] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-00a0ebc7-0fe0-4aa5-8b16-d7a5250edcaa {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.465180] env[63028]: DEBUG oslo_vmware.api [None req-93dc5eed-51ca-4a79-934e-c11281bb8abc tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Waiting for the task: (returnval){ [ 851.465180] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52f06262-18e2-57b4-e1c3-b4f55ede61b8" [ 851.465180] env[63028]: _type = "Task" [ 851.465180] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.475318] env[63028]: DEBUG oslo_vmware.api [None req-93dc5eed-51ca-4a79-934e-c11281bb8abc tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52f06262-18e2-57b4-e1c3-b4f55ede61b8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.630633] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: 1af19279-e75b-4ec5-91f1-a0a101b229b2] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 851.665331] env[63028]: DEBUG nova.network.neutron [None req-81b1346f-eb21-48c7-8ffd-fb557f6b5405 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] [instance: 4e859327-ccd3-440e-b884-67f6cdadf97f] Successfully created port: a9813eb4-922c-4f70-8c74-e7e5f11caf7d {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 851.668721] env[63028]: DEBUG nova.compute.manager [req-dd20724a-af3c-4f8b-a8e5-c0fcb40f6c50 req-c373918b-8faa-4a79-a3fa-b813d703e78f service nova] [instance: 022125c4-2b0c-4a2c-ae63-18968887316e] Received event network-vif-deleted-9c71a142-583d-4c67-95dc-f09f116bf3c5 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 851.668721] env[63028]: INFO nova.compute.manager [req-dd20724a-af3c-4f8b-a8e5-c0fcb40f6c50 req-c373918b-8faa-4a79-a3fa-b813d703e78f service nova] [instance: 022125c4-2b0c-4a2c-ae63-18968887316e] Neutron deleted interface 9c71a142-583d-4c67-95dc-f09f116bf3c5; detaching it from the instance and deleting it from the info cache [ 851.668721] env[63028]: DEBUG nova.network.neutron [req-dd20724a-af3c-4f8b-a8e5-c0fcb40f6c50 req-c373918b-8faa-4a79-a3fa-b813d703e78f service nova] [instance: 022125c4-2b0c-4a2c-ae63-18968887316e] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 851.726738] env[63028]: DEBUG oslo_concurrency.lockutils [None req-3885cc69-0e8c-4570-88b4-1466a9dc892d tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Acquiring lock "1316318e-8dcf-4ac2-b40a-6a3ab6964997" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 851.726983] env[63028]: DEBUG oslo_concurrency.lockutils [None 
req-3885cc69-0e8c-4570-88b4-1466a9dc892d tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Lock "1316318e-8dcf-4ac2-b40a-6a3ab6964997" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 851.727920] env[63028]: DEBUG oslo_concurrency.lockutils [None req-3885cc69-0e8c-4570-88b4-1466a9dc892d tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Acquiring lock "1316318e-8dcf-4ac2-b40a-6a3ab6964997-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 851.727920] env[63028]: DEBUG oslo_concurrency.lockutils [None req-3885cc69-0e8c-4570-88b4-1466a9dc892d tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Lock "1316318e-8dcf-4ac2-b40a-6a3ab6964997-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 851.727920] env[63028]: DEBUG oslo_concurrency.lockutils [None req-3885cc69-0e8c-4570-88b4-1466a9dc892d tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Lock "1316318e-8dcf-4ac2-b40a-6a3ab6964997-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 851.732108] env[63028]: INFO nova.compute.manager [None req-3885cc69-0e8c-4570-88b4-1466a9dc892d tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 1316318e-8dcf-4ac2-b40a-6a3ab6964997] Terminating instance [ 851.832457] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d750793e-bc24-4547-bac0-2a2e51d124d7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.840182] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17de4d7a-4c65-41fa-b83b-f99e0ea7606b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.873225] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1bd3373-3038-4133-a676-f1792a1f8e56 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: 52b19182-a7e2-4461-b4eb-e6cd8a30024e] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 851.884024] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3352b27f-d586-4cda-b8d9-36eba629a405 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.884304] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5916c13-ec5f-4198-9549-b980129b60ff {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.886663] env[63028]: DEBUG 
oslo_concurrency.lockutils [None req-e3c5b66a-8429-42f3-9cb6-fcb4bd521431 tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 851.893326] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51713a21-6b89-4d75-8e40-9f4f5e1858a8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.897256] env[63028]: DEBUG oslo_vmware.api [None req-e1bd3373-3038-4133-a676-f1792a1f8e56 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Waiting for the task: (returnval){ [ 851.897256] env[63028]: value = "task-2735663" [ 851.897256] env[63028]: _type = "Task" [ 851.897256] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.909409] env[63028]: DEBUG nova.compute.provider_tree [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 851.915552] env[63028]: DEBUG oslo_vmware.api [None req-e1bd3373-3038-4133-a676-f1792a1f8e56 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Task: {'id': task-2735663, 'name': PowerOnVM_Task} progress is 33%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.975557] env[63028]: DEBUG oslo_vmware.api [None req-93dc5eed-51ca-4a79-934e-c11281bb8abc tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52f06262-18e2-57b4-e1c3-b4f55ede61b8, 'name': SearchDatastore_Task, 'duration_secs': 0.009968} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.975839] env[63028]: DEBUG oslo_concurrency.lockutils [None req-93dc5eed-51ca-4a79-934e-c11281bb8abc tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 851.976120] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-93dc5eed-51ca-4a79-934e-c11281bb8abc tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] ba57ed92-aaef-460c-bd45-d0cbe09e4615/ba57ed92-aaef-460c-bd45-d0cbe09e4615.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 851.976384] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6917a098-d104-4e0f-b2ed-b273a3bc40f5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.983186] env[63028]: DEBUG oslo_vmware.api [None req-93dc5eed-51ca-4a79-934e-c11281bb8abc tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Waiting for the task: (returnval){ [ 851.983186] env[63028]: value = "task-2735664" [ 851.983186] env[63028]: _type = "Task" [ 851.983186] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.991750] env[63028]: DEBUG oslo_vmware.api [None req-93dc5eed-51ca-4a79-934e-c11281bb8abc tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': task-2735664, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.136210] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: c0db2b2a-9c06-409c-b48b-a0d5c127f2dc] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 852.143810] env[63028]: DEBUG nova.network.neutron [-] [instance: 022125c4-2b0c-4a2c-ae63-18968887316e] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 852.171071] env[63028]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e143be9f-d4af-4643-86e4-1b27aaf2c307 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.181894] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa6d1b79-5a8f-488d-bd6a-8d21815287a3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.220757] env[63028]: DEBUG nova.compute.manager [req-dd20724a-af3c-4f8b-a8e5-c0fcb40f6c50 req-c373918b-8faa-4a79-a3fa-b813d703e78f service nova] [instance: 022125c4-2b0c-4a2c-ae63-18968887316e] Detach interface failed, port_id=9c71a142-583d-4c67-95dc-f09f116bf3c5, reason: Instance 022125c4-2b0c-4a2c-ae63-18968887316e could not be found. {{(pid=63028) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 852.225470] env[63028]: DEBUG nova.compute.manager [None req-81b1346f-eb21-48c7-8ffd-fb557f6b5405 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] [instance: 4e859327-ccd3-440e-b884-67f6cdadf97f] Start spawning the instance on the hypervisor. {{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 852.235773] env[63028]: DEBUG nova.compute.manager [None req-3885cc69-0e8c-4570-88b4-1466a9dc892d tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 1316318e-8dcf-4ac2-b40a-6a3ab6964997] Start destroying the instance on the hypervisor. 
{{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 852.236400] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-3885cc69-0e8c-4570-88b4-1466a9dc892d tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 1316318e-8dcf-4ac2-b40a-6a3ab6964997] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 852.237318] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57d4b5a2-5127-474a-9903-f5980aa39e7c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.245887] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-3885cc69-0e8c-4570-88b4-1466a9dc892d tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 1316318e-8dcf-4ac2-b40a-6a3ab6964997] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 852.248049] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e90e4dad-c7b0-4ae3-8294-61dc84a04d61 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.255058] env[63028]: DEBUG oslo_vmware.api [None req-3885cc69-0e8c-4570-88b4-1466a9dc892d tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Waiting for the task: (returnval){ [ 852.255058] env[63028]: value = "task-2735665" [ 852.255058] env[63028]: _type = "Task" [ 852.255058] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.257360] env[63028]: DEBUG nova.virt.hardware [None req-81b1346f-eb21-48c7-8ffd-fb557f6b5405 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 852.257615] env[63028]: DEBUG nova.virt.hardware [None req-81b1346f-eb21-48c7-8ffd-fb557f6b5405 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 852.257792] env[63028]: DEBUG nova.virt.hardware [None req-81b1346f-eb21-48c7-8ffd-fb557f6b5405 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 852.258352] env[63028]: DEBUG nova.virt.hardware [None 
req-81b1346f-eb21-48c7-8ffd-fb557f6b5405 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 852.258352] env[63028]: DEBUG nova.virt.hardware [None req-81b1346f-eb21-48c7-8ffd-fb557f6b5405 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 852.258542] env[63028]: DEBUG nova.virt.hardware [None req-81b1346f-eb21-48c7-8ffd-fb557f6b5405 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 852.258769] env[63028]: DEBUG nova.virt.hardware [None req-81b1346f-eb21-48c7-8ffd-fb557f6b5405 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 852.258932] env[63028]: DEBUG nova.virt.hardware [None req-81b1346f-eb21-48c7-8ffd-fb557f6b5405 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 852.259116] env[63028]: DEBUG nova.virt.hardware [None req-81b1346f-eb21-48c7-8ffd-fb557f6b5405 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 852.259281] env[63028]: DEBUG nova.virt.hardware [None req-81b1346f-eb21-48c7-8ffd-fb557f6b5405 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 852.259451] env[63028]: DEBUG nova.virt.hardware [None req-81b1346f-eb21-48c7-8ffd-fb557f6b5405 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 852.260689] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c70615c-c23a-48f8-91aa-0c4a7b7921f1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.277926] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0925065-4e61-49c2-afac-ddfc86062d01 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.282756] env[63028]: DEBUG oslo_vmware.api [None req-3885cc69-0e8c-4570-88b4-1466a9dc892d tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Task: {'id': task-2735665, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.408344] env[63028]: DEBUG oslo_vmware.api [None req-e1bd3373-3038-4133-a676-f1792a1f8e56 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Task: {'id': task-2735663, 'name': PowerOnVM_Task, 'duration_secs': 0.441064} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.408654] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1bd3373-3038-4133-a676-f1792a1f8e56 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: 52b19182-a7e2-4461-b4eb-e6cd8a30024e] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 852.408846] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-e1bd3373-3038-4133-a676-f1792a1f8e56 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: 52b19182-a7e2-4461-b4eb-e6cd8a30024e] Updating instance '52b19182-a7e2-4461-b4eb-e6cd8a30024e' progress to 100 {{(pid=63028) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 852.413916] env[63028]: DEBUG nova.scheduler.client.report [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 852.495447] env[63028]: DEBUG oslo_vmware.api [None req-93dc5eed-51ca-4a79-934e-c11281bb8abc tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': task-2735664, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.639836] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: c3014718-1064-4ab9-9600-86490489ee4b] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 852.646173] env[63028]: INFO nova.compute.manager [-] [instance: 022125c4-2b0c-4a2c-ae63-18968887316e] Took 1.41 seconds to deallocate network for instance. [ 852.769737] env[63028]: DEBUG oslo_vmware.api [None req-3885cc69-0e8c-4570-88b4-1466a9dc892d tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Task: {'id': task-2735665, 'name': PowerOffVM_Task, 'duration_secs': 0.236151} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.770045] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-3885cc69-0e8c-4570-88b4-1466a9dc892d tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 1316318e-8dcf-4ac2-b40a-6a3ab6964997] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 852.770303] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-3885cc69-0e8c-4570-88b4-1466a9dc892d tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 1316318e-8dcf-4ac2-b40a-6a3ab6964997] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 852.770498] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2b63f5f5-75a1-4de7-ab01-86b0fa21bc28 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.830073] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-3885cc69-0e8c-4570-88b4-1466a9dc892d tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 1316318e-8dcf-4ac2-b40a-6a3ab6964997] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 852.830373] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-3885cc69-0e8c-4570-88b4-1466a9dc892d tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 1316318e-8dcf-4ac2-b40a-6a3ab6964997] Deleting contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 852.830468] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-3885cc69-0e8c-4570-88b4-1466a9dc892d tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Deleting the datastore file [datastore1] 1316318e-8dcf-4ac2-b40a-6a3ab6964997 {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 852.831195] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6c26fa0b-e1b5-492d-9967-09f2be73b2c1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.837049] env[63028]: DEBUG oslo_vmware.api [None req-3885cc69-0e8c-4570-88b4-1466a9dc892d tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Waiting for the task: (returnval){ [ 852.837049] env[63028]: value = "task-2735667" [ 852.837049] env[63028]: _type = "Task" [ 852.837049] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.846177] env[63028]: DEBUG oslo_concurrency.lockutils [None req-35f8f241-e040-40b3-90fb-8ff6ead9ad3e tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Acquiring lock "13e0ca05-3ab3-43e2-8b0d-8045e26d6723" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 852.846408] env[63028]: DEBUG oslo_concurrency.lockutils [None req-35f8f241-e040-40b3-90fb-8ff6ead9ad3e tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Lock "13e0ca05-3ab3-43e2-8b0d-8045e26d6723" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 852.846604] env[63028]: DEBUG oslo_concurrency.lockutils [None req-35f8f241-e040-40b3-90fb-8ff6ead9ad3e tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Acquiring lock "13e0ca05-3ab3-43e2-8b0d-8045e26d6723-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 852.846781] env[63028]: DEBUG oslo_concurrency.lockutils [None req-35f8f241-e040-40b3-90fb-8ff6ead9ad3e tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Lock "13e0ca05-3ab3-43e2-8b0d-8045e26d6723-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 852.846947] env[63028]: DEBUG oslo_concurrency.lockutils [None req-35f8f241-e040-40b3-90fb-8ff6ead9ad3e tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Lock "13e0ca05-3ab3-43e2-8b0d-8045e26d6723-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 852.848451] env[63028]: DEBUG oslo_vmware.api [None req-3885cc69-0e8c-4570-88b4-1466a9dc892d tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Task: {'id': task-2735667, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.848893] env[63028]: INFO nova.compute.manager [None req-35f8f241-e040-40b3-90fb-8ff6ead9ad3e tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] [instance: 13e0ca05-3ab3-43e2-8b0d-8045e26d6723] Terminating instance [ 852.921564] env[63028]: DEBUG oslo_concurrency.lockutils [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.716s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 852.922134] env[63028]: DEBUG nova.compute.manager [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: 9773ad95-1894-471d-8020-c7952eac4be4] Start building networks asynchronously for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 852.924914] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c457f1a3-7cf4-4d81-ade9-2592889b1fa6 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.961s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 852.926655] env[63028]: INFO nova.compute.claims [None req-c457f1a3-7cf4-4d81-ade9-2592889b1fa6 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 79f4ef22-a589-4d5c-8832-5d5dcdd55561] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 852.994415] env[63028]: DEBUG oslo_vmware.api [None req-93dc5eed-51ca-4a79-934e-c11281bb8abc tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': task-2735664, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.584053} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.994688] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-93dc5eed-51ca-4a79-934e-c11281bb8abc tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] ba57ed92-aaef-460c-bd45-d0cbe09e4615/ba57ed92-aaef-460c-bd45-d0cbe09e4615.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 852.994907] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-93dc5eed-51ca-4a79-934e-c11281bb8abc tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: ba57ed92-aaef-460c-bd45-d0cbe09e4615] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 852.995169] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-13f41ddd-f0d9-4b00-aa63-27593801b10b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.001247] env[63028]: DEBUG oslo_vmware.api [None req-93dc5eed-51ca-4a79-934e-c11281bb8abc tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Waiting for the task: (returnval){ [ 853.001247] env[63028]: value = "task-2735668" [ 853.001247] env[63028]: _type = "Task" [ 853.001247] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.009399] env[63028]: DEBUG oslo_vmware.api [None req-93dc5eed-51ca-4a79-934e-c11281bb8abc tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': task-2735668, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.144368] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: 8c7c8713-d5d7-490e-aba5-25d98bfbfaa0] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 853.156226] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c70c58a8-01fc-41f4-ab97-625b838707cf tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 853.256428] env[63028]: DEBUG nova.network.neutron [None req-81b1346f-eb21-48c7-8ffd-fb557f6b5405 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] [instance: 4e859327-ccd3-440e-b884-67f6cdadf97f] Successfully updated port: e7cad309-0a34-4148-9d0b-e47549d8689d {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 853.346889] env[63028]: DEBUG oslo_vmware.api [None req-3885cc69-0e8c-4570-88b4-1466a9dc892d tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Task: {'id': task-2735667, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.352617] env[63028]: DEBUG oslo_concurrency.lockutils [None req-35f8f241-e040-40b3-90fb-8ff6ead9ad3e tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Acquiring lock "refresh_cache-13e0ca05-3ab3-43e2-8b0d-8045e26d6723" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 853.352783] env[63028]: DEBUG oslo_concurrency.lockutils [None req-35f8f241-e040-40b3-90fb-8ff6ead9ad3e tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Acquired lock "refresh_cache-13e0ca05-3ab3-43e2-8b0d-8045e26d6723" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 853.352979] env[63028]: DEBUG nova.network.neutron [None req-35f8f241-e040-40b3-90fb-8ff6ead9ad3e tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] [instance: 13e0ca05-3ab3-43e2-8b0d-8045e26d6723] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 853.433371] env[63028]: DEBUG nova.compute.utils [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 853.434847] env[63028]: DEBUG nova.compute.manager [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: 9773ad95-1894-471d-8020-c7952eac4be4] Allocating IP information in the background. {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 853.435047] env[63028]: DEBUG nova.network.neutron [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: 9773ad95-1894-471d-8020-c7952eac4be4] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 853.476076] env[63028]: DEBUG nova.policy [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ab9cb927bc134277bb980682fef01978', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5ef9a42771824708832a74238bbe89c0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 853.512629] env[63028]: DEBUG oslo_vmware.api [None req-93dc5eed-51ca-4a79-934e-c11281bb8abc tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': task-2735668, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.271824} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.513230] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-93dc5eed-51ca-4a79-934e-c11281bb8abc tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: ba57ed92-aaef-460c-bd45-d0cbe09e4615] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 853.514020] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a74c36d-971b-4432-b0a7-a33734a280c8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.542620] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-93dc5eed-51ca-4a79-934e-c11281bb8abc tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: ba57ed92-aaef-460c-bd45-d0cbe09e4615] Reconfiguring VM instance instance-0000001c to attach disk [datastore2] ba57ed92-aaef-460c-bd45-d0cbe09e4615/ba57ed92-aaef-460c-bd45-d0cbe09e4615.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 853.543129] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e6def124-7929-44c6-a865-ca500ca8f24a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.565790] env[63028]: DEBUG oslo_vmware.api [None req-93dc5eed-51ca-4a79-934e-c11281bb8abc tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Waiting for the task: (returnval){ [ 853.565790] env[63028]: value = "task-2735669" [ 853.565790] env[63028]: _type = "Task" [ 853.565790] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.574389] env[63028]: DEBUG oslo_vmware.api [None req-93dc5eed-51ca-4a79-934e-c11281bb8abc tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': task-2735669, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.648650] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: 8a09beac-4b54-4fbb-9bac-3dcfe4c21fb3] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 853.719228] env[63028]: DEBUG nova.compute.manager [req-af81538e-c73c-4c15-9524-ce03da8545e5 req-a3df7e67-3049-4fc4-8db6-3cddefc935e8 service nova] [instance: 4e859327-ccd3-440e-b884-67f6cdadf97f] Received event network-vif-plugged-e7cad309-0a34-4148-9d0b-e47549d8689d {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 853.719449] env[63028]: DEBUG oslo_concurrency.lockutils [req-af81538e-c73c-4c15-9524-ce03da8545e5 req-a3df7e67-3049-4fc4-8db6-3cddefc935e8 service nova] Acquiring lock "4e859327-ccd3-440e-b884-67f6cdadf97f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 853.719654] env[63028]: DEBUG oslo_concurrency.lockutils [req-af81538e-c73c-4c15-9524-ce03da8545e5 req-a3df7e67-3049-4fc4-8db6-3cddefc935e8 service nova] Lock "4e859327-ccd3-440e-b884-67f6cdadf97f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 853.719821] env[63028]: DEBUG oslo_concurrency.lockutils [req-af81538e-c73c-4c15-9524-ce03da8545e5 req-a3df7e67-3049-4fc4-8db6-3cddefc935e8 service nova] Lock "4e859327-ccd3-440e-b884-67f6cdadf97f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 853.719985] env[63028]: DEBUG nova.compute.manager [req-af81538e-c73c-4c15-9524-ce03da8545e5 req-a3df7e67-3049-4fc4-8db6-3cddefc935e8 service nova] [instance: 4e859327-ccd3-440e-b884-67f6cdadf97f] No waiting events found dispatching network-vif-plugged-e7cad309-0a34-4148-9d0b-e47549d8689d {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 853.720162] env[63028]: WARNING nova.compute.manager [req-af81538e-c73c-4c15-9524-ce03da8545e5 req-a3df7e67-3049-4fc4-8db6-3cddefc935e8 service nova] [instance: 4e859327-ccd3-440e-b884-67f6cdadf97f] Received unexpected event network-vif-plugged-e7cad309-0a34-4148-9d0b-e47549d8689d for instance with vm_state building and task_state spawning. [ 853.720321] env[63028]: DEBUG nova.compute.manager [req-af81538e-c73c-4c15-9524-ce03da8545e5 req-a3df7e67-3049-4fc4-8db6-3cddefc935e8 service nova] [instance: 4e859327-ccd3-440e-b884-67f6cdadf97f] Received event network-changed-e7cad309-0a34-4148-9d0b-e47549d8689d {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 853.720470] env[63028]: DEBUG nova.compute.manager [req-af81538e-c73c-4c15-9524-ce03da8545e5 req-a3df7e67-3049-4fc4-8db6-3cddefc935e8 service nova] [instance: 4e859327-ccd3-440e-b884-67f6cdadf97f] Refreshing instance network info cache due to event network-changed-e7cad309-0a34-4148-9d0b-e47549d8689d. 
{{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 853.720684] env[63028]: DEBUG oslo_concurrency.lockutils [req-af81538e-c73c-4c15-9524-ce03da8545e5 req-a3df7e67-3049-4fc4-8db6-3cddefc935e8 service nova] Acquiring lock "refresh_cache-4e859327-ccd3-440e-b884-67f6cdadf97f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 853.720860] env[63028]: DEBUG oslo_concurrency.lockutils [req-af81538e-c73c-4c15-9524-ce03da8545e5 req-a3df7e67-3049-4fc4-8db6-3cddefc935e8 service nova] Acquired lock "refresh_cache-4e859327-ccd3-440e-b884-67f6cdadf97f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 853.721039] env[63028]: DEBUG nova.network.neutron [req-af81538e-c73c-4c15-9524-ce03da8545e5 req-a3df7e67-3049-4fc4-8db6-3cddefc935e8 service nova] [instance: 4e859327-ccd3-440e-b884-67f6cdadf97f] Refreshing network info cache for port e7cad309-0a34-4148-9d0b-e47549d8689d {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 853.847892] env[63028]: DEBUG oslo_vmware.api [None req-3885cc69-0e8c-4570-88b4-1466a9dc892d tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Task: {'id': task-2735667, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.942924} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.848177] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-3885cc69-0e8c-4570-88b4-1466a9dc892d tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 853.848366] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-3885cc69-0e8c-4570-88b4-1466a9dc892d tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 1316318e-8dcf-4ac2-b40a-6a3ab6964997] Deleted contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 853.848622] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-3885cc69-0e8c-4570-88b4-1466a9dc892d tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 1316318e-8dcf-4ac2-b40a-6a3ab6964997] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 853.848826] env[63028]: INFO nova.compute.manager [None req-3885cc69-0e8c-4570-88b4-1466a9dc892d tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] [instance: 1316318e-8dcf-4ac2-b40a-6a3ab6964997] Took 1.61 seconds to destroy the instance on the hypervisor. [ 853.849083] env[63028]: DEBUG oslo.service.loopingcall [None req-3885cc69-0e8c-4570-88b4-1466a9dc892d tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 853.849272] env[63028]: DEBUG nova.compute.manager [-] [instance: 1316318e-8dcf-4ac2-b40a-6a3ab6964997] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 853.849365] env[63028]: DEBUG nova.network.neutron [-] [instance: 1316318e-8dcf-4ac2-b40a-6a3ab6964997] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 853.880916] env[63028]: DEBUG nova.network.neutron [None req-35f8f241-e040-40b3-90fb-8ff6ead9ad3e tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] [instance: 13e0ca05-3ab3-43e2-8b0d-8045e26d6723] Instance cache missing network info. {{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 853.948890] env[63028]: DEBUG nova.compute.manager [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: 9773ad95-1894-471d-8020-c7952eac4be4] Start building block device mappings for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 853.955445] env[63028]: DEBUG nova.network.neutron [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: 9773ad95-1894-471d-8020-c7952eac4be4] Successfully created port: 995d1950-1169-43bf-8afe-427bdcb37b9d {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 854.017640] env[63028]: DEBUG nova.network.neutron [None req-35f8f241-e040-40b3-90fb-8ff6ead9ad3e tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] [instance: 13e0ca05-3ab3-43e2-8b0d-8045e26d6723] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 854.077633] env[63028]: DEBUG oslo_vmware.api [None req-93dc5eed-51ca-4a79-934e-c11281bb8abc tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': task-2735669, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.150568] env[63028]: DEBUG nova.compute.manager [req-c69c675b-1060-4d46-93b4-28a3dde37544 req-1d35c860-2579-4a71-ae56-ef2ce446beb8 service nova] [instance: 1316318e-8dcf-4ac2-b40a-6a3ab6964997] Received event network-vif-deleted-9917de69-098c-41fd-8a7e-63885001786a {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 854.150876] env[63028]: INFO nova.compute.manager [req-c69c675b-1060-4d46-93b4-28a3dde37544 req-1d35c860-2579-4a71-ae56-ef2ce446beb8 service nova] [instance: 1316318e-8dcf-4ac2-b40a-6a3ab6964997] Neutron deleted interface 9917de69-098c-41fd-8a7e-63885001786a; detaching it from the instance and deleting it from the info cache [ 854.150914] env[63028]: DEBUG nova.network.neutron [req-c69c675b-1060-4d46-93b4-28a3dde37544 req-1d35c860-2579-4a71-ae56-ef2ce446beb8 service nova] [instance: 1316318e-8dcf-4ac2-b40a-6a3ab6964997] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 854.152905] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: b9db75ba-6832-45e8-8faf-d1cdaa7dabdd] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 854.280633] env[63028]: DEBUG nova.network.neutron [req-af81538e-c73c-4c15-9524-ce03da8545e5 req-a3df7e67-3049-4fc4-8db6-3cddefc935e8 service nova] [instance: 4e859327-ccd3-440e-b884-67f6cdadf97f] Instance cache missing network info. {{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 854.405644] env[63028]: DEBUG nova.network.neutron [req-af81538e-c73c-4c15-9524-ce03da8545e5 req-a3df7e67-3049-4fc4-8db6-3cddefc935e8 service nova] [instance: 4e859327-ccd3-440e-b884-67f6cdadf97f] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 854.453617] env[63028]: DEBUG nova.network.neutron [None req-c74f3c02-cb2d-48e5-b35b-047d36f54ebf tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: 52b19182-a7e2-4461-b4eb-e6cd8a30024e] Port 7dd5be5d-a88d-4dcd-a42d-7842895207f7 binding to destination host cpu-1 is already ACTIVE {{(pid=63028) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 854.453905] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c74f3c02-cb2d-48e5-b35b-047d36f54ebf tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Acquiring lock "refresh_cache-52b19182-a7e2-4461-b4eb-e6cd8a30024e" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 854.454159] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c74f3c02-cb2d-48e5-b35b-047d36f54ebf tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Acquired lock "refresh_cache-52b19182-a7e2-4461-b4eb-e6cd8a30024e" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 854.454383] env[63028]: DEBUG nova.network.neutron [None req-c74f3c02-cb2d-48e5-b35b-047d36f54ebf tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: 52b19182-a7e2-4461-b4eb-e6cd8a30024e] Building network info cache for instance {{(pid=63028) 
_get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 854.520591] env[63028]: DEBUG oslo_concurrency.lockutils [None req-35f8f241-e040-40b3-90fb-8ff6ead9ad3e tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Releasing lock "refresh_cache-13e0ca05-3ab3-43e2-8b0d-8045e26d6723" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 854.521056] env[63028]: DEBUG nova.compute.manager [None req-35f8f241-e040-40b3-90fb-8ff6ead9ad3e tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] [instance: 13e0ca05-3ab3-43e2-8b0d-8045e26d6723] Start destroying the instance on the hypervisor. {{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 854.521266] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-35f8f241-e040-40b3-90fb-8ff6ead9ad3e tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] [instance: 13e0ca05-3ab3-43e2-8b0d-8045e26d6723] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 854.522177] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ffa3a81-03df-45b2-ac60-5a131d3abe3e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.532704] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-35f8f241-e040-40b3-90fb-8ff6ead9ad3e tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] [instance: 13e0ca05-3ab3-43e2-8b0d-8045e26d6723] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 854.533118] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7bb74f81-56d1-4029-99e6-f0205c1357ef {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.539800] env[63028]: DEBUG oslo_vmware.api [None req-35f8f241-e040-40b3-90fb-8ff6ead9ad3e tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Waiting for the task: (returnval){ [ 854.539800] env[63028]: value = "task-2735670" [ 854.539800] env[63028]: _type = "Task" [ 854.539800] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.547504] env[63028]: DEBUG oslo_vmware.api [None req-35f8f241-e040-40b3-90fb-8ff6ead9ad3e tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Task: {'id': task-2735670, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.563581] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa9e6405-1d9c-4802-be75-e69d2155da33 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.573169] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7c7a85a-7399-4dd4-858f-785cc42d14d5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.579640] env[63028]: DEBUG oslo_vmware.api [None req-93dc5eed-51ca-4a79-934e-c11281bb8abc tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': task-2735669, 'name': ReconfigVM_Task, 'duration_secs': 0.871496} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.580393] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-93dc5eed-51ca-4a79-934e-c11281bb8abc tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: ba57ed92-aaef-460c-bd45-d0cbe09e4615] Reconfigured VM instance instance-0000001c to attach disk [datastore2] ba57ed92-aaef-460c-bd45-d0cbe09e4615/ba57ed92-aaef-460c-bd45-d0cbe09e4615.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 854.581049] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c2660c08-d048-48dd-85d3-aee8e1c6f810 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.608375] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60b4d291-1e08-4874-9459-1a6dbc78c660 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.613146] env[63028]: DEBUG oslo_vmware.api [None req-93dc5eed-51ca-4a79-934e-c11281bb8abc tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Waiting for the task: (returnval){ [ 854.613146] env[63028]: value = "task-2735671" [ 854.613146] env[63028]: _type = "Task" [ 854.613146] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.618448] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f771385-a378-43e1-8429-a8edce3fef66 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.627391] env[63028]: DEBUG oslo_vmware.api [None req-93dc5eed-51ca-4a79-934e-c11281bb8abc tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': task-2735671, 'name': Rename_Task} progress is 10%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.635203] env[63028]: DEBUG nova.network.neutron [-] [instance: 1316318e-8dcf-4ac2-b40a-6a3ab6964997] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 854.636659] env[63028]: DEBUG nova.compute.provider_tree [None req-c457f1a3-7cf4-4d81-ade9-2592889b1fa6 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 854.655373] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: 86d5d264-7a7a-434b-a1c4-e9a004c0a034] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 854.658137] env[63028]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4154a74b-3beb-4496-b98e-e08816e10bf8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.667898] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-273826b3-b24d-4445-8b89-5e7b503e5766 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.703921] env[63028]: DEBUG nova.compute.manager [req-c69c675b-1060-4d46-93b4-28a3dde37544 req-1d35c860-2579-4a71-ae56-ef2ce446beb8 service nova] [instance: 1316318e-8dcf-4ac2-b40a-6a3ab6964997] Detach interface failed, port_id=9917de69-098c-41fd-8a7e-63885001786a, reason: Instance 1316318e-8dcf-4ac2-b40a-6a3ab6964997 could not be found. {{(pid=63028) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 854.908933] env[63028]: DEBUG oslo_concurrency.lockutils [req-af81538e-c73c-4c15-9524-ce03da8545e5 req-a3df7e67-3049-4fc4-8db6-3cddefc935e8 service nova] Releasing lock "refresh_cache-4e859327-ccd3-440e-b884-67f6cdadf97f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 854.963659] env[63028]: DEBUG nova.compute.manager [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: 9773ad95-1894-471d-8020-c7952eac4be4] Start spawning the instance on the hypervisor. 
{{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 854.990359] env[63028]: DEBUG nova.virt.hardware [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='e395b48e9727d1179f6293b1c37579d7',container_format='bare',created_at=2025-02-20T17:58:03Z,direct_url=,disk_format='vmdk',id=c9823cf3-c81e-4b18-855a-a01f46d8c790,min_disk=1,min_ram=0,name='tempest-test-snap-326782054',owner='5ef9a42771824708832a74238bbe89c0',properties=ImageMetaProps,protected=,size=21334016,status='active',tags=,updated_at=2025-02-20T17:58:24Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 854.990614] env[63028]: DEBUG nova.virt.hardware [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 854.990793] env[63028]: DEBUG nova.virt.hardware [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 854.990979] env[63028]: DEBUG nova.virt.hardware [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 854.991141] env[63028]: DEBUG nova.virt.hardware [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 854.991288] env[63028]: DEBUG nova.virt.hardware [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 854.992080] env[63028]: DEBUG nova.virt.hardware [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 854.992080] env[63028]: DEBUG nova.virt.hardware [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 854.992080] env[63028]: DEBUG nova.virt.hardware [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Got 1 possible topologies 
{{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 854.992080] env[63028]: DEBUG nova.virt.hardware [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 854.992304] env[63028]: DEBUG nova.virt.hardware [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 854.993172] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aeca68d3-4ccb-46d7-b68b-40d611f82607 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.003989] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bbf44c9-d224-4f8b-9f15-1240d2eabf11 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.049398] env[63028]: DEBUG oslo_vmware.api [None req-35f8f241-e040-40b3-90fb-8ff6ead9ad3e tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Task: {'id': task-2735670, 'name': PowerOffVM_Task, 'duration_secs': 0.102853} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.049672] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-35f8f241-e040-40b3-90fb-8ff6ead9ad3e tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] [instance: 13e0ca05-3ab3-43e2-8b0d-8045e26d6723] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 855.049840] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-35f8f241-e040-40b3-90fb-8ff6ead9ad3e tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] [instance: 13e0ca05-3ab3-43e2-8b0d-8045e26d6723] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 855.050089] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ba4dff31-9792-4853-a830-df3d1c621e9f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.073430] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-35f8f241-e040-40b3-90fb-8ff6ead9ad3e tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] [instance: 13e0ca05-3ab3-43e2-8b0d-8045e26d6723] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 855.073654] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-35f8f241-e040-40b3-90fb-8ff6ead9ad3e tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] [instance: 13e0ca05-3ab3-43e2-8b0d-8045e26d6723] Deleting contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 855.073839] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-35f8f241-e040-40b3-90fb-8ff6ead9ad3e tempest-ServerShowV254Test-799272594 
tempest-ServerShowV254Test-799272594-project-member] Deleting the datastore file [datastore1] 13e0ca05-3ab3-43e2-8b0d-8045e26d6723 {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 855.074125] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e7319d04-6cf8-4747-8f57-6963b4d88790 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.079974] env[63028]: DEBUG oslo_vmware.api [None req-35f8f241-e040-40b3-90fb-8ff6ead9ad3e tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Waiting for the task: (returnval){ [ 855.079974] env[63028]: value = "task-2735673" [ 855.079974] env[63028]: _type = "Task" [ 855.079974] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.089617] env[63028]: DEBUG oslo_vmware.api [None req-35f8f241-e040-40b3-90fb-8ff6ead9ad3e tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Task: {'id': task-2735673, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.123145] env[63028]: DEBUG oslo_vmware.api [None req-93dc5eed-51ca-4a79-934e-c11281bb8abc tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': task-2735671, 'name': Rename_Task, 'duration_secs': 0.147608} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.123407] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-93dc5eed-51ca-4a79-934e-c11281bb8abc tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: ba57ed92-aaef-460c-bd45-d0cbe09e4615] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 855.123636] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-beca0b24-b8b2-4eaa-b84c-32e43b9937c0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.129314] env[63028]: DEBUG oslo_vmware.api [None req-93dc5eed-51ca-4a79-934e-c11281bb8abc tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Waiting for the task: (returnval){ [ 855.129314] env[63028]: value = "task-2735674" [ 855.129314] env[63028]: _type = "Task" [ 855.129314] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.136359] env[63028]: DEBUG oslo_vmware.api [None req-93dc5eed-51ca-4a79-934e-c11281bb8abc tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': task-2735674, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.139208] env[63028]: INFO nova.compute.manager [-] [instance: 1316318e-8dcf-4ac2-b40a-6a3ab6964997] Took 1.29 seconds to deallocate network for instance. 
[ 855.144489] env[63028]: DEBUG nova.scheduler.client.report [None req-c457f1a3-7cf4-4d81-ade9-2592889b1fa6 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 855.161605] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: 5a340e31-678c-437e-aa4e-07d5d9f4334f] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 855.242374] env[63028]: DEBUG nova.network.neutron [None req-c74f3c02-cb2d-48e5-b35b-047d36f54ebf tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: 52b19182-a7e2-4461-b4eb-e6cd8a30024e] Updating instance_info_cache with network_info: [{"id": "7dd5be5d-a88d-4dcd-a42d-7842895207f7", "address": "fa:16:3e:45:d9:fe", "network": {"id": "47c482bc-2ff1-431d-8910-0bf36def79a2", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.36", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "96661ac8d4f04d6e97eea4809b444133", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56136ef6-99d7-4562-9a9f-d66fec951c5c", "external-id": "nsx-vlan-transportzone-32", "segmentation_id": 32, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7dd5be5d-a8", "ovs_interfaceid": "7dd5be5d-a88d-4dcd-a42d-7842895207f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 855.590672] env[63028]: DEBUG oslo_vmware.api [None req-35f8f241-e040-40b3-90fb-8ff6ead9ad3e tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Task: {'id': task-2735673, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.250316} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.591094] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-35f8f241-e040-40b3-90fb-8ff6ead9ad3e tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 855.591308] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-35f8f241-e040-40b3-90fb-8ff6ead9ad3e tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] [instance: 13e0ca05-3ab3-43e2-8b0d-8045e26d6723] Deleted contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 855.591486] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-35f8f241-e040-40b3-90fb-8ff6ead9ad3e tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] [instance: 13e0ca05-3ab3-43e2-8b0d-8045e26d6723] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 855.591659] env[63028]: INFO nova.compute.manager [None req-35f8f241-e040-40b3-90fb-8ff6ead9ad3e tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] [instance: 13e0ca05-3ab3-43e2-8b0d-8045e26d6723] Took 1.07 seconds to destroy the instance on the hypervisor. [ 855.592037] env[63028]: DEBUG oslo.service.loopingcall [None req-35f8f241-e040-40b3-90fb-8ff6ead9ad3e tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 855.592117] env[63028]: DEBUG nova.compute.manager [-] [instance: 13e0ca05-3ab3-43e2-8b0d-8045e26d6723] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 855.592198] env[63028]: DEBUG nova.network.neutron [-] [instance: 13e0ca05-3ab3-43e2-8b0d-8045e26d6723] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 855.600744] env[63028]: DEBUG nova.network.neutron [None req-81b1346f-eb21-48c7-8ffd-fb557f6b5405 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] [instance: 4e859327-ccd3-440e-b884-67f6cdadf97f] Successfully updated port: a9813eb4-922c-4f70-8c74-e7e5f11caf7d {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 855.617121] env[63028]: DEBUG nova.network.neutron [-] [instance: 13e0ca05-3ab3-43e2-8b0d-8045e26d6723] Instance cache missing network info. {{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 855.639620] env[63028]: DEBUG oslo_vmware.api [None req-93dc5eed-51ca-4a79-934e-c11281bb8abc tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': task-2735674, 'name': PowerOnVM_Task, 'duration_secs': 0.488827} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.640388] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-93dc5eed-51ca-4a79-934e-c11281bb8abc tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: ba57ed92-aaef-460c-bd45-d0cbe09e4615] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 855.640549] env[63028]: DEBUG nova.compute.manager [None req-93dc5eed-51ca-4a79-934e-c11281bb8abc tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: ba57ed92-aaef-460c-bd45-d0cbe09e4615] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 855.641449] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19f4fb50-5614-45f5-aeb0-118de8743fe2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.644671] env[63028]: DEBUG oslo_concurrency.lockutils [None req-3885cc69-0e8c-4570-88b4-1466a9dc892d tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 855.648961] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c457f1a3-7cf4-4d81-ade9-2592889b1fa6 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.724s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 855.649465] env[63028]: DEBUG nova.compute.manager [None req-c457f1a3-7cf4-4d81-ade9-2592889b1fa6 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 79f4ef22-a589-4d5c-8832-5d5dcdd55561] Start building networks asynchronously for instance. 
{{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 855.658080] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4c4ff793-44ac-4d71-af67-67f78b3ae605 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.933s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 855.659527] env[63028]: INFO nova.compute.claims [None req-4c4ff793-44ac-4d71-af67-67f78b3ae605 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: a50e1167-d8ed-4099-83c3-a5066ab0be1f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 855.666602] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: 0e07a6cd-8c99-408d-95ba-63f7839c327f] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 855.745563] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c74f3c02-cb2d-48e5-b35b-047d36f54ebf tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Releasing lock "refresh_cache-52b19182-a7e2-4461-b4eb-e6cd8a30024e" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 855.945088] env[63028]: DEBUG nova.compute.manager [req-6a5e1c10-ada2-4128-96ba-d6d847965097 req-13d122e6-cf7d-4e96-9be6-90763c027c14 service nova] [instance: 4e859327-ccd3-440e-b884-67f6cdadf97f] Received event network-vif-plugged-a9813eb4-922c-4f70-8c74-e7e5f11caf7d {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 855.945287] env[63028]: DEBUG oslo_concurrency.lockutils [req-6a5e1c10-ada2-4128-96ba-d6d847965097 req-13d122e6-cf7d-4e96-9be6-90763c027c14 service nova] Acquiring lock "4e859327-ccd3-440e-b884-67f6cdadf97f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 855.945526] env[63028]: DEBUG oslo_concurrency.lockutils [req-6a5e1c10-ada2-4128-96ba-d6d847965097 req-13d122e6-cf7d-4e96-9be6-90763c027c14 service nova] Lock "4e859327-ccd3-440e-b884-67f6cdadf97f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 855.945658] env[63028]: DEBUG oslo_concurrency.lockutils [req-6a5e1c10-ada2-4128-96ba-d6d847965097 req-13d122e6-cf7d-4e96-9be6-90763c027c14 service nova] Lock "4e859327-ccd3-440e-b884-67f6cdadf97f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 855.945824] env[63028]: DEBUG nova.compute.manager [req-6a5e1c10-ada2-4128-96ba-d6d847965097 req-13d122e6-cf7d-4e96-9be6-90763c027c14 service nova] [instance: 4e859327-ccd3-440e-b884-67f6cdadf97f] No waiting events found dispatching network-vif-plugged-a9813eb4-922c-4f70-8c74-e7e5f11caf7d {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 855.946603] env[63028]: WARNING nova.compute.manager [req-6a5e1c10-ada2-4128-96ba-d6d847965097 
req-13d122e6-cf7d-4e96-9be6-90763c027c14 service nova] [instance: 4e859327-ccd3-440e-b884-67f6cdadf97f] Received unexpected event network-vif-plugged-a9813eb4-922c-4f70-8c74-e7e5f11caf7d for instance with vm_state building and task_state spawning. [ 855.946603] env[63028]: DEBUG nova.compute.manager [req-6a5e1c10-ada2-4128-96ba-d6d847965097 req-13d122e6-cf7d-4e96-9be6-90763c027c14 service nova] [instance: 4e859327-ccd3-440e-b884-67f6cdadf97f] Received event network-changed-a9813eb4-922c-4f70-8c74-e7e5f11caf7d {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 855.946603] env[63028]: DEBUG nova.compute.manager [req-6a5e1c10-ada2-4128-96ba-d6d847965097 req-13d122e6-cf7d-4e96-9be6-90763c027c14 service nova] [instance: 4e859327-ccd3-440e-b884-67f6cdadf97f] Refreshing instance network info cache due to event network-changed-a9813eb4-922c-4f70-8c74-e7e5f11caf7d. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 855.946774] env[63028]: DEBUG oslo_concurrency.lockutils [req-6a5e1c10-ada2-4128-96ba-d6d847965097 req-13d122e6-cf7d-4e96-9be6-90763c027c14 service nova] Acquiring lock "refresh_cache-4e859327-ccd3-440e-b884-67f6cdadf97f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 855.946909] env[63028]: DEBUG oslo_concurrency.lockutils [req-6a5e1c10-ada2-4128-96ba-d6d847965097 req-13d122e6-cf7d-4e96-9be6-90763c027c14 service nova] Acquired lock "refresh_cache-4e859327-ccd3-440e-b884-67f6cdadf97f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 855.947074] env[63028]: DEBUG nova.network.neutron [req-6a5e1c10-ada2-4128-96ba-d6d847965097 req-13d122e6-cf7d-4e96-9be6-90763c027c14 service nova] [instance: 4e859327-ccd3-440e-b884-67f6cdadf97f] Refreshing network info cache for port a9813eb4-922c-4f70-8c74-e7e5f11caf7d {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 856.064754] env[63028]: DEBUG nova.network.neutron [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: 9773ad95-1894-471d-8020-c7952eac4be4] Successfully updated port: 995d1950-1169-43bf-8afe-427bdcb37b9d {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 856.107320] env[63028]: DEBUG oslo_concurrency.lockutils [None req-81b1346f-eb21-48c7-8ffd-fb557f6b5405 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Acquiring lock "refresh_cache-4e859327-ccd3-440e-b884-67f6cdadf97f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 856.123095] env[63028]: DEBUG nova.network.neutron [-] [instance: 13e0ca05-3ab3-43e2-8b0d-8045e26d6723] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 856.157493] env[63028]: DEBUG nova.compute.utils [None req-c457f1a3-7cf4-4d81-ade9-2592889b1fa6 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 856.158316] env[63028]: DEBUG nova.compute.manager [None req-c457f1a3-7cf4-4d81-ade9-2592889b1fa6 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 79f4ef22-a589-4d5c-8832-5d5dcdd55561] Allocating IP information in the 
background. {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 856.158478] env[63028]: DEBUG nova.network.neutron [None req-c457f1a3-7cf4-4d81-ade9-2592889b1fa6 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 79f4ef22-a589-4d5c-8832-5d5dcdd55561] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 856.167577] env[63028]: DEBUG oslo_concurrency.lockutils [None req-93dc5eed-51ca-4a79-934e-c11281bb8abc tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 856.173022] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: 8f6beda6-0fc6-4d85-9f27-f4248adda8f3] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 856.220490] env[63028]: DEBUG nova.policy [None req-c457f1a3-7cf4-4d81-ade9-2592889b1fa6 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3c022ca18b0a41ce9d790fa25f6ebf8c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ea26842446ec4691a6456a6659188704', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 856.249563] env[63028]: DEBUG nova.compute.manager [None req-c74f3c02-cb2d-48e5-b35b-047d36f54ebf tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: 52b19182-a7e2-4461-b4eb-e6cd8a30024e] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=63028) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:901}} [ 856.249805] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c74f3c02-cb2d-48e5-b35b-047d36f54ebf tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 856.505301] env[63028]: DEBUG nova.network.neutron [req-6a5e1c10-ada2-4128-96ba-d6d847965097 req-13d122e6-cf7d-4e96-9be6-90763c027c14 service nova] [instance: 4e859327-ccd3-440e-b884-67f6cdadf97f] Instance cache missing network info. 
{{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 856.546903] env[63028]: DEBUG nova.network.neutron [None req-c457f1a3-7cf4-4d81-ade9-2592889b1fa6 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 79f4ef22-a589-4d5c-8832-5d5dcdd55561] Successfully created port: 0b5a99ec-110c-4325-b36a-92007f8e9e6a {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 856.567711] env[63028]: DEBUG oslo_concurrency.lockutils [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Acquiring lock "refresh_cache-9773ad95-1894-471d-8020-c7952eac4be4" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 856.567866] env[63028]: DEBUG oslo_concurrency.lockutils [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Acquired lock "refresh_cache-9773ad95-1894-471d-8020-c7952eac4be4" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 856.568017] env[63028]: DEBUG nova.network.neutron [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: 9773ad95-1894-471d-8020-c7952eac4be4] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 856.625470] env[63028]: INFO nova.compute.manager [-] [instance: 13e0ca05-3ab3-43e2-8b0d-8045e26d6723] Took 1.03 seconds to deallocate network for instance. [ 856.665105] env[63028]: DEBUG nova.compute.manager [None req-c457f1a3-7cf4-4d81-ade9-2592889b1fa6 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 79f4ef22-a589-4d5c-8832-5d5dcdd55561] Start building block device mappings for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 856.677467] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: 99886410-ec47-46ad-9d07-ee3593006997] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 856.681728] env[63028]: DEBUG nova.network.neutron [req-6a5e1c10-ada2-4128-96ba-d6d847965097 req-13d122e6-cf7d-4e96-9be6-90763c027c14 service nova] [instance: 4e859327-ccd3-440e-b884-67f6cdadf97f] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 857.106381] env[63028]: DEBUG nova.network.neutron [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: 9773ad95-1894-471d-8020-c7952eac4be4] Instance cache missing network info. 
{{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 857.141236] env[63028]: DEBUG oslo_concurrency.lockutils [None req-35f8f241-e040-40b3-90fb-8ff6ead9ad3e tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 857.181562] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: 2ae111f7-4eaa-46c2-ab97-907daa913834] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 857.183696] env[63028]: DEBUG oslo_concurrency.lockutils [req-6a5e1c10-ada2-4128-96ba-d6d847965097 req-13d122e6-cf7d-4e96-9be6-90763c027c14 service nova] Releasing lock "refresh_cache-4e859327-ccd3-440e-b884-67f6cdadf97f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 857.184226] env[63028]: DEBUG oslo_concurrency.lockutils [None req-81b1346f-eb21-48c7-8ffd-fb557f6b5405 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Acquired lock "refresh_cache-4e859327-ccd3-440e-b884-67f6cdadf97f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 857.184383] env[63028]: DEBUG nova.network.neutron [None req-81b1346f-eb21-48c7-8ffd-fb557f6b5405 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] [instance: 4e859327-ccd3-440e-b884-67f6cdadf97f] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 857.201959] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01eb1ba4-2ece-4e97-b2b6-e341e442ffbd {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.214194] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3084e05-17df-4e6e-a0d4-c5788137ba37 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.247584] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-134e64b6-08f7-478d-8f43-74df73c416a9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.256382] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d05d5fa-36b4-421f-9f37-8f26f1068b28 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.276757] env[63028]: DEBUG nova.compute.provider_tree [None req-4c4ff793-44ac-4d71-af67-67f78b3ae605 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 857.315994] env[63028]: DEBUG nova.network.neutron [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: 9773ad95-1894-471d-8020-c7952eac4be4] Updating 
instance_info_cache with network_info: [{"id": "995d1950-1169-43bf-8afe-427bdcb37b9d", "address": "fa:16:3e:1d:3f:7f", "network": {"id": "35eace8d-0404-444c-a1ec-e13fd5687644", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1634949692-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ef9a42771824708832a74238bbe89c0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c330dbdb-ad20-4e7e-8a12-66e4a914a84a", "external-id": "nsx-vlan-transportzone-181", "segmentation_id": 181, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap995d1950-11", "ovs_interfaceid": "995d1950-1169-43bf-8afe-427bdcb37b9d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 857.677542] env[63028]: DEBUG nova.compute.manager [None req-c457f1a3-7cf4-4d81-ade9-2592889b1fa6 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 79f4ef22-a589-4d5c-8832-5d5dcdd55561] Start spawning the instance on the hypervisor. {{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 857.685203] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: ddf20137-4d63-4c7a-b519-445719265e1d] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 857.703519] env[63028]: DEBUG nova.virt.hardware [None req-c457f1a3-7cf4-4d81-ade9-2592889b1fa6 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 857.703753] env[63028]: DEBUG nova.virt.hardware [None req-c457f1a3-7cf4-4d81-ade9-2592889b1fa6 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 857.704018] env[63028]: DEBUG nova.virt.hardware [None req-c457f1a3-7cf4-4d81-ade9-2592889b1fa6 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Image limits 0:0:0 {{(pid=63028) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 857.704312] env[63028]: DEBUG nova.virt.hardware [None req-c457f1a3-7cf4-4d81-ade9-2592889b1fa6 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 857.704479] env[63028]: DEBUG nova.virt.hardware [None req-c457f1a3-7cf4-4d81-ade9-2592889b1fa6 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 857.704631] env[63028]: DEBUG nova.virt.hardware [None req-c457f1a3-7cf4-4d81-ade9-2592889b1fa6 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 857.704839] env[63028]: DEBUG nova.virt.hardware [None req-c457f1a3-7cf4-4d81-ade9-2592889b1fa6 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 857.705015] env[63028]: DEBUG nova.virt.hardware [None req-c457f1a3-7cf4-4d81-ade9-2592889b1fa6 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 857.705226] env[63028]: DEBUG nova.virt.hardware [None req-c457f1a3-7cf4-4d81-ade9-2592889b1fa6 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 857.705401] env[63028]: DEBUG nova.virt.hardware [None req-c457f1a3-7cf4-4d81-ade9-2592889b1fa6 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 857.705575] env[63028]: DEBUG nova.virt.hardware [None req-c457f1a3-7cf4-4d81-ade9-2592889b1fa6 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 857.706423] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d62a690c-d7c3-4c80-a05f-d28b1a11126c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.714922] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16d87cd3-9e3d-4406-a5a8-8fca26ccc943 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.740635] env[63028]: DEBUG nova.network.neutron [None req-81b1346f-eb21-48c7-8ffd-fb557f6b5405 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] [instance: 4e859327-ccd3-440e-b884-67f6cdadf97f] Instance cache missing network info. 
{{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 857.780538] env[63028]: DEBUG nova.scheduler.client.report [None req-4c4ff793-44ac-4d71-af67-67f78b3ae605 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 857.823600] env[63028]: DEBUG oslo_concurrency.lockutils [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Releasing lock "refresh_cache-9773ad95-1894-471d-8020-c7952eac4be4" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 857.824009] env[63028]: DEBUG nova.compute.manager [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: 9773ad95-1894-471d-8020-c7952eac4be4] Instance network_info: |[{"id": "995d1950-1169-43bf-8afe-427bdcb37b9d", "address": "fa:16:3e:1d:3f:7f", "network": {"id": "35eace8d-0404-444c-a1ec-e13fd5687644", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1634949692-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ef9a42771824708832a74238bbe89c0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c330dbdb-ad20-4e7e-8a12-66e4a914a84a", "external-id": "nsx-vlan-transportzone-181", "segmentation_id": 181, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap995d1950-11", "ovs_interfaceid": "995d1950-1169-43bf-8afe-427bdcb37b9d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 857.824486] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: 9773ad95-1894-471d-8020-c7952eac4be4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1d:3f:7f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c330dbdb-ad20-4e7e-8a12-66e4a914a84a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '995d1950-1169-43bf-8afe-427bdcb37b9d', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 857.831976] env[63028]: DEBUG oslo.service.loopingcall [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Waiting 
for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 857.832454] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9773ad95-1894-471d-8020-c7952eac4be4] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 857.832692] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6d3df342-e1fb-4dde-a4c0-88c58468e7c2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.857963] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 857.857963] env[63028]: value = "task-2735675" [ 857.857963] env[63028]: _type = "Task" [ 857.857963] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.869063] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735675, 'name': CreateVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.871630] env[63028]: DEBUG oslo_concurrency.lockutils [None req-37a0163f-d4ad-4f5b-b7a7-2c6f260b44ef tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Acquiring lock "f4718363-73b2-4016-8849-f75e98259023" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 857.871905] env[63028]: DEBUG oslo_concurrency.lockutils [None req-37a0163f-d4ad-4f5b-b7a7-2c6f260b44ef tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Lock "f4718363-73b2-4016-8849-f75e98259023" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 857.872146] env[63028]: DEBUG oslo_concurrency.lockutils [None req-37a0163f-d4ad-4f5b-b7a7-2c6f260b44ef tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Acquiring lock "f4718363-73b2-4016-8849-f75e98259023-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 857.872363] env[63028]: DEBUG oslo_concurrency.lockutils [None req-37a0163f-d4ad-4f5b-b7a7-2c6f260b44ef tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Lock "f4718363-73b2-4016-8849-f75e98259023-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 857.873047] env[63028]: DEBUG oslo_concurrency.lockutils [None req-37a0163f-d4ad-4f5b-b7a7-2c6f260b44ef tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Lock "f4718363-73b2-4016-8849-f75e98259023-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 857.874586] env[63028]: INFO nova.compute.manager [None 
req-37a0163f-d4ad-4f5b-b7a7-2c6f260b44ef tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: f4718363-73b2-4016-8849-f75e98259023] Terminating instance [ 857.973462] env[63028]: DEBUG nova.compute.manager [req-40493801-8d00-4585-9b7d-5c8a507523b7 req-6c3d7077-5b24-43f9-ab7d-ece04b767c43 service nova] [instance: 9773ad95-1894-471d-8020-c7952eac4be4] Received event network-vif-plugged-995d1950-1169-43bf-8afe-427bdcb37b9d {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 857.973462] env[63028]: DEBUG oslo_concurrency.lockutils [req-40493801-8d00-4585-9b7d-5c8a507523b7 req-6c3d7077-5b24-43f9-ab7d-ece04b767c43 service nova] Acquiring lock "9773ad95-1894-471d-8020-c7952eac4be4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 857.973462] env[63028]: DEBUG oslo_concurrency.lockutils [req-40493801-8d00-4585-9b7d-5c8a507523b7 req-6c3d7077-5b24-43f9-ab7d-ece04b767c43 service nova] Lock "9773ad95-1894-471d-8020-c7952eac4be4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 857.973462] env[63028]: DEBUG oslo_concurrency.lockutils [req-40493801-8d00-4585-9b7d-5c8a507523b7 req-6c3d7077-5b24-43f9-ab7d-ece04b767c43 service nova] Lock "9773ad95-1894-471d-8020-c7952eac4be4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 857.973462] env[63028]: DEBUG nova.compute.manager [req-40493801-8d00-4585-9b7d-5c8a507523b7 req-6c3d7077-5b24-43f9-ab7d-ece04b767c43 service nova] [instance: 9773ad95-1894-471d-8020-c7952eac4be4] No waiting events found dispatching network-vif-plugged-995d1950-1169-43bf-8afe-427bdcb37b9d {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 857.973462] env[63028]: WARNING nova.compute.manager [req-40493801-8d00-4585-9b7d-5c8a507523b7 req-6c3d7077-5b24-43f9-ab7d-ece04b767c43 service nova] [instance: 9773ad95-1894-471d-8020-c7952eac4be4] Received unexpected event network-vif-plugged-995d1950-1169-43bf-8afe-427bdcb37b9d for instance with vm_state building and task_state spawning. [ 857.973462] env[63028]: DEBUG nova.compute.manager [req-40493801-8d00-4585-9b7d-5c8a507523b7 req-6c3d7077-5b24-43f9-ab7d-ece04b767c43 service nova] [instance: 9773ad95-1894-471d-8020-c7952eac4be4] Received event network-changed-995d1950-1169-43bf-8afe-427bdcb37b9d {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 857.973462] env[63028]: DEBUG nova.compute.manager [req-40493801-8d00-4585-9b7d-5c8a507523b7 req-6c3d7077-5b24-43f9-ab7d-ece04b767c43 service nova] [instance: 9773ad95-1894-471d-8020-c7952eac4be4] Refreshing instance network info cache due to event network-changed-995d1950-1169-43bf-8afe-427bdcb37b9d. 
{{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 857.973462] env[63028]: DEBUG oslo_concurrency.lockutils [req-40493801-8d00-4585-9b7d-5c8a507523b7 req-6c3d7077-5b24-43f9-ab7d-ece04b767c43 service nova] Acquiring lock "refresh_cache-9773ad95-1894-471d-8020-c7952eac4be4" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 857.973462] env[63028]: DEBUG oslo_concurrency.lockutils [req-40493801-8d00-4585-9b7d-5c8a507523b7 req-6c3d7077-5b24-43f9-ab7d-ece04b767c43 service nova] Acquired lock "refresh_cache-9773ad95-1894-471d-8020-c7952eac4be4" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 857.974107] env[63028]: DEBUG nova.network.neutron [req-40493801-8d00-4585-9b7d-5c8a507523b7 req-6c3d7077-5b24-43f9-ab7d-ece04b767c43 service nova] [instance: 9773ad95-1894-471d-8020-c7952eac4be4] Refreshing network info cache for port 995d1950-1169-43bf-8afe-427bdcb37b9d {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 858.190320] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: 9b1cd3c1-5e9a-43b6-9efb-1baf879ae0c0] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 858.223914] env[63028]: DEBUG nova.network.neutron [None req-81b1346f-eb21-48c7-8ffd-fb557f6b5405 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] [instance: 4e859327-ccd3-440e-b884-67f6cdadf97f] Updating instance_info_cache with network_info: [{"id": "e7cad309-0a34-4148-9d0b-e47549d8689d", "address": "fa:16:3e:3b:74:3e", "network": {"id": "52994a57-c88f-4aa3-bbb8-70a2ec1ef324", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-440949804", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "97060d5fb7e8454eadaf5dc9b426a248", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "46785c9c-8b22-487d-a854-b3e67c5ed1d7", "external-id": "nsx-vlan-transportzone-430", "segmentation_id": 430, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape7cad309-0a", "ovs_interfaceid": "e7cad309-0a34-4148-9d0b-e47549d8689d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "a9813eb4-922c-4f70-8c74-e7e5f11caf7d", "address": "fa:16:3e:7b:11:b1", "network": {"id": "56fd7781-6150-4604-b524-dccaff73dc3a", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1118372812", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.59", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "97060d5fb7e8454eadaf5dc9b426a248", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": 
{"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2907cce-d529-4809-af05-d29397bed211", "external-id": "nsx-vlan-transportzone-427", "segmentation_id": 427, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa9813eb4-92", "ovs_interfaceid": "a9813eb4-922c-4f70-8c74-e7e5f11caf7d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 858.286774] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4c4ff793-44ac-4d71-af67-67f78b3ae605 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.629s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 858.287359] env[63028]: DEBUG nova.compute.manager [None req-4c4ff793-44ac-4d71-af67-67f78b3ae605 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: a50e1167-d8ed-4099-83c3-a5066ab0be1f] Start building networks asynchronously for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 858.290204] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ad39a64e-600e-42cd-a15b-f0ddb3c175b1 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.088s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 858.293500] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ad39a64e-600e-42cd-a15b-f0ddb3c175b1 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 858.293500] env[63028]: DEBUG oslo_concurrency.lockutils [None req-477590c7-7359-44a5-a332-6d0b0fe72ca7 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.591s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 858.294432] env[63028]: INFO nova.compute.claims [None req-477590c7-7359-44a5-a332-6d0b0fe72ca7 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 1d008794-3c1a-46c6-b4eb-3d5441efdb22] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 858.326316] env[63028]: INFO nova.scheduler.client.report [None req-ad39a64e-600e-42cd-a15b-f0ddb3c175b1 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] Deleted allocations for instance e346c31b-ef1b-4f75-8564-cefe26bd672f [ 858.369988] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735675, 'name': CreateVM_Task, 'duration_secs': 0.308436} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.370105] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9773ad95-1894-471d-8020-c7952eac4be4] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 858.371839] env[63028]: DEBUG oslo_concurrency.lockutils [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9823cf3-c81e-4b18-855a-a01f46d8c790" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 858.371839] env[63028]: DEBUG oslo_concurrency.lockutils [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9823cf3-c81e-4b18-855a-a01f46d8c790" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 858.371839] env[63028]: DEBUG oslo_concurrency.lockutils [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c9823cf3-c81e-4b18-855a-a01f46d8c790" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 858.372245] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-90ead090-ccea-446c-93b5-772e91ddbab0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.385162] env[63028]: DEBUG nova.compute.manager [None req-37a0163f-d4ad-4f5b-b7a7-2c6f260b44ef tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: f4718363-73b2-4016-8849-f75e98259023] Start destroying the instance on the hypervisor. {{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 858.385162] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-37a0163f-d4ad-4f5b-b7a7-2c6f260b44ef tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: f4718363-73b2-4016-8849-f75e98259023] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 858.385162] env[63028]: DEBUG oslo_vmware.api [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Waiting for the task: (returnval){ [ 858.385162] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]524b9c18-7723-a8fb-2eb5-7125c9216b35" [ 858.385162] env[63028]: _type = "Task" [ 858.385162] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.386000] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7214dbbf-a3de-4212-85c4-6bf149db0412 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.389524] env[63028]: DEBUG nova.network.neutron [None req-c457f1a3-7cf4-4d81-ade9-2592889b1fa6 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 79f4ef22-a589-4d5c-8832-5d5dcdd55561] Successfully updated port: 0b5a99ec-110c-4325-b36a-92007f8e9e6a {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 858.402622] env[63028]: DEBUG oslo_concurrency.lockutils [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9823cf3-c81e-4b18-855a-a01f46d8c790" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 858.402622] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: 9773ad95-1894-471d-8020-c7952eac4be4] Processing image c9823cf3-c81e-4b18-855a-a01f46d8c790 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 858.402780] env[63028]: DEBUG oslo_concurrency.lockutils [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9823cf3-c81e-4b18-855a-a01f46d8c790/c9823cf3-c81e-4b18-855a-a01f46d8c790.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 858.403375] env[63028]: DEBUG oslo_concurrency.lockutils [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9823cf3-c81e-4b18-855a-a01f46d8c790/c9823cf3-c81e-4b18-855a-a01f46d8c790.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 858.403375] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 858.403375] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-37a0163f-d4ad-4f5b-b7a7-2c6f260b44ef tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: f4718363-73b2-4016-8849-f75e98259023] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 858.404523] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9ba20d57-cc5f-468b-9ae8-c66ea62dd9ac {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.406830] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-46ea833d-29fc-48a2-a64d-2c2f35c15222 {{(pid=63028) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.414756] env[63028]: DEBUG oslo_vmware.api [None req-37a0163f-d4ad-4f5b-b7a7-2c6f260b44ef tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Waiting for the task: (returnval){ [ 858.414756] env[63028]: value = "task-2735676" [ 858.414756] env[63028]: _type = "Task" [ 858.414756] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.416072] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 858.416259] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 858.419558] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-15d04210-2cf0-482b-8dea-835568f4fea6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.427844] env[63028]: DEBUG oslo_vmware.api [None req-37a0163f-d4ad-4f5b-b7a7-2c6f260b44ef tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': task-2735676, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.428555] env[63028]: DEBUG oslo_vmware.api [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Waiting for the task: (returnval){ [ 858.428555] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]520a7a6a-7c97-4823-4a21-e3c290432e5e" [ 858.428555] env[63028]: _type = "Task" [ 858.428555] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.438011] env[63028]: DEBUG oslo_vmware.api [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]520a7a6a-7c97-4823-4a21-e3c290432e5e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.694706] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: 44fca05f-51db-4252-bcf8-6bcad37a6147] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 858.727017] env[63028]: DEBUG oslo_concurrency.lockutils [None req-81b1346f-eb21-48c7-8ffd-fb557f6b5405 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Releasing lock "refresh_cache-4e859327-ccd3-440e-b884-67f6cdadf97f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 858.727675] env[63028]: DEBUG nova.compute.manager [None req-81b1346f-eb21-48c7-8ffd-fb557f6b5405 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] [instance: 4e859327-ccd3-440e-b884-67f6cdadf97f] Instance network_info: |[{"id": "e7cad309-0a34-4148-9d0b-e47549d8689d", "address": "fa:16:3e:3b:74:3e", "network": {"id": "52994a57-c88f-4aa3-bbb8-70a2ec1ef324", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-440949804", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "97060d5fb7e8454eadaf5dc9b426a248", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "46785c9c-8b22-487d-a854-b3e67c5ed1d7", "external-id": "nsx-vlan-transportzone-430", "segmentation_id": 430, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape7cad309-0a", "ovs_interfaceid": "e7cad309-0a34-4148-9d0b-e47549d8689d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "a9813eb4-922c-4f70-8c74-e7e5f11caf7d", "address": "fa:16:3e:7b:11:b1", "network": {"id": "56fd7781-6150-4604-b524-dccaff73dc3a", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1118372812", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.59", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "97060d5fb7e8454eadaf5dc9b426a248", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2907cce-d529-4809-af05-d29397bed211", "external-id": "nsx-vlan-transportzone-427", "segmentation_id": 427, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa9813eb4-92", "ovs_interfaceid": "a9813eb4-922c-4f70-8c74-e7e5f11caf7d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 858.728272] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-81b1346f-eb21-48c7-8ffd-fb557f6b5405 
tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] [instance: 4e859327-ccd3-440e-b884-67f6cdadf97f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3b:74:3e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '46785c9c-8b22-487d-a854-b3e67c5ed1d7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e7cad309-0a34-4148-9d0b-e47549d8689d', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:7b:11:b1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b2907cce-d529-4809-af05-d29397bed211', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a9813eb4-922c-4f70-8c74-e7e5f11caf7d', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 858.740048] env[63028]: DEBUG oslo.service.loopingcall [None req-81b1346f-eb21-48c7-8ffd-fb557f6b5405 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 858.740364] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4e859327-ccd3-440e-b884-67f6cdadf97f] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 858.741813] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ae707b9f-fa4d-4abc-bb50-486fedfdb6be {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.758679] env[63028]: DEBUG nova.network.neutron [req-40493801-8d00-4585-9b7d-5c8a507523b7 req-6c3d7077-5b24-43f9-ab7d-ece04b767c43 service nova] [instance: 9773ad95-1894-471d-8020-c7952eac4be4] Updated VIF entry in instance network info cache for port 995d1950-1169-43bf-8afe-427bdcb37b9d. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 858.759090] env[63028]: DEBUG nova.network.neutron [req-40493801-8d00-4585-9b7d-5c8a507523b7 req-6c3d7077-5b24-43f9-ab7d-ece04b767c43 service nova] [instance: 9773ad95-1894-471d-8020-c7952eac4be4] Updating instance_info_cache with network_info: [{"id": "995d1950-1169-43bf-8afe-427bdcb37b9d", "address": "fa:16:3e:1d:3f:7f", "network": {"id": "35eace8d-0404-444c-a1ec-e13fd5687644", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1634949692-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ef9a42771824708832a74238bbe89c0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c330dbdb-ad20-4e7e-8a12-66e4a914a84a", "external-id": "nsx-vlan-transportzone-181", "segmentation_id": 181, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap995d1950-11", "ovs_interfaceid": "995d1950-1169-43bf-8afe-427bdcb37b9d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 858.765863] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 858.765863] env[63028]: value = "task-2735677" [ 858.765863] env[63028]: _type = "Task" [ 858.765863] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.774720] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735677, 'name': CreateVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.801103] env[63028]: DEBUG nova.compute.utils [None req-4c4ff793-44ac-4d71-af67-67f78b3ae605 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 858.802541] env[63028]: DEBUG nova.compute.manager [None req-4c4ff793-44ac-4d71-af67-67f78b3ae605 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: a50e1167-d8ed-4099-83c3-a5066ab0be1f] Allocating IP information in the background. 
{{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 858.802696] env[63028]: DEBUG nova.network.neutron [None req-4c4ff793-44ac-4d71-af67-67f78b3ae605 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: a50e1167-d8ed-4099-83c3-a5066ab0be1f] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 858.841446] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ad39a64e-600e-42cd-a15b-f0ddb3c175b1 tempest-InstanceActionsV221TestJSON-26958138 tempest-InstanceActionsV221TestJSON-26958138-project-member] Lock "e346c31b-ef1b-4f75-8564-cefe26bd672f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.251s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 858.863671] env[63028]: DEBUG nova.policy [None req-4c4ff793-44ac-4d71-af67-67f78b3ae605 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '54076b7d25474185b3f205437cb68be8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '68de7445caeb4381b9e68c685ccb5e0b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 858.895902] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c457f1a3-7cf4-4d81-ade9-2592889b1fa6 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Acquiring lock "refresh_cache-79f4ef22-a589-4d5c-8832-5d5dcdd55561" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 858.895902] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c457f1a3-7cf4-4d81-ade9-2592889b1fa6 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Acquired lock "refresh_cache-79f4ef22-a589-4d5c-8832-5d5dcdd55561" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 858.895902] env[63028]: DEBUG nova.network.neutron [None req-c457f1a3-7cf4-4d81-ade9-2592889b1fa6 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 79f4ef22-a589-4d5c-8832-5d5dcdd55561] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 858.926766] env[63028]: DEBUG oslo_vmware.api [None req-37a0163f-d4ad-4f5b-b7a7-2c6f260b44ef tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': task-2735676, 'name': PowerOffVM_Task, 'duration_secs': 0.168844} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.927089] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-37a0163f-d4ad-4f5b-b7a7-2c6f260b44ef tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: f4718363-73b2-4016-8849-f75e98259023] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 858.927315] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-37a0163f-d4ad-4f5b-b7a7-2c6f260b44ef tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: f4718363-73b2-4016-8849-f75e98259023] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 858.927624] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c0cddc5f-122d-48dd-8113-21266c03bc4a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.938121] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: 9773ad95-1894-471d-8020-c7952eac4be4] Preparing fetch location {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 858.938430] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: 9773ad95-1894-471d-8020-c7952eac4be4] Fetch image to [datastore1] OSTACK_IMG_b84721b1-833b-407b-a3b9-93fbbcee8a1c/OSTACK_IMG_b84721b1-833b-407b-a3b9-93fbbcee8a1c.vmdk {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 858.938656] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: 9773ad95-1894-471d-8020-c7952eac4be4] Downloading stream optimized image c9823cf3-c81e-4b18-855a-a01f46d8c790 to [datastore1] OSTACK_IMG_b84721b1-833b-407b-a3b9-93fbbcee8a1c/OSTACK_IMG_b84721b1-833b-407b-a3b9-93fbbcee8a1c.vmdk on the data store datastore1 as vApp {{(pid=63028) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 858.938984] env[63028]: DEBUG nova.virt.vmwareapi.images [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: 9773ad95-1894-471d-8020-c7952eac4be4] Downloading image file data c9823cf3-c81e-4b18-855a-a01f46d8c790 to the ESX as VM named 'OSTACK_IMG_b84721b1-833b-407b-a3b9-93fbbcee8a1c' {{(pid=63028) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 858.992295] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-37a0163f-d4ad-4f5b-b7a7-2c6f260b44ef tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: f4718363-73b2-4016-8849-f75e98259023] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 858.992519] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-37a0163f-d4ad-4f5b-b7a7-2c6f260b44ef tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: f4718363-73b2-4016-8849-f75e98259023] Deleting contents of the VM from datastore datastore2 
{{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 858.992710] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-37a0163f-d4ad-4f5b-b7a7-2c6f260b44ef tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Deleting the datastore file [datastore2] f4718363-73b2-4016-8849-f75e98259023 {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 858.993303] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f7236cf5-faf9-45e7-8c2f-158af264d1cd {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.001700] env[63028]: DEBUG oslo_vmware.api [None req-37a0163f-d4ad-4f5b-b7a7-2c6f260b44ef tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Waiting for the task: (returnval){ [ 859.001700] env[63028]: value = "task-2735679" [ 859.001700] env[63028]: _type = "Task" [ 859.001700] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.016357] env[63028]: DEBUG oslo_vmware.api [None req-37a0163f-d4ad-4f5b-b7a7-2c6f260b44ef tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': task-2735679, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.036020] env[63028]: DEBUG oslo_vmware.rw_handles [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 859.036020] env[63028]: value = "resgroup-9" [ 859.036020] env[63028]: _type = "ResourcePool" [ 859.036020] env[63028]: }. {{(pid=63028) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 859.036468] env[63028]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-eabe307e-422a-4173-bd77-ef0ef6cb9a3b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.058674] env[63028]: DEBUG oslo_vmware.rw_handles [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Lease: (returnval){ [ 859.058674] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52fcbbfc-b898-ef99-0e6f-4da061b1a255" [ 859.058674] env[63028]: _type = "HttpNfcLease" [ 859.058674] env[63028]: } obtained for vApp import into resource pool (val){ [ 859.058674] env[63028]: value = "resgroup-9" [ 859.058674] env[63028]: _type = "ResourcePool" [ 859.058674] env[63028]: }. {{(pid=63028) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 859.058999] env[63028]: DEBUG oslo_vmware.api [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Waiting for the lease: (returnval){ [ 859.058999] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52fcbbfc-b898-ef99-0e6f-4da061b1a255" [ 859.058999] env[63028]: _type = "HttpNfcLease" [ 859.058999] env[63028]: } to be ready. 
{{(pid=63028) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 859.067481] env[63028]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 859.067481] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52fcbbfc-b898-ef99-0e6f-4da061b1a255" [ 859.067481] env[63028]: _type = "HttpNfcLease" [ 859.067481] env[63028]: } is initializing. {{(pid=63028) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 859.199770] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: 4a782483-c24e-44db-b697-856c69cc4a13] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 859.262380] env[63028]: DEBUG oslo_concurrency.lockutils [req-40493801-8d00-4585-9b7d-5c8a507523b7 req-6c3d7077-5b24-43f9-ab7d-ece04b767c43 service nova] Releasing lock "refresh_cache-9773ad95-1894-471d-8020-c7952eac4be4" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 859.276030] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735677, 'name': CreateVM_Task, 'duration_secs': 0.343024} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.276030] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4e859327-ccd3-440e-b884-67f6cdadf97f] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 859.276738] env[63028]: DEBUG oslo_concurrency.lockutils [None req-81b1346f-eb21-48c7-8ffd-fb557f6b5405 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 859.276910] env[63028]: DEBUG oslo_concurrency.lockutils [None req-81b1346f-eb21-48c7-8ffd-fb557f6b5405 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 859.277256] env[63028]: DEBUG oslo_concurrency.lockutils [None req-81b1346f-eb21-48c7-8ffd-fb557f6b5405 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 859.277515] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a25a9cf7-ce17-4e3c-8b34-cb15c471c06d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.282031] env[63028]: DEBUG oslo_vmware.api [None req-81b1346f-eb21-48c7-8ffd-fb557f6b5405 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Waiting for the task: (returnval){ [ 859.282031] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5217669c-013b-a083-5981-a1f2b68bf473" [ 859.282031] env[63028]: _type = "Task" [ 859.282031] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.291531] env[63028]: DEBUG oslo_vmware.api [None req-81b1346f-eb21-48c7-8ffd-fb557f6b5405 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5217669c-013b-a083-5981-a1f2b68bf473, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.292213] env[63028]: DEBUG nova.network.neutron [None req-4c4ff793-44ac-4d71-af67-67f78b3ae605 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: a50e1167-d8ed-4099-83c3-a5066ab0be1f] Successfully created port: abc41dea-8b6f-4cf7-b02f-21996a0aaf8d {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 859.308139] env[63028]: DEBUG nova.compute.manager [None req-4c4ff793-44ac-4d71-af67-67f78b3ae605 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: a50e1167-d8ed-4099-83c3-a5066ab0be1f] Start building block device mappings for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 859.464273] env[63028]: DEBUG nova.network.neutron [None req-c457f1a3-7cf4-4d81-ade9-2592889b1fa6 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 79f4ef22-a589-4d5c-8832-5d5dcdd55561] Instance cache missing network info. {{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 859.515329] env[63028]: DEBUG oslo_vmware.api [None req-37a0163f-d4ad-4f5b-b7a7-2c6f260b44ef tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': task-2735679, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.389517} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.519426] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-37a0163f-d4ad-4f5b-b7a7-2c6f260b44ef tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 859.522306] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-37a0163f-d4ad-4f5b-b7a7-2c6f260b44ef tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: f4718363-73b2-4016-8849-f75e98259023] Deleted contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 859.522721] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-37a0163f-d4ad-4f5b-b7a7-2c6f260b44ef tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: f4718363-73b2-4016-8849-f75e98259023] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 859.522952] env[63028]: INFO nova.compute.manager [None req-37a0163f-d4ad-4f5b-b7a7-2c6f260b44ef tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: f4718363-73b2-4016-8849-f75e98259023] Took 1.14 seconds to destroy the instance on the hypervisor. 
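The 'Acquiring lock' / 'Acquired lock' / 'Releasing lock' records and the 'Lock "..." acquired by "..." :: waited' / '"released" ... :: held' records throughout this trace come from oslo.concurrency's lockutils (the lockutils.py line numbers in the {{...}} suffixes point at the lock() context manager and the synchronized() decorator's inner wrapper respectively). A minimal sketch of both forms follows; the function names, lock names, and the sample UUID are illustrative only, not Nova's actual call sites.

```python
from oslo_concurrency import lockutils

# Decorator form: all callers sharing the lock name are serialized, and
# 'Lock "..." acquired by "..." :: waited N s' / '"released" ... :: held N s'
# DEBUG records are emitted around the wrapped call.
@lockutils.synchronized('compute_resources')
def claim_resources():
    print('runs with the compute_resources lock held')

# Context-manager form: emits the plain 'Acquiring lock' / 'Acquired lock' /
# 'Releasing lock' DEBUG records on entry and exit, as in the
# refresh_cache-<instance-uuid> and devstack-image-cache_base sections above.
def refresh_network_cache(instance_uuid):
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        print('network info cache refreshed under the per-instance lock')

claim_resources()
refresh_network_cache('9773ad95-1894-471d-8020-c7952eac4be4')
```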
[ 859.523235] env[63028]: DEBUG oslo.service.loopingcall [None req-37a0163f-d4ad-4f5b-b7a7-2c6f260b44ef tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 859.523643] env[63028]: DEBUG nova.compute.manager [-] [instance: f4718363-73b2-4016-8849-f75e98259023] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 859.523742] env[63028]: DEBUG nova.network.neutron [-] [instance: f4718363-73b2-4016-8849-f75e98259023] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 859.570224] env[63028]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 859.570224] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52fcbbfc-b898-ef99-0e6f-4da061b1a255" [ 859.570224] env[63028]: _type = "HttpNfcLease" [ 859.570224] env[63028]: } is initializing. {{(pid=63028) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 859.705167] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: c7a3f2c6-8368-49cc-9737-ea1d836f1783] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 859.797905] env[63028]: DEBUG oslo_vmware.api [None req-81b1346f-eb21-48c7-8ffd-fb557f6b5405 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5217669c-013b-a083-5981-a1f2b68bf473, 'name': SearchDatastore_Task, 'duration_secs': 0.023744} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.798316] env[63028]: DEBUG oslo_concurrency.lockutils [None req-81b1346f-eb21-48c7-8ffd-fb557f6b5405 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 859.799271] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-81b1346f-eb21-48c7-8ffd-fb557f6b5405 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] [instance: 4e859327-ccd3-440e-b884-67f6cdadf97f] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 859.799670] env[63028]: DEBUG oslo_concurrency.lockutils [None req-81b1346f-eb21-48c7-8ffd-fb557f6b5405 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 859.799798] env[63028]: DEBUG oslo_concurrency.lockutils [None req-81b1346f-eb21-48c7-8ffd-fb557f6b5405 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 859.800033] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-81b1346f-eb21-48c7-8ffd-fb557f6b5405 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 859.800348] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b8ebf2e0-610d-4c52-bc0d-1a8fe2758443 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.812106] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-81b1346f-eb21-48c7-8ffd-fb557f6b5405 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 859.813023] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-81b1346f-eb21-48c7-8ffd-fb557f6b5405 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 859.817137] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-58dd3a9f-d11d-4dba-acdc-eca08f76592f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.824925] env[63028]: DEBUG oslo_vmware.api [None req-81b1346f-eb21-48c7-8ffd-fb557f6b5405 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Waiting for the task: (returnval){ [ 859.824925] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52c1daeb-60b6-b5c8-3439-d00f1bb79f97" [ 859.824925] env[63028]: _type = "Task" [ 859.824925] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.836224] env[63028]: DEBUG oslo_vmware.api [None req-81b1346f-eb21-48c7-8ffd-fb557f6b5405 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52c1daeb-60b6-b5c8-3439-d00f1bb79f97, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.848421] env[63028]: DEBUG nova.network.neutron [None req-c457f1a3-7cf4-4d81-ade9-2592889b1fa6 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 79f4ef22-a589-4d5c-8832-5d5dcdd55561] Updating instance_info_cache with network_info: [{"id": "0b5a99ec-110c-4325-b36a-92007f8e9e6a", "address": "fa:16:3e:77:27:03", "network": {"id": "1fdce9cd-bf47-402a-8e16-4c01a25ad481", "bridge": "br-int", "label": "tempest-ServersTestJSON-1363423183-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ea26842446ec4691a6456a6659188704", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "706c9762-1cf8-4770-897d-377d0d927773", "external-id": "nsx-vlan-transportzone-402", "segmentation_id": 402, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0b5a99ec-11", "ovs_interfaceid": "0b5a99ec-110c-4325-b36a-92007f8e9e6a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 859.917069] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b174d16e-836b-4779-85e3-7286ad28b0d8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.925242] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd470da8-2520-49a2-bac5-d501b25259c8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.961100] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-57474d0c-868f-45a2-950c-6c52e5a51b39 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.970251] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94b613d0-04e5-4554-aa8c-c13f3ab20f8c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.987121] env[63028]: DEBUG nova.compute.provider_tree [None req-477590c7-7359-44a5-a332-6d0b0fe72ca7 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 860.001506] env[63028]: DEBUG nova.compute.manager [req-8361a1e8-37d8-428a-903f-40d57b573e16 req-e4e7fe50-9a7d-4765-b6a9-eba7adede2ed service nova] [instance: 79f4ef22-a589-4d5c-8832-5d5dcdd55561] Received event network-vif-plugged-0b5a99ec-110c-4325-b36a-92007f8e9e6a {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 860.001747] env[63028]: DEBUG oslo_concurrency.lockutils [req-8361a1e8-37d8-428a-903f-40d57b573e16 req-e4e7fe50-9a7d-4765-b6a9-eba7adede2ed service nova] Acquiring lock "79f4ef22-a589-4d5c-8832-5d5dcdd55561-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 860.002026] env[63028]: DEBUG oslo_concurrency.lockutils [req-8361a1e8-37d8-428a-903f-40d57b573e16 req-e4e7fe50-9a7d-4765-b6a9-eba7adede2ed service nova] Lock "79f4ef22-a589-4d5c-8832-5d5dcdd55561-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 860.002230] env[63028]: DEBUG oslo_concurrency.lockutils [req-8361a1e8-37d8-428a-903f-40d57b573e16 req-e4e7fe50-9a7d-4765-b6a9-eba7adede2ed service nova] Lock "79f4ef22-a589-4d5c-8832-5d5dcdd55561-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 860.002403] env[63028]: DEBUG nova.compute.manager [req-8361a1e8-37d8-428a-903f-40d57b573e16 req-e4e7fe50-9a7d-4765-b6a9-eba7adede2ed service nova] [instance: 79f4ef22-a589-4d5c-8832-5d5dcdd55561] No waiting events found dispatching network-vif-plugged-0b5a99ec-110c-4325-b36a-92007f8e9e6a {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 860.002592] env[63028]: WARNING nova.compute.manager [req-8361a1e8-37d8-428a-903f-40d57b573e16 req-e4e7fe50-9a7d-4765-b6a9-eba7adede2ed service nova] [instance: 79f4ef22-a589-4d5c-8832-5d5dcdd55561] Received unexpected event network-vif-plugged-0b5a99ec-110c-4325-b36a-92007f8e9e6a for instance with vm_state building and task_state spawning. 
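The 'Invoking FileManager.DeleteDatastoreFile_Task', 'Waiting for the task: ... to complete', 'Task: {...} progress is 0%', and 'completed successfully' records above trace the generic oslo.vmware invoke-then-poll pattern: a vSphere *_Task method is called through the API session, which returns a task reference immediately, and wait_for_task() polls that task until it reaches a terminal state. Below is a minimal sketch of that pattern under the assumption that `session` is an already logged-in oslo_vmware.api.VMwareAPISession; the helper name and its arguments are illustrative and are not Nova's own ds_util helper.

```python
def delete_datastore_file(session, datastore_path, datacenter_ref):
    """Delete a datastore file and block until vCenter finishes the task.

    Assumptions: `session` is an oslo_vmware.api.VMwareAPISession,
    `datastore_path` is a string such as
    '[datastore2] f4718363-73b2-4016-8849-f75e98259023', and
    `datacenter_ref` is the managed object reference of the owning datacenter.
    """
    file_manager = session.vim.service_content.fileManager
    # Invoking the vSphere FileManager.DeleteDatastoreFile_Task method
    # returns a Task managed object reference right away.
    task = session.invoke_api(session.vim,
                              'DeleteDatastoreFile_Task',
                              file_manager,
                              name=datastore_path,
                              datacenter=datacenter_ref)
    # wait_for_task() polls the task (the "progress is 0%" records) and
    # raises an exception if vCenter reports the task as failed.
    session.wait_for_task(task)
```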
[ 860.002781] env[63028]: DEBUG nova.compute.manager [req-8361a1e8-37d8-428a-903f-40d57b573e16 req-e4e7fe50-9a7d-4765-b6a9-eba7adede2ed service nova] [instance: 79f4ef22-a589-4d5c-8832-5d5dcdd55561] Received event network-changed-0b5a99ec-110c-4325-b36a-92007f8e9e6a {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 860.002971] env[63028]: DEBUG nova.compute.manager [req-8361a1e8-37d8-428a-903f-40d57b573e16 req-e4e7fe50-9a7d-4765-b6a9-eba7adede2ed service nova] [instance: 79f4ef22-a589-4d5c-8832-5d5dcdd55561] Refreshing instance network info cache due to event network-changed-0b5a99ec-110c-4325-b36a-92007f8e9e6a. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 860.003158] env[63028]: DEBUG oslo_concurrency.lockutils [req-8361a1e8-37d8-428a-903f-40d57b573e16 req-e4e7fe50-9a7d-4765-b6a9-eba7adede2ed service nova] Acquiring lock "refresh_cache-79f4ef22-a589-4d5c-8832-5d5dcdd55561" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 860.069021] env[63028]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 860.069021] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52fcbbfc-b898-ef99-0e6f-4da061b1a255" [ 860.069021] env[63028]: _type = "HttpNfcLease" [ 860.069021] env[63028]: } is initializing. {{(pid=63028) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 860.210375] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: c9cc1ac7-06c6-415b-86ce-daf4849bfc05] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 860.323363] env[63028]: DEBUG nova.compute.manager [None req-4c4ff793-44ac-4d71-af67-67f78b3ae605 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: a50e1167-d8ed-4099-83c3-a5066ab0be1f] Start spawning the instance on the hypervisor. {{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 860.341372] env[63028]: DEBUG oslo_vmware.api [None req-81b1346f-eb21-48c7-8ffd-fb557f6b5405 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52c1daeb-60b6-b5c8-3439-d00f1bb79f97, 'name': SearchDatastore_Task, 'duration_secs': 0.010582} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.344322] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4718e55e-4f50-481d-b7ba-78f10bc89cc7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.350721] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c457f1a3-7cf4-4d81-ade9-2592889b1fa6 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Releasing lock "refresh_cache-79f4ef22-a589-4d5c-8832-5d5dcdd55561" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 860.351743] env[63028]: DEBUG nova.compute.manager [None req-c457f1a3-7cf4-4d81-ade9-2592889b1fa6 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 79f4ef22-a589-4d5c-8832-5d5dcdd55561] Instance network_info: |[{"id": "0b5a99ec-110c-4325-b36a-92007f8e9e6a", "address": "fa:16:3e:77:27:03", "network": {"id": "1fdce9cd-bf47-402a-8e16-4c01a25ad481", "bridge": "br-int", "label": "tempest-ServersTestJSON-1363423183-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ea26842446ec4691a6456a6659188704", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "706c9762-1cf8-4770-897d-377d0d927773", "external-id": "nsx-vlan-transportzone-402", "segmentation_id": 402, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0b5a99ec-11", "ovs_interfaceid": "0b5a99ec-110c-4325-b36a-92007f8e9e6a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 860.352018] env[63028]: DEBUG oslo_vmware.api [None req-81b1346f-eb21-48c7-8ffd-fb557f6b5405 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Waiting for the task: (returnval){ [ 860.352018] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52d443bf-b09b-7a4e-ef38-985b52454a9f" [ 860.352018] env[63028]: _type = "Task" [ 860.352018] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.352268] env[63028]: DEBUG oslo_concurrency.lockutils [req-8361a1e8-37d8-428a-903f-40d57b573e16 req-e4e7fe50-9a7d-4765-b6a9-eba7adede2ed service nova] Acquired lock "refresh_cache-79f4ef22-a589-4d5c-8832-5d5dcdd55561" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 860.354959] env[63028]: DEBUG nova.network.neutron [req-8361a1e8-37d8-428a-903f-40d57b573e16 req-e4e7fe50-9a7d-4765-b6a9-eba7adede2ed service nova] [instance: 79f4ef22-a589-4d5c-8832-5d5dcdd55561] Refreshing network info cache for port 0b5a99ec-110c-4325-b36a-92007f8e9e6a {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 860.356266] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c457f1a3-7cf4-4d81-ade9-2592889b1fa6 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 79f4ef22-a589-4d5c-8832-5d5dcdd55561] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:77:27:03', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '706c9762-1cf8-4770-897d-377d0d927773', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0b5a99ec-110c-4325-b36a-92007f8e9e6a', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 860.363594] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-c457f1a3-7cf4-4d81-ade9-2592889b1fa6 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Creating folder: Project (ea26842446ec4691a6456a6659188704). Parent ref: group-v550570. {{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 860.368342] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a386ea41-eef1-4b99-af52-fa3c01e478b2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.382733] env[63028]: DEBUG oslo_vmware.api [None req-81b1346f-eb21-48c7-8ffd-fb557f6b5405 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52d443bf-b09b-7a4e-ef38-985b52454a9f, 'name': SearchDatastore_Task, 'duration_secs': 0.010532} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.385180] env[63028]: DEBUG nova.virt.hardware [None req-4c4ff793-44ac-4d71-af67-67f78b3ae605 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 860.385180] env[63028]: DEBUG nova.virt.hardware [None req-4c4ff793-44ac-4d71-af67-67f78b3ae605 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 860.385334] env[63028]: DEBUG nova.virt.hardware [None req-4c4ff793-44ac-4d71-af67-67f78b3ae605 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 860.385522] env[63028]: DEBUG nova.virt.hardware [None req-4c4ff793-44ac-4d71-af67-67f78b3ae605 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 860.385679] env[63028]: DEBUG nova.virt.hardware [None req-4c4ff793-44ac-4d71-af67-67f78b3ae605 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 860.385796] env[63028]: DEBUG nova.virt.hardware [None req-4c4ff793-44ac-4d71-af67-67f78b3ae605 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 860.386016] env[63028]: DEBUG nova.virt.hardware [None req-4c4ff793-44ac-4d71-af67-67f78b3ae605 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 860.386640] env[63028]: DEBUG nova.virt.hardware [None req-4c4ff793-44ac-4d71-af67-67f78b3ae605 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 
860.386640] env[63028]: DEBUG nova.virt.hardware [None req-4c4ff793-44ac-4d71-af67-67f78b3ae605 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 860.386640] env[63028]: DEBUG nova.virt.hardware [None req-4c4ff793-44ac-4d71-af67-67f78b3ae605 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 860.386783] env[63028]: DEBUG nova.virt.hardware [None req-4c4ff793-44ac-4d71-af67-67f78b3ae605 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 860.388169] env[63028]: DEBUG oslo_concurrency.lockutils [None req-81b1346f-eb21-48c7-8ffd-fb557f6b5405 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 860.388558] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-81b1346f-eb21-48c7-8ffd-fb557f6b5405 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] 4e859327-ccd3-440e-b884-67f6cdadf97f/4e859327-ccd3-440e-b884-67f6cdadf97f.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 860.389187] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c1f77ed-a536-4a56-b5a1-aac24ae15ccd {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.392626] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-c457f1a3-7cf4-4d81-ade9-2592889b1fa6 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Created folder: Project (ea26842446ec4691a6456a6659188704) in parent group-v550570. [ 860.392802] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-c457f1a3-7cf4-4d81-ade9-2592889b1fa6 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Creating folder: Instances. Parent ref: group-v550779. 
{{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 860.393035] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-17328f15-25b9-4687-964d-701e95ec2a1c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.395340] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ebc0d3a7-fe3f-41c8-812d-480b861eb763 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.404516] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-590851c1-f31f-4745-9163-737ff4bf3c3a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.406932] env[63028]: DEBUG oslo_vmware.api [None req-81b1346f-eb21-48c7-8ffd-fb557f6b5405 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Waiting for the task: (returnval){ [ 860.406932] env[63028]: value = "task-2735682" [ 860.406932] env[63028]: _type = "Task" [ 860.406932] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.408610] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-c457f1a3-7cf4-4d81-ade9-2592889b1fa6 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Created folder: Instances in parent group-v550779. [ 860.408734] env[63028]: DEBUG oslo.service.loopingcall [None req-c457f1a3-7cf4-4d81-ade9-2592889b1fa6 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 860.412368] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 79f4ef22-a589-4d5c-8832-5d5dcdd55561] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 860.424303] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9caf2967-cfc8-4781-9af4-c33ae6e3185a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.443082] env[63028]: DEBUG oslo_vmware.api [None req-81b1346f-eb21-48c7-8ffd-fb557f6b5405 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Task: {'id': task-2735682, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.444710] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 860.444710] env[63028]: value = "task-2735684" [ 860.444710] env[63028]: _type = "Task" [ 860.444710] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.452493] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735684, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.491059] env[63028]: DEBUG nova.scheduler.client.report [None req-477590c7-7359-44a5-a332-6d0b0fe72ca7 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 860.546600] env[63028]: DEBUG nova.network.neutron [-] [instance: f4718363-73b2-4016-8849-f75e98259023] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 860.569943] env[63028]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 860.569943] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52fcbbfc-b898-ef99-0e6f-4da061b1a255" [ 860.569943] env[63028]: _type = "HttpNfcLease" [ 860.569943] env[63028]: } is initializing. {{(pid=63028) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 860.620843] env[63028]: DEBUG nova.network.neutron [req-8361a1e8-37d8-428a-903f-40d57b573e16 req-e4e7fe50-9a7d-4765-b6a9-eba7adede2ed service nova] [instance: 79f4ef22-a589-4d5c-8832-5d5dcdd55561] Updated VIF entry in instance network info cache for port 0b5a99ec-110c-4325-b36a-92007f8e9e6a. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 860.621282] env[63028]: DEBUG nova.network.neutron [req-8361a1e8-37d8-428a-903f-40d57b573e16 req-e4e7fe50-9a7d-4765-b6a9-eba7adede2ed service nova] [instance: 79f4ef22-a589-4d5c-8832-5d5dcdd55561] Updating instance_info_cache with network_info: [{"id": "0b5a99ec-110c-4325-b36a-92007f8e9e6a", "address": "fa:16:3e:77:27:03", "network": {"id": "1fdce9cd-bf47-402a-8e16-4c01a25ad481", "bridge": "br-int", "label": "tempest-ServersTestJSON-1363423183-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ea26842446ec4691a6456a6659188704", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "706c9762-1cf8-4770-897d-377d0d927773", "external-id": "nsx-vlan-transportzone-402", "segmentation_id": 402, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0b5a99ec-11", "ovs_interfaceid": "0b5a99ec-110c-4325-b36a-92007f8e9e6a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 860.714765] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: 0dbafad1-ab21-439d-bc8e-e447ac33304e] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 860.919327] env[63028]: DEBUG oslo_vmware.api [None req-81b1346f-eb21-48c7-8ffd-fb557f6b5405 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Task: {'id': task-2735682, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.958617] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735684, 'name': CreateVM_Task} progress is 25%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.995731] env[63028]: DEBUG oslo_concurrency.lockutils [None req-477590c7-7359-44a5-a332-6d0b0fe72ca7 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.703s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 860.996444] env[63028]: DEBUG nova.compute.manager [None req-477590c7-7359-44a5-a332-6d0b0fe72ca7 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 1d008794-3c1a-46c6-b4eb-3d5441efdb22] Start building networks asynchronously for instance. 
{{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 860.999479] env[63028]: DEBUG oslo_concurrency.lockutils [None req-50c05c22-9a17-46a2-aa9e-4dd4c15fda05 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.329s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 861.001169] env[63028]: INFO nova.compute.claims [None req-50c05c22-9a17-46a2-aa9e-4dd4c15fda05 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: f0ca0d73-d428-4b8c-acac-a80b7b7dd793] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 861.049466] env[63028]: INFO nova.compute.manager [-] [instance: f4718363-73b2-4016-8849-f75e98259023] Took 1.53 seconds to deallocate network for instance. [ 861.070112] env[63028]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 861.070112] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52fcbbfc-b898-ef99-0e6f-4da061b1a255" [ 861.070112] env[63028]: _type = "HttpNfcLease" [ 861.070112] env[63028]: } is initializing. {{(pid=63028) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 861.123764] env[63028]: DEBUG oslo_concurrency.lockutils [req-8361a1e8-37d8-428a-903f-40d57b573e16 req-e4e7fe50-9a7d-4765-b6a9-eba7adede2ed service nova] Releasing lock "refresh_cache-79f4ef22-a589-4d5c-8832-5d5dcdd55561" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 861.218939] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: 5a330ed9-c106-49f2-b524-a424e717b5ce] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 861.376733] env[63028]: DEBUG nova.network.neutron [None req-4c4ff793-44ac-4d71-af67-67f78b3ae605 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: a50e1167-d8ed-4099-83c3-a5066ab0be1f] Successfully updated port: abc41dea-8b6f-4cf7-b02f-21996a0aaf8d {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 861.421130] env[63028]: DEBUG oslo_vmware.api [None req-81b1346f-eb21-48c7-8ffd-fb557f6b5405 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Task: {'id': task-2735682, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.521047} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 861.421681] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-81b1346f-eb21-48c7-8ffd-fb557f6b5405 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] 4e859327-ccd3-440e-b884-67f6cdadf97f/4e859327-ccd3-440e-b884-67f6cdadf97f.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 861.421900] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-81b1346f-eb21-48c7-8ffd-fb557f6b5405 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] [instance: 4e859327-ccd3-440e-b884-67f6cdadf97f] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 861.422162] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ce8d90dc-ab61-42c1-b376-70dc3bae88a4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.428633] env[63028]: DEBUG oslo_vmware.api [None req-81b1346f-eb21-48c7-8ffd-fb557f6b5405 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Waiting for the task: (returnval){ [ 861.428633] env[63028]: value = "task-2735685" [ 861.428633] env[63028]: _type = "Task" [ 861.428633] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.436537] env[63028]: DEBUG oslo_vmware.api [None req-81b1346f-eb21-48c7-8ffd-fb557f6b5405 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Task: {'id': task-2735685, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.455880] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735684, 'name': CreateVM_Task} progress is 99%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.505476] env[63028]: DEBUG nova.compute.utils [None req-477590c7-7359-44a5-a332-6d0b0fe72ca7 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 861.508848] env[63028]: DEBUG nova.compute.manager [None req-477590c7-7359-44a5-a332-6d0b0fe72ca7 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 1d008794-3c1a-46c6-b4eb-3d5441efdb22] Allocating IP information in the background. 
{{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 861.509052] env[63028]: DEBUG nova.network.neutron [None req-477590c7-7359-44a5-a332-6d0b0fe72ca7 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 1d008794-3c1a-46c6-b4eb-3d5441efdb22] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 861.546579] env[63028]: DEBUG nova.policy [None req-477590c7-7359-44a5-a332-6d0b0fe72ca7 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '736ca268dc2c434aac2165473ea28d99', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e2304ce21bf141cab94fb6c342653812', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 861.563018] env[63028]: DEBUG oslo_concurrency.lockutils [None req-37a0163f-d4ad-4f5b-b7a7-2c6f260b44ef tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 861.569302] env[63028]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 861.569302] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52fcbbfc-b898-ef99-0e6f-4da061b1a255" [ 861.569302] env[63028]: _type = "HttpNfcLease" [ 861.569302] env[63028]: } is initializing. 
{{(pid=63028) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 861.722283] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: 67440140-a619-41f2-98fe-eff23e8ad8a5] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 861.864418] env[63028]: DEBUG nova.network.neutron [None req-477590c7-7359-44a5-a332-6d0b0fe72ca7 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 1d008794-3c1a-46c6-b4eb-3d5441efdb22] Successfully created port: 4da14eb6-411a-4cdd-afe0-bd34e474882f {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 861.881327] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4c4ff793-44ac-4d71-af67-67f78b3ae605 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Acquiring lock "refresh_cache-a50e1167-d8ed-4099-83c3-a5066ab0be1f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 861.881538] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4c4ff793-44ac-4d71-af67-67f78b3ae605 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Acquired lock "refresh_cache-a50e1167-d8ed-4099-83c3-a5066ab0be1f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 861.881773] env[63028]: DEBUG nova.network.neutron [None req-4c4ff793-44ac-4d71-af67-67f78b3ae605 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: a50e1167-d8ed-4099-83c3-a5066ab0be1f] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 861.946123] env[63028]: DEBUG oslo_vmware.api [None req-81b1346f-eb21-48c7-8ffd-fb557f6b5405 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Task: {'id': task-2735685, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067043} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 861.951379] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-81b1346f-eb21-48c7-8ffd-fb557f6b5405 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] [instance: 4e859327-ccd3-440e-b884-67f6cdadf97f] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 861.952279] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16b30804-c419-4ca1-8a57-8e864963a2cc {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.966448] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735684, 'name': CreateVM_Task, 'duration_secs': 1.022446} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 861.979067] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 79f4ef22-a589-4d5c-8832-5d5dcdd55561] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 861.987788] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-81b1346f-eb21-48c7-8ffd-fb557f6b5405 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] [instance: 4e859327-ccd3-440e-b884-67f6cdadf97f] Reconfiguring VM instance instance-00000042 to attach disk [datastore2] 4e859327-ccd3-440e-b884-67f6cdadf97f/4e859327-ccd3-440e-b884-67f6cdadf97f.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 861.988621] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c457f1a3-7cf4-4d81-ade9-2592889b1fa6 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 861.988799] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c457f1a3-7cf4-4d81-ade9-2592889b1fa6 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 861.989143] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c457f1a3-7cf4-4d81-ade9-2592889b1fa6 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 861.989381] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4e02702e-04a1-4d82-9a9d-7808d8ac26b4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.004068] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c8569df8-4223-44f0-af85-1ea7914c9be0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.009341] env[63028]: DEBUG nova.compute.manager [None req-477590c7-7359-44a5-a332-6d0b0fe72ca7 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 1d008794-3c1a-46c6-b4eb-3d5441efdb22] Start building block device mappings for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 862.012018] env[63028]: DEBUG oslo_vmware.api [None req-c457f1a3-7cf4-4d81-ade9-2592889b1fa6 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Waiting for the task: (returnval){ [ 862.012018] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52e0d25b-f60a-edc0-41fe-68de54f56dd8" [ 862.012018] env[63028]: _type = "Task" [ 862.012018] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.016225] env[63028]: DEBUG oslo_vmware.api [None req-81b1346f-eb21-48c7-8ffd-fb557f6b5405 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Waiting for the task: (returnval){ [ 862.016225] env[63028]: value = "task-2735686" [ 862.016225] env[63028]: _type = "Task" [ 862.016225] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.032604] env[63028]: DEBUG oslo_vmware.api [None req-c457f1a3-7cf4-4d81-ade9-2592889b1fa6 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52e0d25b-f60a-edc0-41fe-68de54f56dd8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.032961] env[63028]: DEBUG oslo_vmware.api [None req-81b1346f-eb21-48c7-8ffd-fb557f6b5405 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Task: {'id': task-2735686, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.073816] env[63028]: DEBUG nova.compute.manager [req-110dc71e-edc9-4ff0-9d94-946f85ec8d8b req-249da606-d80f-42e3-8b94-cf52c1174c8a service nova] [instance: f4718363-73b2-4016-8849-f75e98259023] Received event network-vif-deleted-d63bc515-e6bf-4c5c-88b1-4d1e21688dc4 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 862.073986] env[63028]: DEBUG nova.compute.manager [req-110dc71e-edc9-4ff0-9d94-946f85ec8d8b req-249da606-d80f-42e3-8b94-cf52c1174c8a service nova] [instance: a50e1167-d8ed-4099-83c3-a5066ab0be1f] Received event network-vif-plugged-abc41dea-8b6f-4cf7-b02f-21996a0aaf8d {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 862.074212] env[63028]: DEBUG oslo_concurrency.lockutils [req-110dc71e-edc9-4ff0-9d94-946f85ec8d8b req-249da606-d80f-42e3-8b94-cf52c1174c8a service nova] Acquiring lock "a50e1167-d8ed-4099-83c3-a5066ab0be1f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 862.074421] env[63028]: DEBUG oslo_concurrency.lockutils [req-110dc71e-edc9-4ff0-9d94-946f85ec8d8b req-249da606-d80f-42e3-8b94-cf52c1174c8a service nova] Lock "a50e1167-d8ed-4099-83c3-a5066ab0be1f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 862.074660] env[63028]: DEBUG oslo_concurrency.lockutils [req-110dc71e-edc9-4ff0-9d94-946f85ec8d8b req-249da606-d80f-42e3-8b94-cf52c1174c8a service nova] Lock "a50e1167-d8ed-4099-83c3-a5066ab0be1f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 862.074761] env[63028]: DEBUG nova.compute.manager [req-110dc71e-edc9-4ff0-9d94-946f85ec8d8b req-249da606-d80f-42e3-8b94-cf52c1174c8a service nova] [instance: a50e1167-d8ed-4099-83c3-a5066ab0be1f] No waiting events found dispatching 
network-vif-plugged-abc41dea-8b6f-4cf7-b02f-21996a0aaf8d {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 862.074888] env[63028]: WARNING nova.compute.manager [req-110dc71e-edc9-4ff0-9d94-946f85ec8d8b req-249da606-d80f-42e3-8b94-cf52c1174c8a service nova] [instance: a50e1167-d8ed-4099-83c3-a5066ab0be1f] Received unexpected event network-vif-plugged-abc41dea-8b6f-4cf7-b02f-21996a0aaf8d for instance with vm_state building and task_state spawning. [ 862.075058] env[63028]: DEBUG nova.compute.manager [req-110dc71e-edc9-4ff0-9d94-946f85ec8d8b req-249da606-d80f-42e3-8b94-cf52c1174c8a service nova] [instance: a50e1167-d8ed-4099-83c3-a5066ab0be1f] Received event network-changed-abc41dea-8b6f-4cf7-b02f-21996a0aaf8d {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 862.075220] env[63028]: DEBUG nova.compute.manager [req-110dc71e-edc9-4ff0-9d94-946f85ec8d8b req-249da606-d80f-42e3-8b94-cf52c1174c8a service nova] [instance: a50e1167-d8ed-4099-83c3-a5066ab0be1f] Refreshing instance network info cache due to event network-changed-abc41dea-8b6f-4cf7-b02f-21996a0aaf8d. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 862.075382] env[63028]: DEBUG oslo_concurrency.lockutils [req-110dc71e-edc9-4ff0-9d94-946f85ec8d8b req-249da606-d80f-42e3-8b94-cf52c1174c8a service nova] Acquiring lock "refresh_cache-a50e1167-d8ed-4099-83c3-a5066ab0be1f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 862.078570] env[63028]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 862.078570] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52fcbbfc-b898-ef99-0e6f-4da061b1a255" [ 862.078570] env[63028]: _type = "HttpNfcLease" [ 862.078570] env[63028]: } is ready. {{(pid=63028) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 862.078570] env[63028]: DEBUG oslo_vmware.rw_handles [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 862.078570] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52fcbbfc-b898-ef99-0e6f-4da061b1a255" [ 862.078570] env[63028]: _type = "HttpNfcLease" [ 862.078570] env[63028]: }. {{(pid=63028) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 862.078976] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e78c124-8be4-41a5-be58-31a1ed4b2314 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.091904] env[63028]: DEBUG oslo_vmware.rw_handles [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52ddb32d-a300-3ffa-52af-50f17fbda9cc/disk-0.vmdk from lease info. {{(pid=63028) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 862.091904] env[63028]: DEBUG oslo_vmware.rw_handles [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Creating HTTP connection to write to file with size = 21334016 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52ddb32d-a300-3ffa-52af-50f17fbda9cc/disk-0.vmdk. 
{{(pid=63028) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 862.159598] env[63028]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-2870dcb0-2473-4fb3-bb4b-2bb36ab9e9b6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.225689] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: 2dcdb36f-6c2d-4f70-8aed-27f6511ef3e8] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 862.429290] env[63028]: DEBUG nova.network.neutron [None req-4c4ff793-44ac-4d71-af67-67f78b3ae605 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: a50e1167-d8ed-4099-83c3-a5066ab0be1f] Instance cache missing network info. {{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 862.537724] env[63028]: DEBUG oslo_vmware.api [None req-81b1346f-eb21-48c7-8ffd-fb557f6b5405 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Task: {'id': task-2735686, 'name': ReconfigVM_Task, 'duration_secs': 0.475422} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.550028] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-81b1346f-eb21-48c7-8ffd-fb557f6b5405 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] [instance: 4e859327-ccd3-440e-b884-67f6cdadf97f] Reconfigured VM instance instance-00000042 to attach disk [datastore2] 4e859327-ccd3-440e-b884-67f6cdadf97f/4e859327-ccd3-440e-b884-67f6cdadf97f.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 862.550028] env[63028]: DEBUG oslo_vmware.api [None req-c457f1a3-7cf4-4d81-ade9-2592889b1fa6 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52e0d25b-f60a-edc0-41fe-68de54f56dd8, 'name': SearchDatastore_Task, 'duration_secs': 0.020458} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.552249] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-115ac356-2db2-426d-980f-67cba9577667 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.554302] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c457f1a3-7cf4-4d81-ade9-2592889b1fa6 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 862.554570] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c457f1a3-7cf4-4d81-ade9-2592889b1fa6 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 79f4ef22-a589-4d5c-8832-5d5dcdd55561] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 862.554856] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c457f1a3-7cf4-4d81-ade9-2592889b1fa6 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 862.555060] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c457f1a3-7cf4-4d81-ade9-2592889b1fa6 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 862.555288] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-c457f1a3-7cf4-4d81-ade9-2592889b1fa6 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 862.558526] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-27f03a26-e553-4c2d-a4e2-0cbf9f72511e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.568285] env[63028]: DEBUG oslo_vmware.api [None req-81b1346f-eb21-48c7-8ffd-fb557f6b5405 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Waiting for the task: (returnval){ [ 862.568285] env[63028]: value = "task-2735687" [ 862.568285] env[63028]: _type = "Task" [ 862.568285] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.576904] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-c457f1a3-7cf4-4d81-ade9-2592889b1fa6 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 862.577102] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c457f1a3-7cf4-4d81-ade9-2592889b1fa6 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 862.580323] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-00d7708b-6a90-40b7-ba74-35e86583accb {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.586110] env[63028]: DEBUG oslo_vmware.api [None req-81b1346f-eb21-48c7-8ffd-fb557f6b5405 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Task: {'id': task-2735687, 'name': Rename_Task} progress is 10%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.589954] env[63028]: DEBUG oslo_vmware.api [None req-c457f1a3-7cf4-4d81-ade9-2592889b1fa6 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Waiting for the task: (returnval){ [ 862.589954] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]527ad118-4c47-a2e9-6ed3-f5cf7e378f20" [ 862.589954] env[63028]: _type = "Task" [ 862.589954] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.600954] env[63028]: DEBUG oslo_vmware.api [None req-c457f1a3-7cf4-4d81-ade9-2592889b1fa6 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]527ad118-4c47-a2e9-6ed3-f5cf7e378f20, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.602618] env[63028]: DEBUG nova.network.neutron [None req-4c4ff793-44ac-4d71-af67-67f78b3ae605 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: a50e1167-d8ed-4099-83c3-a5066ab0be1f] Updating instance_info_cache with network_info: [{"id": "abc41dea-8b6f-4cf7-b02f-21996a0aaf8d", "address": "fa:16:3e:35:71:27", "network": {"id": "063de6d9-43e6-4adb-88dc-d4fe17058488", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-659483097-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68de7445caeb4381b9e68c685ccb5e0b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b356db78-99c7-4464-822c-fc7e193f7878", "external-id": "nsx-vlan-transportzone-231", "segmentation_id": 231, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapabc41dea-8b", "ovs_interfaceid": "abc41dea-8b6f-4cf7-b02f-21996a0aaf8d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 862.654351] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e44111d2-bb8f-4b1a-a911-6277d59a0e70 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.661700] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31eac1f7-dc18-4da4-8501-e03accdce714 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.694874] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed1f4745-ff60-449e-910a-bf86b0482952 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.702513] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66bb64d0-4b86-42e6-8acc-668cd06b11c8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.716274] env[63028]: DEBUG nova.compute.provider_tree [None req-50c05c22-9a17-46a2-aa9e-4dd4c15fda05 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 862.731144] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: 1eeb96d1-6e03-4192-a9db-955444519fd7] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 863.019371] env[63028]: DEBUG nova.compute.manager [None req-477590c7-7359-44a5-a332-6d0b0fe72ca7 
tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 1d008794-3c1a-46c6-b4eb-3d5441efdb22] Start spawning the instance on the hypervisor. {{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 863.047104] env[63028]: DEBUG nova.virt.hardware [None req-477590c7-7359-44a5-a332-6d0b0fe72ca7 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 863.047104] env[63028]: DEBUG nova.virt.hardware [None req-477590c7-7359-44a5-a332-6d0b0fe72ca7 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 863.047489] env[63028]: DEBUG nova.virt.hardware [None req-477590c7-7359-44a5-a332-6d0b0fe72ca7 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 863.047575] env[63028]: DEBUG nova.virt.hardware [None req-477590c7-7359-44a5-a332-6d0b0fe72ca7 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 863.047829] env[63028]: DEBUG nova.virt.hardware [None req-477590c7-7359-44a5-a332-6d0b0fe72ca7 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 863.048037] env[63028]: DEBUG nova.virt.hardware [None req-477590c7-7359-44a5-a332-6d0b0fe72ca7 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 863.048345] env[63028]: DEBUG nova.virt.hardware [None req-477590c7-7359-44a5-a332-6d0b0fe72ca7 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 863.048496] env[63028]: DEBUG nova.virt.hardware [None 
req-477590c7-7359-44a5-a332-6d0b0fe72ca7 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 863.048698] env[63028]: DEBUG nova.virt.hardware [None req-477590c7-7359-44a5-a332-6d0b0fe72ca7 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 863.048924] env[63028]: DEBUG nova.virt.hardware [None req-477590c7-7359-44a5-a332-6d0b0fe72ca7 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 863.049164] env[63028]: DEBUG nova.virt.hardware [None req-477590c7-7359-44a5-a332-6d0b0fe72ca7 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 863.050028] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd86793d-8c60-447c-8884-7d43bd7e0042 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.058514] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bab75b2d-0322-472b-91f7-7d654cf90494 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.088328] env[63028]: DEBUG oslo_vmware.api [None req-81b1346f-eb21-48c7-8ffd-fb557f6b5405 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Task: {'id': task-2735687, 'name': Rename_Task, 'duration_secs': 0.153578} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.088328] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-81b1346f-eb21-48c7-8ffd-fb557f6b5405 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] [instance: 4e859327-ccd3-440e-b884-67f6cdadf97f] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 863.089609] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-59cc50d4-6faa-44ad-9fb9-e9ff47ecc59b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.101572] env[63028]: DEBUG oslo_vmware.api [None req-c457f1a3-7cf4-4d81-ade9-2592889b1fa6 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]527ad118-4c47-a2e9-6ed3-f5cf7e378f20, 'name': SearchDatastore_Task, 'duration_secs': 0.024795} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.103180] env[63028]: DEBUG oslo_vmware.api [None req-81b1346f-eb21-48c7-8ffd-fb557f6b5405 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Waiting for the task: (returnval){ [ 863.103180] env[63028]: value = "task-2735688" [ 863.103180] env[63028]: _type = "Task" [ 863.103180] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.103380] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ec6cfd20-0e83-4d92-96b9-e3d40ecfd0fe {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.105979] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4c4ff793-44ac-4d71-af67-67f78b3ae605 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Releasing lock "refresh_cache-a50e1167-d8ed-4099-83c3-a5066ab0be1f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 863.106350] env[63028]: DEBUG nova.compute.manager [None req-4c4ff793-44ac-4d71-af67-67f78b3ae605 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: a50e1167-d8ed-4099-83c3-a5066ab0be1f] Instance network_info: |[{"id": "abc41dea-8b6f-4cf7-b02f-21996a0aaf8d", "address": "fa:16:3e:35:71:27", "network": {"id": "063de6d9-43e6-4adb-88dc-d4fe17058488", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-659483097-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68de7445caeb4381b9e68c685ccb5e0b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b356db78-99c7-4464-822c-fc7e193f7878", "external-id": "nsx-vlan-transportzone-231", "segmentation_id": 231, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapabc41dea-8b", "ovs_interfaceid": "abc41dea-8b6f-4cf7-b02f-21996a0aaf8d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 863.109413] env[63028]: DEBUG oslo_concurrency.lockutils [req-110dc71e-edc9-4ff0-9d94-946f85ec8d8b req-249da606-d80f-42e3-8b94-cf52c1174c8a service nova] Acquired lock "refresh_cache-a50e1167-d8ed-4099-83c3-a5066ab0be1f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 863.109605] env[63028]: DEBUG nova.network.neutron [req-110dc71e-edc9-4ff0-9d94-946f85ec8d8b req-249da606-d80f-42e3-8b94-cf52c1174c8a service nova] [instance: a50e1167-d8ed-4099-83c3-a5066ab0be1f] Refreshing network info cache for port abc41dea-8b6f-4cf7-b02f-21996a0aaf8d {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 863.111165] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-4c4ff793-44ac-4d71-af67-67f78b3ae605 
tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: a50e1167-d8ed-4099-83c3-a5066ab0be1f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:35:71:27', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b356db78-99c7-4464-822c-fc7e193f7878', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'abc41dea-8b6f-4cf7-b02f-21996a0aaf8d', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 863.118775] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c4ff793-44ac-4d71-af67-67f78b3ae605 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Creating folder: Project (68de7445caeb4381b9e68c685ccb5e0b). Parent ref: group-v550570. {{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 863.120379] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e26f2130-26c0-489c-9cfb-6fd6680d3c8e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.125059] env[63028]: DEBUG oslo_vmware.api [None req-c457f1a3-7cf4-4d81-ade9-2592889b1fa6 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Waiting for the task: (returnval){ [ 863.125059] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5226f289-9084-221f-c971-91978f2f06b3" [ 863.125059] env[63028]: _type = "Task" [ 863.125059] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.131455] env[63028]: DEBUG oslo_vmware.api [None req-81b1346f-eb21-48c7-8ffd-fb557f6b5405 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Task: {'id': task-2735688, 'name': PowerOnVM_Task} progress is 33%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.135867] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-4c4ff793-44ac-4d71-af67-67f78b3ae605 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Created folder: Project (68de7445caeb4381b9e68c685ccb5e0b) in parent group-v550570. [ 863.136133] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c4ff793-44ac-4d71-af67-67f78b3ae605 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Creating folder: Instances. Parent ref: group-v550782. {{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 863.136870] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a42ea3ca-d6c1-445f-8256-998cddfddd6a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.141326] env[63028]: DEBUG oslo_vmware.api [None req-c457f1a3-7cf4-4d81-ade9-2592889b1fa6 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5226f289-9084-221f-c971-91978f2f06b3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.151958] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-4c4ff793-44ac-4d71-af67-67f78b3ae605 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Created folder: Instances in parent group-v550782. [ 863.152486] env[63028]: DEBUG oslo.service.loopingcall [None req-4c4ff793-44ac-4d71-af67-67f78b3ae605 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 863.152486] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a50e1167-d8ed-4099-83c3-a5066ab0be1f] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 863.152720] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-32da1e78-9285-4b5a-8b43-7f349a042d05 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.175474] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 863.175474] env[63028]: value = "task-2735691" [ 863.175474] env[63028]: _type = "Task" [ 863.175474] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.186514] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735691, 'name': CreateVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.221554] env[63028]: DEBUG nova.scheduler.client.report [None req-50c05c22-9a17-46a2-aa9e-4dd4c15fda05 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 863.234392] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: e20ed04f-205b-4aa9-b8b6-e352cd237412] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 863.294427] env[63028]: DEBUG oslo_vmware.rw_handles [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Completed reading data from the image iterator. {{(pid=63028) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 863.294719] env[63028]: DEBUG oslo_vmware.rw_handles [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52ddb32d-a300-3ffa-52af-50f17fbda9cc/disk-0.vmdk. 
{{(pid=63028) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 863.295716] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-607b5d99-1d4e-488a-bcbc-4eb4a94eca5d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.303692] env[63028]: DEBUG oslo_vmware.rw_handles [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52ddb32d-a300-3ffa-52af-50f17fbda9cc/disk-0.vmdk is in state: ready. {{(pid=63028) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 863.303886] env[63028]: DEBUG oslo_vmware.rw_handles [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Releasing lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52ddb32d-a300-3ffa-52af-50f17fbda9cc/disk-0.vmdk. {{(pid=63028) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 863.304197] env[63028]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-2369a155-9ea9-4c16-856e-545b6ab99554 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.503545] env[63028]: DEBUG oslo_vmware.rw_handles [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Closed VMDK write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52ddb32d-a300-3ffa-52af-50f17fbda9cc/disk-0.vmdk. 
{{(pid=63028) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 863.503768] env[63028]: INFO nova.virt.vmwareapi.images [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: 9773ad95-1894-471d-8020-c7952eac4be4] Downloaded image file data c9823cf3-c81e-4b18-855a-a01f46d8c790 [ 863.504617] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc519616-e7f9-4410-be03-81c42af9ea6e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.521496] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e0e1e7d3-b667-4a47-a999-8d5dca84e310 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.551027] env[63028]: INFO nova.virt.vmwareapi.images [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: 9773ad95-1894-471d-8020-c7952eac4be4] The imported VM was unregistered [ 863.553616] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: 9773ad95-1894-471d-8020-c7952eac4be4] Caching image {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 863.553871] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Creating directory with path [datastore1] devstack-image-cache_base/c9823cf3-c81e-4b18-855a-a01f46d8c790 {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 863.554182] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-56f311fa-10ed-43c7-a83c-191da2755357 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.566719] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Created directory with path [datastore1] devstack-image-cache_base/c9823cf3-c81e-4b18-855a-a01f46d8c790 {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 863.566962] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_b84721b1-833b-407b-a3b9-93fbbcee8a1c/OSTACK_IMG_b84721b1-833b-407b-a3b9-93fbbcee8a1c.vmdk to [datastore1] devstack-image-cache_base/c9823cf3-c81e-4b18-855a-a01f46d8c790/c9823cf3-c81e-4b18-855a-a01f46d8c790.vmdk. 
{{(pid=63028) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 863.567267] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-86389d45-e71c-4bc6-a91f-2ab513d11cc6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.576258] env[63028]: DEBUG oslo_vmware.api [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Waiting for the task: (returnval){ [ 863.576258] env[63028]: value = "task-2735693" [ 863.576258] env[63028]: _type = "Task" [ 863.576258] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.585095] env[63028]: DEBUG oslo_vmware.api [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735693, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.610590] env[63028]: DEBUG nova.network.neutron [None req-477590c7-7359-44a5-a332-6d0b0fe72ca7 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 1d008794-3c1a-46c6-b4eb-3d5441efdb22] Successfully updated port: 4da14eb6-411a-4cdd-afe0-bd34e474882f {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 863.617996] env[63028]: DEBUG oslo_vmware.api [None req-81b1346f-eb21-48c7-8ffd-fb557f6b5405 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Task: {'id': task-2735688, 'name': PowerOnVM_Task} progress is 94%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.640139] env[63028]: DEBUG oslo_vmware.api [None req-c457f1a3-7cf4-4d81-ade9-2592889b1fa6 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5226f289-9084-221f-c971-91978f2f06b3, 'name': SearchDatastore_Task, 'duration_secs': 0.036177} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.640404] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c457f1a3-7cf4-4d81-ade9-2592889b1fa6 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 863.640666] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-c457f1a3-7cf4-4d81-ade9-2592889b1fa6 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] 79f4ef22-a589-4d5c-8832-5d5dcdd55561/79f4ef22-a589-4d5c-8832-5d5dcdd55561.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 863.641013] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7c27ce96-bf41-4783-9994-5063c3d2e76d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.651105] env[63028]: DEBUG oslo_vmware.api [None req-c457f1a3-7cf4-4d81-ade9-2592889b1fa6 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Waiting for the task: (returnval){ [ 863.651105] env[63028]: value = "task-2735694" [ 863.651105] env[63028]: _type = "Task" [ 863.651105] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.660866] env[63028]: DEBUG oslo_vmware.api [None req-c457f1a3-7cf4-4d81-ade9-2592889b1fa6 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2735694, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.687934] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735691, 'name': CreateVM_Task, 'duration_secs': 0.388249} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.687934] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a50e1167-d8ed-4099-83c3-a5066ab0be1f] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 863.687934] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4c4ff793-44ac-4d71-af67-67f78b3ae605 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 863.687934] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4c4ff793-44ac-4d71-af67-67f78b3ae605 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 863.687934] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4c4ff793-44ac-4d71-af67-67f78b3ae605 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 863.688294] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-82fd5745-a649-421a-acf0-31eec47c6c29 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.695607] env[63028]: DEBUG oslo_vmware.api [None req-4c4ff793-44ac-4d71-af67-67f78b3ae605 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Waiting for the task: (returnval){ [ 863.695607] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52ee56a8-63a5-836a-7df6-de5e2d09e14e" [ 863.695607] env[63028]: _type = "Task" [ 863.695607] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.708774] env[63028]: DEBUG oslo_vmware.api [None req-4c4ff793-44ac-4d71-af67-67f78b3ae605 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52ee56a8-63a5-836a-7df6-de5e2d09e14e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.729467] env[63028]: DEBUG oslo_concurrency.lockutils [None req-50c05c22-9a17-46a2-aa9e-4dd4c15fda05 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.730s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 863.730131] env[63028]: DEBUG nova.compute.manager [None req-50c05c22-9a17-46a2-aa9e-4dd4c15fda05 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: f0ca0d73-d428-4b8c-acac-a80b7b7dd793] Start building networks asynchronously for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 863.735312] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c1571098-c3e1-4a86-83aa-080ecc8a35f5 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.546s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 863.736740] env[63028]: INFO nova.compute.claims [None req-c1571098-c3e1-4a86-83aa-080ecc8a35f5 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: 3b90dbb8-66ce-435f-beae-5464720bfb3e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 863.740023] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: f80df630-327b-4923-a785-5d2e48fe1f19] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 863.912690] env[63028]: DEBUG nova.network.neutron [req-110dc71e-edc9-4ff0-9d94-946f85ec8d8b req-249da606-d80f-42e3-8b94-cf52c1174c8a service nova] [instance: a50e1167-d8ed-4099-83c3-a5066ab0be1f] Updated VIF entry in instance network info cache for port abc41dea-8b6f-4cf7-b02f-21996a0aaf8d. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 863.913154] env[63028]: DEBUG nova.network.neutron [req-110dc71e-edc9-4ff0-9d94-946f85ec8d8b req-249da606-d80f-42e3-8b94-cf52c1174c8a service nova] [instance: a50e1167-d8ed-4099-83c3-a5066ab0be1f] Updating instance_info_cache with network_info: [{"id": "abc41dea-8b6f-4cf7-b02f-21996a0aaf8d", "address": "fa:16:3e:35:71:27", "network": {"id": "063de6d9-43e6-4adb-88dc-d4fe17058488", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-659483097-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68de7445caeb4381b9e68c685ccb5e0b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b356db78-99c7-4464-822c-fc7e193f7878", "external-id": "nsx-vlan-transportzone-231", "segmentation_id": 231, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapabc41dea-8b", "ovs_interfaceid": "abc41dea-8b6f-4cf7-b02f-21996a0aaf8d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 864.091048] env[63028]: DEBUG oslo_vmware.api [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735693, 'name': MoveVirtualDisk_Task} progress is 21%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.115252] env[63028]: DEBUG oslo_concurrency.lockutils [None req-477590c7-7359-44a5-a332-6d0b0fe72ca7 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Acquiring lock "refresh_cache-1d008794-3c1a-46c6-b4eb-3d5441efdb22" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 864.115482] env[63028]: DEBUG oslo_concurrency.lockutils [None req-477590c7-7359-44a5-a332-6d0b0fe72ca7 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Acquired lock "refresh_cache-1d008794-3c1a-46c6-b4eb-3d5441efdb22" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 864.115717] env[63028]: DEBUG nova.network.neutron [None req-477590c7-7359-44a5-a332-6d0b0fe72ca7 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 1d008794-3c1a-46c6-b4eb-3d5441efdb22] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 864.122079] env[63028]: DEBUG oslo_vmware.api [None req-81b1346f-eb21-48c7-8ffd-fb557f6b5405 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Task: {'id': task-2735688, 'name': PowerOnVM_Task, 'duration_secs': 0.597803} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.122079] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-81b1346f-eb21-48c7-8ffd-fb557f6b5405 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] [instance: 4e859327-ccd3-440e-b884-67f6cdadf97f] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 864.122462] env[63028]: INFO nova.compute.manager [None req-81b1346f-eb21-48c7-8ffd-fb557f6b5405 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] [instance: 4e859327-ccd3-440e-b884-67f6cdadf97f] Took 11.90 seconds to spawn the instance on the hypervisor. [ 864.122462] env[63028]: DEBUG nova.compute.manager [None req-81b1346f-eb21-48c7-8ffd-fb557f6b5405 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] [instance: 4e859327-ccd3-440e-b884-67f6cdadf97f] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 864.123251] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e916a735-c01d-4b37-b1ba-cded2e2dc0d8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.166393] env[63028]: DEBUG oslo_vmware.api [None req-c457f1a3-7cf4-4d81-ade9-2592889b1fa6 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2735694, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.210541] env[63028]: DEBUG oslo_vmware.api [None req-4c4ff793-44ac-4d71-af67-67f78b3ae605 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52ee56a8-63a5-836a-7df6-de5e2d09e14e, 'name': SearchDatastore_Task, 'duration_secs': 0.016086} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.210905] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4c4ff793-44ac-4d71-af67-67f78b3ae605 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 864.211190] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-4c4ff793-44ac-4d71-af67-67f78b3ae605 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: a50e1167-d8ed-4099-83c3-a5066ab0be1f] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 864.211441] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4c4ff793-44ac-4d71-af67-67f78b3ae605 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 864.211618] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4c4ff793-44ac-4d71-af67-67f78b3ae605 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 864.211874] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-4c4ff793-44ac-4d71-af67-67f78b3ae605 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 864.213057] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-88dfb196-f6b7-4932-bab6-90820d514bfa {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.217886] env[63028]: DEBUG nova.compute.manager [req-0b0d57e6-c834-40bf-b5bd-87b2f61346ce req-83c6bb9c-8f0b-43d0-810c-a6c6db27e54d service nova] [instance: 1d008794-3c1a-46c6-b4eb-3d5441efdb22] Received event network-vif-plugged-4da14eb6-411a-4cdd-afe0-bd34e474882f {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 864.217886] env[63028]: DEBUG oslo_concurrency.lockutils [req-0b0d57e6-c834-40bf-b5bd-87b2f61346ce req-83c6bb9c-8f0b-43d0-810c-a6c6db27e54d service nova] Acquiring lock "1d008794-3c1a-46c6-b4eb-3d5441efdb22-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 864.218317] env[63028]: DEBUG oslo_concurrency.lockutils [req-0b0d57e6-c834-40bf-b5bd-87b2f61346ce req-83c6bb9c-8f0b-43d0-810c-a6c6db27e54d service nova] Lock "1d008794-3c1a-46c6-b4eb-3d5441efdb22-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 864.218317] env[63028]: DEBUG oslo_concurrency.lockutils [req-0b0d57e6-c834-40bf-b5bd-87b2f61346ce req-83c6bb9c-8f0b-43d0-810c-a6c6db27e54d service nova] Lock "1d008794-3c1a-46c6-b4eb-3d5441efdb22-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 864.218467] env[63028]: DEBUG nova.compute.manager [req-0b0d57e6-c834-40bf-b5bd-87b2f61346ce req-83c6bb9c-8f0b-43d0-810c-a6c6db27e54d service nova] [instance: 1d008794-3c1a-46c6-b4eb-3d5441efdb22] No waiting events found dispatching network-vif-plugged-4da14eb6-411a-4cdd-afe0-bd34e474882f {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 864.219396] env[63028]: WARNING nova.compute.manager [req-0b0d57e6-c834-40bf-b5bd-87b2f61346ce req-83c6bb9c-8f0b-43d0-810c-a6c6db27e54d service nova] [instance: 1d008794-3c1a-46c6-b4eb-3d5441efdb22] Received unexpected event network-vif-plugged-4da14eb6-411a-4cdd-afe0-bd34e474882f for instance with vm_state building and task_state spawning. [ 864.219396] env[63028]: DEBUG nova.compute.manager [req-0b0d57e6-c834-40bf-b5bd-87b2f61346ce req-83c6bb9c-8f0b-43d0-810c-a6c6db27e54d service nova] [instance: 1d008794-3c1a-46c6-b4eb-3d5441efdb22] Received event network-changed-4da14eb6-411a-4cdd-afe0-bd34e474882f {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 864.219396] env[63028]: DEBUG nova.compute.manager [req-0b0d57e6-c834-40bf-b5bd-87b2f61346ce req-83c6bb9c-8f0b-43d0-810c-a6c6db27e54d service nova] [instance: 1d008794-3c1a-46c6-b4eb-3d5441efdb22] Refreshing instance network info cache due to event network-changed-4da14eb6-411a-4cdd-afe0-bd34e474882f. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 864.219396] env[63028]: DEBUG oslo_concurrency.lockutils [req-0b0d57e6-c834-40bf-b5bd-87b2f61346ce req-83c6bb9c-8f0b-43d0-810c-a6c6db27e54d service nova] Acquiring lock "refresh_cache-1d008794-3c1a-46c6-b4eb-3d5441efdb22" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 864.234935] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-4c4ff793-44ac-4d71-af67-67f78b3ae605 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 864.235198] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-4c4ff793-44ac-4d71-af67-67f78b3ae605 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 864.236770] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-19c46dcf-237e-46c7-98f9-48b48093a4d8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.241431] env[63028]: DEBUG nova.compute.utils [None req-50c05c22-9a17-46a2-aa9e-4dd4c15fda05 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 864.243073] env[63028]: DEBUG nova.compute.manager [None req-50c05c22-9a17-46a2-aa9e-4dd4c15fda05 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: f0ca0d73-d428-4b8c-acac-a80b7b7dd793] Allocating IP information in the background. {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 864.243073] env[63028]: DEBUG nova.network.neutron [None req-50c05c22-9a17-46a2-aa9e-4dd4c15fda05 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: f0ca0d73-d428-4b8c-acac-a80b7b7dd793] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 864.246770] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: f311a533-5c48-410b-ba3b-58f0032c8816] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 864.250379] env[63028]: DEBUG oslo_vmware.api [None req-4c4ff793-44ac-4d71-af67-67f78b3ae605 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Waiting for the task: (returnval){ [ 864.250379] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5285f666-cc80-dff3-6689-0d7ad7df6d70" [ 864.250379] env[63028]: _type = "Task" [ 864.250379] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.267167] env[63028]: DEBUG oslo_vmware.api [None req-4c4ff793-44ac-4d71-af67-67f78b3ae605 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5285f666-cc80-dff3-6689-0d7ad7df6d70, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.320247] env[63028]: DEBUG nova.policy [None req-50c05c22-9a17-46a2-aa9e-4dd4c15fda05 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '736ca268dc2c434aac2165473ea28d99', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e2304ce21bf141cab94fb6c342653812', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 864.416173] env[63028]: DEBUG oslo_concurrency.lockutils [req-110dc71e-edc9-4ff0-9d94-946f85ec8d8b req-249da606-d80f-42e3-8b94-cf52c1174c8a service nova] Releasing lock "refresh_cache-a50e1167-d8ed-4099-83c3-a5066ab0be1f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 864.595289] env[63028]: DEBUG oslo_vmware.api [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735693, 'name': MoveVirtualDisk_Task} progress is 40%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.657391] env[63028]: INFO nova.compute.manager [None req-81b1346f-eb21-48c7-8ffd-fb557f6b5405 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] [instance: 4e859327-ccd3-440e-b884-67f6cdadf97f] Took 48.48 seconds to build instance. [ 864.670826] env[63028]: DEBUG oslo_vmware.api [None req-c457f1a3-7cf4-4d81-ade9-2592889b1fa6 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2735694, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.753046] env[63028]: DEBUG nova.compute.manager [None req-50c05c22-9a17-46a2-aa9e-4dd4c15fda05 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: f0ca0d73-d428-4b8c-acac-a80b7b7dd793] Start building block device mappings for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 864.766333] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: 94b1bf30-0f9b-4197-99ff-6631a13ab2d1] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 864.784913] env[63028]: DEBUG oslo_vmware.api [None req-4c4ff793-44ac-4d71-af67-67f78b3ae605 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5285f666-cc80-dff3-6689-0d7ad7df6d70, 'name': SearchDatastore_Task, 'duration_secs': 0.085918} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.786380] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b23118a8-4a46-4757-9c94-3633188b100c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.797144] env[63028]: DEBUG oslo_vmware.api [None req-4c4ff793-44ac-4d71-af67-67f78b3ae605 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Waiting for the task: (returnval){ [ 864.797144] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]522ca031-4671-300d-0bdc-fcbb47c1f3ff" [ 864.797144] env[63028]: _type = "Task" [ 864.797144] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.828806] env[63028]: DEBUG oslo_vmware.api [None req-4c4ff793-44ac-4d71-af67-67f78b3ae605 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]522ca031-4671-300d-0bdc-fcbb47c1f3ff, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.833489] env[63028]: DEBUG nova.network.neutron [None req-477590c7-7359-44a5-a332-6d0b0fe72ca7 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 1d008794-3c1a-46c6-b4eb-3d5441efdb22] Instance cache missing network info. {{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 865.095013] env[63028]: DEBUG oslo_vmware.api [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735693, 'name': MoveVirtualDisk_Task} progress is 54%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.137479] env[63028]: DEBUG nova.network.neutron [None req-477590c7-7359-44a5-a332-6d0b0fe72ca7 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 1d008794-3c1a-46c6-b4eb-3d5441efdb22] Updating instance_info_cache with network_info: [{"id": "4da14eb6-411a-4cdd-afe0-bd34e474882f", "address": "fa:16:3e:43:b4:9d", "network": {"id": "53257b54-f197-4a4e-94bb-23ad6c6b7353", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-575661432-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e2304ce21bf141cab94fb6c342653812", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0954fad3-d24d-496c-83e6-a09d3cb556fc", "external-id": "nsx-vlan-transportzone-216", "segmentation_id": 216, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4da14eb6-41", "ovs_interfaceid": "4da14eb6-411a-4cdd-afe0-bd34e474882f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 865.160124] env[63028]: DEBUG oslo_concurrency.lockutils [None req-81b1346f-eb21-48c7-8ffd-fb557f6b5405 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Lock "4e859327-ccd3-440e-b884-67f6cdadf97f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 69.930s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 865.165245] env[63028]: DEBUG nova.network.neutron [None req-50c05c22-9a17-46a2-aa9e-4dd4c15fda05 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: f0ca0d73-d428-4b8c-acac-a80b7b7dd793] Successfully created port: f5e9674b-220d-4e59-bec7-5b1512efb29e {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 865.173901] env[63028]: DEBUG oslo_vmware.api [None req-c457f1a3-7cf4-4d81-ade9-2592889b1fa6 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2735694, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.274048] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: a167df01-05e4-453d-8800-9c104d912474] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 865.317074] env[63028]: DEBUG oslo_vmware.api [None req-4c4ff793-44ac-4d71-af67-67f78b3ae605 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]522ca031-4671-300d-0bdc-fcbb47c1f3ff, 'name': SearchDatastore_Task, 'duration_secs': 0.162514} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.317409] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4c4ff793-44ac-4d71-af67-67f78b3ae605 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 865.317681] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c4ff793-44ac-4d71-af67-67f78b3ae605 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] a50e1167-d8ed-4099-83c3-a5066ab0be1f/a50e1167-d8ed-4099-83c3-a5066ab0be1f.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 865.318023] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-573b84ee-c4af-411a-8054-89fc28423ea2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.331615] env[63028]: DEBUG oslo_vmware.api [None req-4c4ff793-44ac-4d71-af67-67f78b3ae605 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Waiting for the task: (returnval){ [ 865.331615] env[63028]: value = "task-2735695" [ 865.331615] env[63028]: _type = "Task" [ 865.331615] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.347607] env[63028]: DEBUG oslo_vmware.api [None req-4c4ff793-44ac-4d71-af67-67f78b3ae605 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': task-2735695, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.402625] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34631363-0652-4a35-b094-7ae73ba9e44e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.415496] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-556a9621-99fb-4f03-b92f-f965fc24e965 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.458091] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-582b0e82-bb72-49da-b303-6a7be6b5c0bc {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.470473] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14eda7b1-d703-43f0-88c6-78f17a755207 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.490549] env[63028]: DEBUG nova.compute.provider_tree [None req-c1571098-c3e1-4a86-83aa-080ecc8a35f5 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 865.592423] env[63028]: DEBUG oslo_vmware.api [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735693, 'name': MoveVirtualDisk_Task} progress is 74%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.643822] env[63028]: DEBUG oslo_concurrency.lockutils [None req-477590c7-7359-44a5-a332-6d0b0fe72ca7 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Releasing lock "refresh_cache-1d008794-3c1a-46c6-b4eb-3d5441efdb22" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 865.644216] env[63028]: DEBUG nova.compute.manager [None req-477590c7-7359-44a5-a332-6d0b0fe72ca7 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 1d008794-3c1a-46c6-b4eb-3d5441efdb22] Instance network_info: |[{"id": "4da14eb6-411a-4cdd-afe0-bd34e474882f", "address": "fa:16:3e:43:b4:9d", "network": {"id": "53257b54-f197-4a4e-94bb-23ad6c6b7353", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-575661432-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e2304ce21bf141cab94fb6c342653812", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0954fad3-d24d-496c-83e6-a09d3cb556fc", "external-id": "nsx-vlan-transportzone-216", "segmentation_id": 216, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4da14eb6-41", "ovs_interfaceid": "4da14eb6-411a-4cdd-afe0-bd34e474882f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 865.644562] env[63028]: DEBUG oslo_concurrency.lockutils [req-0b0d57e6-c834-40bf-b5bd-87b2f61346ce req-83c6bb9c-8f0b-43d0-810c-a6c6db27e54d service nova] Acquired lock "refresh_cache-1d008794-3c1a-46c6-b4eb-3d5441efdb22" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 865.644821] env[63028]: DEBUG nova.network.neutron [req-0b0d57e6-c834-40bf-b5bd-87b2f61346ce req-83c6bb9c-8f0b-43d0-810c-a6c6db27e54d service nova] [instance: 1d008794-3c1a-46c6-b4eb-3d5441efdb22] Refreshing network info cache for port 4da14eb6-411a-4cdd-afe0-bd34e474882f {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 865.646035] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-477590c7-7359-44a5-a332-6d0b0fe72ca7 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 1d008794-3c1a-46c6-b4eb-3d5441efdb22] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:43:b4:9d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0954fad3-d24d-496c-83e6-a09d3cb556fc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4da14eb6-411a-4cdd-afe0-bd34e474882f', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 865.653640] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-477590c7-7359-44a5-a332-6d0b0fe72ca7 
tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Creating folder: Project (e2304ce21bf141cab94fb6c342653812). Parent ref: group-v550570. {{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 865.656968] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0b94b174-ffd5-417e-bca8-dbe8fb703059 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.663183] env[63028]: DEBUG nova.compute.manager [None req-9c87809b-970a-44ae-a0c9-ae8da38668e0 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: ed872f21-c2c4-4597-8c9e-9f8d2202b707] Starting instance... {{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 865.674188] env[63028]: DEBUG oslo_vmware.api [None req-c457f1a3-7cf4-4d81-ade9-2592889b1fa6 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2735694, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.639854} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.674485] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-c457f1a3-7cf4-4d81-ade9-2592889b1fa6 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] 79f4ef22-a589-4d5c-8832-5d5dcdd55561/79f4ef22-a589-4d5c-8832-5d5dcdd55561.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 865.674695] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c457f1a3-7cf4-4d81-ade9-2592889b1fa6 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 79f4ef22-a589-4d5c-8832-5d5dcdd55561] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 865.676296] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bea1cdc6-036b-4198-954a-7c0f560a011f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.678700] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-477590c7-7359-44a5-a332-6d0b0fe72ca7 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Created folder: Project (e2304ce21bf141cab94fb6c342653812) in parent group-v550570. [ 865.678922] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-477590c7-7359-44a5-a332-6d0b0fe72ca7 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Creating folder: Instances. Parent ref: group-v550785. 
{{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 865.679334] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d5ec2dcd-3c96-441c-9900-41a947a07350 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.681654] env[63028]: DEBUG oslo_concurrency.lockutils [None req-b38f1040-866d-4003-80a9-2af0d3f0d4c9 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Acquiring lock "4e859327-ccd3-440e-b884-67f6cdadf97f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 865.681654] env[63028]: DEBUG oslo_concurrency.lockutils [None req-b38f1040-866d-4003-80a9-2af0d3f0d4c9 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Lock "4e859327-ccd3-440e-b884-67f6cdadf97f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 865.681801] env[63028]: DEBUG oslo_concurrency.lockutils [None req-b38f1040-866d-4003-80a9-2af0d3f0d4c9 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Acquiring lock "4e859327-ccd3-440e-b884-67f6cdadf97f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 865.681885] env[63028]: DEBUG oslo_concurrency.lockutils [None req-b38f1040-866d-4003-80a9-2af0d3f0d4c9 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Lock "4e859327-ccd3-440e-b884-67f6cdadf97f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 865.682063] env[63028]: DEBUG oslo_concurrency.lockutils [None req-b38f1040-866d-4003-80a9-2af0d3f0d4c9 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Lock "4e859327-ccd3-440e-b884-67f6cdadf97f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 865.686344] env[63028]: INFO nova.compute.manager [None req-b38f1040-866d-4003-80a9-2af0d3f0d4c9 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] [instance: 4e859327-ccd3-440e-b884-67f6cdadf97f] Terminating instance [ 865.689880] env[63028]: DEBUG oslo_vmware.api [None req-c457f1a3-7cf4-4d81-ade9-2592889b1fa6 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Waiting for the task: (returnval){ [ 865.689880] env[63028]: value = "task-2735697" [ 865.689880] env[63028]: _type = "Task" [ 865.689880] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.700826] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-477590c7-7359-44a5-a332-6d0b0fe72ca7 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Created folder: Instances in parent group-v550785. [ 865.700826] env[63028]: DEBUG oslo.service.loopingcall [None req-477590c7-7359-44a5-a332-6d0b0fe72ca7 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 865.701801] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1d008794-3c1a-46c6-b4eb-3d5441efdb22] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 865.702559] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-551c0dba-e995-4d11-ab3d-a531b77e6f01 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.720959] env[63028]: DEBUG oslo_vmware.api [None req-c457f1a3-7cf4-4d81-ade9-2592889b1fa6 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2735697, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.730148] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 865.730148] env[63028]: value = "task-2735699" [ 865.730148] env[63028]: _type = "Task" [ 865.730148] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.742066] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735699, 'name': CreateVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.774235] env[63028]: DEBUG nova.compute.manager [None req-50c05c22-9a17-46a2-aa9e-4dd4c15fda05 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: f0ca0d73-d428-4b8c-acac-a80b7b7dd793] Start spawning the instance on the hypervisor. 
{{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 865.777166] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: 679fca11-7390-4596-ab74-2f82a6cf8858] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 865.806260] env[63028]: DEBUG nova.virt.hardware [None req-50c05c22-9a17-46a2-aa9e-4dd4c15fda05 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 865.806522] env[63028]: DEBUG nova.virt.hardware [None req-50c05c22-9a17-46a2-aa9e-4dd4c15fda05 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 865.806654] env[63028]: DEBUG nova.virt.hardware [None req-50c05c22-9a17-46a2-aa9e-4dd4c15fda05 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 865.806867] env[63028]: DEBUG nova.virt.hardware [None req-50c05c22-9a17-46a2-aa9e-4dd4c15fda05 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 865.807041] env[63028]: DEBUG nova.virt.hardware [None req-50c05c22-9a17-46a2-aa9e-4dd4c15fda05 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 865.807196] env[63028]: DEBUG nova.virt.hardware [None req-50c05c22-9a17-46a2-aa9e-4dd4c15fda05 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 865.807427] env[63028]: DEBUG nova.virt.hardware [None req-50c05c22-9a17-46a2-aa9e-4dd4c15fda05 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 
865.807607] env[63028]: DEBUG nova.virt.hardware [None req-50c05c22-9a17-46a2-aa9e-4dd4c15fda05 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 865.807779] env[63028]: DEBUG nova.virt.hardware [None req-50c05c22-9a17-46a2-aa9e-4dd4c15fda05 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 865.807944] env[63028]: DEBUG nova.virt.hardware [None req-50c05c22-9a17-46a2-aa9e-4dd4c15fda05 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 865.808187] env[63028]: DEBUG nova.virt.hardware [None req-50c05c22-9a17-46a2-aa9e-4dd4c15fda05 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 865.809559] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ea4ebe8-26ce-4f04-a90d-cf108aa67202 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.826527] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c58f164c-6d0a-4037-a274-30d99a75c1f0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.858127] env[63028]: DEBUG oslo_vmware.api [None req-4c4ff793-44ac-4d71-af67-67f78b3ae605 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': task-2735695, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.940531] env[63028]: DEBUG nova.network.neutron [req-0b0d57e6-c834-40bf-b5bd-87b2f61346ce req-83c6bb9c-8f0b-43d0-810c-a6c6db27e54d service nova] [instance: 1d008794-3c1a-46c6-b4eb-3d5441efdb22] Updated VIF entry in instance network info cache for port 4da14eb6-411a-4cdd-afe0-bd34e474882f. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 865.941009] env[63028]: DEBUG nova.network.neutron [req-0b0d57e6-c834-40bf-b5bd-87b2f61346ce req-83c6bb9c-8f0b-43d0-810c-a6c6db27e54d service nova] [instance: 1d008794-3c1a-46c6-b4eb-3d5441efdb22] Updating instance_info_cache with network_info: [{"id": "4da14eb6-411a-4cdd-afe0-bd34e474882f", "address": "fa:16:3e:43:b4:9d", "network": {"id": "53257b54-f197-4a4e-94bb-23ad6c6b7353", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-575661432-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e2304ce21bf141cab94fb6c342653812", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0954fad3-d24d-496c-83e6-a09d3cb556fc", "external-id": "nsx-vlan-transportzone-216", "segmentation_id": 216, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4da14eb6-41", "ovs_interfaceid": "4da14eb6-411a-4cdd-afe0-bd34e474882f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 865.994052] env[63028]: DEBUG nova.scheduler.client.report [None req-c1571098-c3e1-4a86-83aa-080ecc8a35f5 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 866.102025] env[63028]: DEBUG oslo_vmware.api [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735693, 'name': MoveVirtualDisk_Task} progress is 94%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.188468] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9c87809b-970a-44ae-a0c9-ae8da38668e0 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 866.201530] env[63028]: DEBUG nova.compute.manager [None req-b38f1040-866d-4003-80a9-2af0d3f0d4c9 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] [instance: 4e859327-ccd3-440e-b884-67f6cdadf97f] Start destroying the instance on the hypervisor. 
{{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 866.201749] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-b38f1040-866d-4003-80a9-2af0d3f0d4c9 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] [instance: 4e859327-ccd3-440e-b884-67f6cdadf97f] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 866.202033] env[63028]: DEBUG oslo_vmware.api [None req-c457f1a3-7cf4-4d81-ade9-2592889b1fa6 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2735697, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.095196} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.202780] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07035771-9b7d-4c4f-b051-af77b9d77c9c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.205986] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c457f1a3-7cf4-4d81-ade9-2592889b1fa6 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 79f4ef22-a589-4d5c-8832-5d5dcdd55561] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 866.206781] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bef35db-3c6c-4602-944e-1ebfdd0c232f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.223894] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-b38f1040-866d-4003-80a9-2af0d3f0d4c9 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] [instance: 4e859327-ccd3-440e-b884-67f6cdadf97f] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 866.232594] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-c457f1a3-7cf4-4d81-ade9-2592889b1fa6 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 79f4ef22-a589-4d5c-8832-5d5dcdd55561] Reconfiguring VM instance instance-00000044 to attach disk [datastore1] 79f4ef22-a589-4d5c-8832-5d5dcdd55561/79f4ef22-a589-4d5c-8832-5d5dcdd55561.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 866.232908] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fae91c9a-6852-401d-b216-09200cf8b121 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.235222] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-24044f5a-793c-4899-b34e-3bca3e5251e4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.264308] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735699, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.264640] env[63028]: DEBUG oslo_vmware.api [None req-c457f1a3-7cf4-4d81-ade9-2592889b1fa6 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Waiting for the task: (returnval){ [ 866.264640] env[63028]: value = "task-2735701" [ 866.264640] env[63028]: _type = "Task" [ 866.264640] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.266086] env[63028]: DEBUG oslo_vmware.api [None req-b38f1040-866d-4003-80a9-2af0d3f0d4c9 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Waiting for the task: (returnval){ [ 866.266086] env[63028]: value = "task-2735700" [ 866.266086] env[63028]: _type = "Task" [ 866.266086] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.280021] env[63028]: DEBUG oslo_vmware.api [None req-c457f1a3-7cf4-4d81-ade9-2592889b1fa6 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2735701, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.283235] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: 413f7fea-452b-463f-b396-cdd29e8ffa91] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 866.285687] env[63028]: DEBUG oslo_vmware.api [None req-b38f1040-866d-4003-80a9-2af0d3f0d4c9 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Task: {'id': task-2735700, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.347392] env[63028]: DEBUG oslo_vmware.api [None req-4c4ff793-44ac-4d71-af67-67f78b3ae605 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': task-2735695, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.444657] env[63028]: DEBUG oslo_concurrency.lockutils [req-0b0d57e6-c834-40bf-b5bd-87b2f61346ce req-83c6bb9c-8f0b-43d0-810c-a6c6db27e54d service nova] Releasing lock "refresh_cache-1d008794-3c1a-46c6-b4eb-3d5441efdb22" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 866.499665] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c1571098-c3e1-4a86-83aa-080ecc8a35f5 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.764s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 866.500362] env[63028]: DEBUG nova.compute.manager [None req-c1571098-c3e1-4a86-83aa-080ecc8a35f5 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: 3b90dbb8-66ce-435f-beae-5464720bfb3e] Start building networks asynchronously for instance. 
{{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 866.503776] env[63028]: DEBUG oslo_concurrency.lockutils [None req-db2d1a1b-bab5-4d65-92a7-d30b7a67907c tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 28.101s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 866.504183] env[63028]: DEBUG nova.objects.instance [None req-db2d1a1b-bab5-4d65-92a7-d30b7a67907c tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 70888889-4965-47ab-ad47-59f1c1286bd8] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63028) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 866.596591] env[63028]: DEBUG oslo_vmware.api [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735693, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.693064} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.597015] env[63028]: INFO nova.virt.vmwareapi.ds_util [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_b84721b1-833b-407b-a3b9-93fbbcee8a1c/OSTACK_IMG_b84721b1-833b-407b-a3b9-93fbbcee8a1c.vmdk to [datastore1] devstack-image-cache_base/c9823cf3-c81e-4b18-855a-a01f46d8c790/c9823cf3-c81e-4b18-855a-a01f46d8c790.vmdk. [ 866.597319] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: 9773ad95-1894-471d-8020-c7952eac4be4] Cleaning up location [datastore1] OSTACK_IMG_b84721b1-833b-407b-a3b9-93fbbcee8a1c {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 866.597414] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_b84721b1-833b-407b-a3b9-93fbbcee8a1c {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 866.597740] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d3d2af47-90f2-42a2-bae8-540fc2df6a74 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.611337] env[63028]: DEBUG oslo_vmware.api [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Waiting for the task: (returnval){ [ 866.611337] env[63028]: value = "task-2735702" [ 866.611337] env[63028]: _type = "Task" [ 866.611337] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.623266] env[63028]: DEBUG oslo_vmware.api [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735702, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.745799] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735699, 'name': CreateVM_Task, 'duration_secs': 0.90898} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.745963] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1d008794-3c1a-46c6-b4eb-3d5441efdb22] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 866.746703] env[63028]: DEBUG oslo_concurrency.lockutils [None req-477590c7-7359-44a5-a332-6d0b0fe72ca7 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 866.746833] env[63028]: DEBUG oslo_concurrency.lockutils [None req-477590c7-7359-44a5-a332-6d0b0fe72ca7 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 866.747173] env[63028]: DEBUG oslo_concurrency.lockutils [None req-477590c7-7359-44a5-a332-6d0b0fe72ca7 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 866.747424] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-65e7f39a-0127-4b72-ac0c-855d90b639d8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.753573] env[63028]: DEBUG oslo_vmware.api [None req-477590c7-7359-44a5-a332-6d0b0fe72ca7 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Waiting for the task: (returnval){ [ 866.753573] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52226dab-7552-3e97-bf7d-92f45094c694" [ 866.753573] env[63028]: _type = "Task" [ 866.753573] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.763051] env[63028]: DEBUG oslo_vmware.api [None req-477590c7-7359-44a5-a332-6d0b0fe72ca7 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52226dab-7552-3e97-bf7d-92f45094c694, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.776231] env[63028]: DEBUG oslo_vmware.api [None req-c457f1a3-7cf4-4d81-ade9-2592889b1fa6 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2735701, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.781829] env[63028]: DEBUG oslo_vmware.api [None req-b38f1040-866d-4003-80a9-2af0d3f0d4c9 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Task: {'id': task-2735700, 'name': PowerOffVM_Task, 'duration_secs': 0.265724} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.782143] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-b38f1040-866d-4003-80a9-2af0d3f0d4c9 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] [instance: 4e859327-ccd3-440e-b884-67f6cdadf97f] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 866.782320] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-b38f1040-866d-4003-80a9-2af0d3f0d4c9 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] [instance: 4e859327-ccd3-440e-b884-67f6cdadf97f] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 866.782565] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f48e04ad-92a4-47ee-97e3-5c8afce160ab {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.791019] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: 03a19e41-1146-4560-8d93-16a23aa952da] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 866.844369] env[63028]: DEBUG oslo_vmware.api [None req-4c4ff793-44ac-4d71-af67-67f78b3ae605 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': task-2735695, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.323983} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.844711] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c4ff793-44ac-4d71-af67-67f78b3ae605 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] a50e1167-d8ed-4099-83c3-a5066ab0be1f/a50e1167-d8ed-4099-83c3-a5066ab0be1f.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 866.845039] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-4c4ff793-44ac-4d71-af67-67f78b3ae605 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: a50e1167-d8ed-4099-83c3-a5066ab0be1f] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 866.845421] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-96ff692a-87d2-4ec3-8532-6d430e0051dd {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.850980] env[63028]: DEBUG nova.compute.manager [req-6321b1ee-9b5c-41e1-818a-fb04190c5c5e req-4c3423c2-e334-4de5-8bc7-c9a7304607c0 service nova] [instance: f0ca0d73-d428-4b8c-acac-a80b7b7dd793] Received event network-vif-plugged-f5e9674b-220d-4e59-bec7-5b1512efb29e {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 866.851269] env[63028]: DEBUG oslo_concurrency.lockutils [req-6321b1ee-9b5c-41e1-818a-fb04190c5c5e req-4c3423c2-e334-4de5-8bc7-c9a7304607c0 service nova] Acquiring lock "f0ca0d73-d428-4b8c-acac-a80b7b7dd793-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 866.851525] env[63028]: DEBUG oslo_concurrency.lockutils [req-6321b1ee-9b5c-41e1-818a-fb04190c5c5e req-4c3423c2-e334-4de5-8bc7-c9a7304607c0 service nova] Lock "f0ca0d73-d428-4b8c-acac-a80b7b7dd793-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 866.851740] env[63028]: DEBUG oslo_concurrency.lockutils [req-6321b1ee-9b5c-41e1-818a-fb04190c5c5e req-4c3423c2-e334-4de5-8bc7-c9a7304607c0 service nova] Lock "f0ca0d73-d428-4b8c-acac-a80b7b7dd793-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 866.851928] env[63028]: DEBUG nova.compute.manager [req-6321b1ee-9b5c-41e1-818a-fb04190c5c5e req-4c3423c2-e334-4de5-8bc7-c9a7304607c0 service nova] [instance: f0ca0d73-d428-4b8c-acac-a80b7b7dd793] No waiting events found dispatching network-vif-plugged-f5e9674b-220d-4e59-bec7-5b1512efb29e {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 866.852139] env[63028]: WARNING nova.compute.manager [req-6321b1ee-9b5c-41e1-818a-fb04190c5c5e req-4c3423c2-e334-4de5-8bc7-c9a7304607c0 service nova] [instance: f0ca0d73-d428-4b8c-acac-a80b7b7dd793] Received unexpected event network-vif-plugged-f5e9674b-220d-4e59-bec7-5b1512efb29e for instance with vm_state building 
and task_state spawning. [ 866.854018] env[63028]: DEBUG oslo_vmware.api [None req-4c4ff793-44ac-4d71-af67-67f78b3ae605 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Waiting for the task: (returnval){ [ 866.854018] env[63028]: value = "task-2735704" [ 866.854018] env[63028]: _type = "Task" [ 866.854018] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.864407] env[63028]: DEBUG oslo_vmware.api [None req-4c4ff793-44ac-4d71-af67-67f78b3ae605 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': task-2735704, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.895530] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-b38f1040-866d-4003-80a9-2af0d3f0d4c9 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] [instance: 4e859327-ccd3-440e-b884-67f6cdadf97f] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 866.896623] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-b38f1040-866d-4003-80a9-2af0d3f0d4c9 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] [instance: 4e859327-ccd3-440e-b884-67f6cdadf97f] Deleting contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 866.896882] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-b38f1040-866d-4003-80a9-2af0d3f0d4c9 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Deleting the datastore file [datastore2] 4e859327-ccd3-440e-b884-67f6cdadf97f {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 866.897207] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c3349437-7744-4372-b863-7de56540ddc3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.906172] env[63028]: DEBUG oslo_vmware.api [None req-b38f1040-866d-4003-80a9-2af0d3f0d4c9 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Waiting for the task: (returnval){ [ 866.906172] env[63028]: value = "task-2735705" [ 866.906172] env[63028]: _type = "Task" [ 866.906172] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.915956] env[63028]: DEBUG oslo_vmware.api [None req-b38f1040-866d-4003-80a9-2af0d3f0d4c9 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Task: {'id': task-2735705, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.925997] env[63028]: DEBUG nova.network.neutron [None req-50c05c22-9a17-46a2-aa9e-4dd4c15fda05 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: f0ca0d73-d428-4b8c-acac-a80b7b7dd793] Successfully updated port: f5e9674b-220d-4e59-bec7-5b1512efb29e {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 867.009859] env[63028]: DEBUG nova.compute.utils [None req-c1571098-c3e1-4a86-83aa-080ecc8a35f5 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 867.015672] env[63028]: DEBUG nova.compute.manager [None req-c1571098-c3e1-4a86-83aa-080ecc8a35f5 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: 3b90dbb8-66ce-435f-beae-5464720bfb3e] Allocating IP information in the background. {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 867.015847] env[63028]: DEBUG nova.network.neutron [None req-c1571098-c3e1-4a86-83aa-080ecc8a35f5 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: 3b90dbb8-66ce-435f-beae-5464720bfb3e] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 867.061788] env[63028]: DEBUG nova.policy [None req-c1571098-c3e1-4a86-83aa-080ecc8a35f5 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '43ed2fb3f1a944fdac8ee7778f171cd0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8efc6d89903c454eb39136a76e0adef5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 867.122775] env[63028]: DEBUG oslo_vmware.api [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735702, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.061629} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.123038] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 867.123211] env[63028]: DEBUG oslo_concurrency.lockutils [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9823cf3-c81e-4b18-855a-a01f46d8c790/c9823cf3-c81e-4b18-855a-a01f46d8c790.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 867.123451] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c9823cf3-c81e-4b18-855a-a01f46d8c790/c9823cf3-c81e-4b18-855a-a01f46d8c790.vmdk to [datastore1] 9773ad95-1894-471d-8020-c7952eac4be4/9773ad95-1894-471d-8020-c7952eac4be4.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 867.123698] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-79dba6d5-7b60-43cf-822f-8675aeea10da {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.131282] env[63028]: DEBUG oslo_vmware.api [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Waiting for the task: (returnval){ [ 867.131282] env[63028]: value = "task-2735706" [ 867.131282] env[63028]: _type = "Task" [ 867.131282] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.139571] env[63028]: DEBUG oslo_vmware.api [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735706, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.265286] env[63028]: DEBUG oslo_vmware.api [None req-477590c7-7359-44a5-a332-6d0b0fe72ca7 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52226dab-7552-3e97-bf7d-92f45094c694, 'name': SearchDatastore_Task, 'duration_secs': 0.011639} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.265663] env[63028]: DEBUG oslo_concurrency.lockutils [None req-477590c7-7359-44a5-a332-6d0b0fe72ca7 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 867.265940] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-477590c7-7359-44a5-a332-6d0b0fe72ca7 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 1d008794-3c1a-46c6-b4eb-3d5441efdb22] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 867.266236] env[63028]: DEBUG oslo_concurrency.lockutils [None req-477590c7-7359-44a5-a332-6d0b0fe72ca7 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 867.267618] env[63028]: DEBUG oslo_concurrency.lockutils [None req-477590c7-7359-44a5-a332-6d0b0fe72ca7 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 867.267618] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-477590c7-7359-44a5-a332-6d0b0fe72ca7 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 867.267618] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8dfb6616-b3e0-4936-94a4-7ecf8606282a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.281875] env[63028]: DEBUG oslo_vmware.api [None req-c457f1a3-7cf4-4d81-ade9-2592889b1fa6 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2735701, 'name': ReconfigVM_Task, 'duration_secs': 0.620013} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.283382] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-c457f1a3-7cf4-4d81-ade9-2592889b1fa6 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 79f4ef22-a589-4d5c-8832-5d5dcdd55561] Reconfigured VM instance instance-00000044 to attach disk [datastore1] 79f4ef22-a589-4d5c-8832-5d5dcdd55561/79f4ef22-a589-4d5c-8832-5d5dcdd55561.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 867.284090] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-477590c7-7359-44a5-a332-6d0b0fe72ca7 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 867.285103] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-477590c7-7359-44a5-a332-6d0b0fe72ca7 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 867.287112] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7e166c8b-2f4e-44b1-b847-876c573af45b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.289228] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9eed8b91-ad55-4997-b4f7-0cf8a4b3099c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.292530] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 867.293014] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Cleaning up deleted instances with incomplete migration {{(pid=63028) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11783}} [ 867.304214] env[63028]: DEBUG oslo_vmware.api [None req-477590c7-7359-44a5-a332-6d0b0fe72ca7 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Waiting for the task: (returnval){ [ 867.304214] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52e23b6c-9d8f-f759-a6cc-03b7d7a6f083" [ 867.304214] env[63028]: _type = "Task" [ 867.304214] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.304589] env[63028]: DEBUG oslo_vmware.api [None req-c457f1a3-7cf4-4d81-ade9-2592889b1fa6 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Waiting for the task: (returnval){ [ 867.304589] env[63028]: value = "task-2735707" [ 867.304589] env[63028]: _type = "Task" [ 867.304589] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.323956] env[63028]: DEBUG oslo_vmware.api [None req-477590c7-7359-44a5-a332-6d0b0fe72ca7 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52e23b6c-9d8f-f759-a6cc-03b7d7a6f083, 'name': SearchDatastore_Task, 'duration_secs': 0.011282} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.326959] env[63028]: DEBUG oslo_vmware.api [None req-c457f1a3-7cf4-4d81-ade9-2592889b1fa6 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2735707, 'name': Rename_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.327633] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1da7c211-aa5e-4413-94b7-c1c607800534 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.334179] env[63028]: DEBUG oslo_vmware.api [None req-477590c7-7359-44a5-a332-6d0b0fe72ca7 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Waiting for the task: (returnval){ [ 867.334179] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52ad10b9-80af-e1a8-3075-e5765c36e0e4" [ 867.334179] env[63028]: _type = "Task" [ 867.334179] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.343159] env[63028]: DEBUG oslo_vmware.api [None req-477590c7-7359-44a5-a332-6d0b0fe72ca7 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52ad10b9-80af-e1a8-3075-e5765c36e0e4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.363824] env[63028]: DEBUG oslo_vmware.api [None req-4c4ff793-44ac-4d71-af67-67f78b3ae605 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': task-2735704, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.082034} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.364126] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-4c4ff793-44ac-4d71-af67-67f78b3ae605 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: a50e1167-d8ed-4099-83c3-a5066ab0be1f] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 867.364933] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1594e885-76d9-4de2-ba4f-4cac23a56d52 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.368754] env[63028]: DEBUG nova.network.neutron [None req-c1571098-c3e1-4a86-83aa-080ecc8a35f5 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: 3b90dbb8-66ce-435f-beae-5464720bfb3e] Successfully created port: ae90a55f-7d07-4ee6-b266-85db7d8ebdad {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 867.391128] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-4c4ff793-44ac-4d71-af67-67f78b3ae605 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: a50e1167-d8ed-4099-83c3-a5066ab0be1f] Reconfiguring VM instance instance-00000045 to attach disk [datastore1] a50e1167-d8ed-4099-83c3-a5066ab0be1f/a50e1167-d8ed-4099-83c3-a5066ab0be1f.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 867.391769] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6e5acf51-fbe1-474b-8b45-4d58c8dbdfa4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.417203] env[63028]: DEBUG oslo_vmware.api [None req-b38f1040-866d-4003-80a9-2af0d3f0d4c9 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Task: {'id': task-2735705, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.1518} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.419189] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-b38f1040-866d-4003-80a9-2af0d3f0d4c9 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 867.419189] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-b38f1040-866d-4003-80a9-2af0d3f0d4c9 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] [instance: 4e859327-ccd3-440e-b884-67f6cdadf97f] Deleted contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 867.419189] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-b38f1040-866d-4003-80a9-2af0d3f0d4c9 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] [instance: 4e859327-ccd3-440e-b884-67f6cdadf97f] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 867.419449] env[63028]: INFO nova.compute.manager [None req-b38f1040-866d-4003-80a9-2af0d3f0d4c9 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] [instance: 4e859327-ccd3-440e-b884-67f6cdadf97f] Took 1.22 seconds to destroy the instance on the hypervisor. [ 867.419497] env[63028]: DEBUG oslo.service.loopingcall [None req-b38f1040-866d-4003-80a9-2af0d3f0d4c9 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 867.419741] env[63028]: DEBUG oslo_vmware.api [None req-4c4ff793-44ac-4d71-af67-67f78b3ae605 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Waiting for the task: (returnval){ [ 867.419741] env[63028]: value = "task-2735708" [ 867.419741] env[63028]: _type = "Task" [ 867.419741] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.419933] env[63028]: DEBUG nova.compute.manager [-] [instance: 4e859327-ccd3-440e-b884-67f6cdadf97f] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 867.420042] env[63028]: DEBUG nova.network.neutron [-] [instance: 4e859327-ccd3-440e-b884-67f6cdadf97f] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 867.436538] env[63028]: DEBUG oslo_concurrency.lockutils [None req-50c05c22-9a17-46a2-aa9e-4dd4c15fda05 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Acquiring lock "refresh_cache-f0ca0d73-d428-4b8c-acac-a80b7b7dd793" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 867.436716] env[63028]: DEBUG oslo_concurrency.lockutils [None req-50c05c22-9a17-46a2-aa9e-4dd4c15fda05 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Acquired lock "refresh_cache-f0ca0d73-d428-4b8c-acac-a80b7b7dd793" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 867.436868] env[63028]: DEBUG nova.network.neutron [None req-50c05c22-9a17-46a2-aa9e-4dd4c15fda05 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: f0ca0d73-d428-4b8c-acac-a80b7b7dd793] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 867.438880] env[63028]: DEBUG oslo_vmware.api [None req-4c4ff793-44ac-4d71-af67-67f78b3ae605 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': task-2735708, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.516502] env[63028]: DEBUG nova.compute.manager [None req-c1571098-c3e1-4a86-83aa-080ecc8a35f5 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: 3b90dbb8-66ce-435f-beae-5464720bfb3e] Start building block device mappings for instance. 
{{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 867.521201] env[63028]: DEBUG oslo_concurrency.lockutils [None req-db2d1a1b-bab5-4d65-92a7-d30b7a67907c tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.016s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 867.521201] env[63028]: DEBUG oslo_concurrency.lockutils [None req-0279905d-506f-46c0-b4c8-a468b6375aab tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.664s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 867.521201] env[63028]: DEBUG oslo_concurrency.lockutils [None req-0279905d-506f-46c0-b4c8-a468b6375aab tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 867.523362] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c4971efa-4f64-4aab-bb96-ae0781bd593b tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 27.721s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 867.556025] env[63028]: INFO nova.scheduler.client.report [None req-0279905d-506f-46c0-b4c8-a468b6375aab tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Deleted allocations for instance 6e0959ac-8fca-47eb-b501-b50a3e9f025a [ 867.645513] env[63028]: DEBUG oslo_vmware.api [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735706, 'name': CopyVirtualDisk_Task} progress is 18%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.802077] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 867.821026] env[63028]: DEBUG oslo_vmware.api [None req-c457f1a3-7cf4-4d81-ade9-2592889b1fa6 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2735707, 'name': Rename_Task, 'duration_secs': 0.208972} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.821646] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-c457f1a3-7cf4-4d81-ade9-2592889b1fa6 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 79f4ef22-a589-4d5c-8832-5d5dcdd55561] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 867.821921] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6eff5fe9-a217-463e-94cd-0f3dd3080eb0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.836309] env[63028]: DEBUG oslo_vmware.api [None req-c457f1a3-7cf4-4d81-ade9-2592889b1fa6 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Waiting for the task: (returnval){ [ 867.836309] env[63028]: value = "task-2735709" [ 867.836309] env[63028]: _type = "Task" [ 867.836309] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.852961] env[63028]: DEBUG oslo_vmware.api [None req-c457f1a3-7cf4-4d81-ade9-2592889b1fa6 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2735709, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.857195] env[63028]: DEBUG oslo_vmware.api [None req-477590c7-7359-44a5-a332-6d0b0fe72ca7 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52ad10b9-80af-e1a8-3075-e5765c36e0e4, 'name': SearchDatastore_Task, 'duration_secs': 0.011333} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.857527] env[63028]: DEBUG oslo_concurrency.lockutils [None req-477590c7-7359-44a5-a332-6d0b0fe72ca7 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 867.857799] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-477590c7-7359-44a5-a332-6d0b0fe72ca7 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] 1d008794-3c1a-46c6-b4eb-3d5441efdb22/1d008794-3c1a-46c6-b4eb-3d5441efdb22.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 867.858085] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4931d1e9-faec-4486-8580-695f0897ef69 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.871453] env[63028]: DEBUG oslo_vmware.api [None req-477590c7-7359-44a5-a332-6d0b0fe72ca7 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Waiting for the task: (returnval){ [ 867.871453] env[63028]: value = "task-2735710" [ 867.871453] env[63028]: _type = "Task" [ 867.871453] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.886105] env[63028]: DEBUG oslo_vmware.api [None req-477590c7-7359-44a5-a332-6d0b0fe72ca7 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2735710, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.937545] env[63028]: DEBUG oslo_vmware.api [None req-4c4ff793-44ac-4d71-af67-67f78b3ae605 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': task-2735708, 'name': ReconfigVM_Task, 'duration_secs': 0.368238} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.937864] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-4c4ff793-44ac-4d71-af67-67f78b3ae605 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: a50e1167-d8ed-4099-83c3-a5066ab0be1f] Reconfigured VM instance instance-00000045 to attach disk [datastore1] a50e1167-d8ed-4099-83c3-a5066ab0be1f/a50e1167-d8ed-4099-83c3-a5066ab0be1f.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 867.938551] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a37a982d-a4f0-4742-aefb-7cd5d29dc285 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.953030] env[63028]: DEBUG oslo_vmware.api [None req-4c4ff793-44ac-4d71-af67-67f78b3ae605 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Waiting for the task: (returnval){ [ 867.953030] env[63028]: value = "task-2735711" [ 867.953030] env[63028]: _type = "Task" [ 867.953030] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.965572] env[63028]: DEBUG oslo_vmware.api [None req-4c4ff793-44ac-4d71-af67-67f78b3ae605 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': task-2735711, 'name': Rename_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.026630] env[63028]: DEBUG nova.network.neutron [None req-50c05c22-9a17-46a2-aa9e-4dd4c15fda05 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: f0ca0d73-d428-4b8c-acac-a80b7b7dd793] Instance cache missing network info. {{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 868.031530] env[63028]: INFO nova.compute.claims [None req-c4971efa-4f64-4aab-bb96-ae0781bd593b tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 868.067950] env[63028]: DEBUG oslo_concurrency.lockutils [None req-0279905d-506f-46c0-b4c8-a468b6375aab tempest-ListServerFiltersTestJSON-1339089997 tempest-ListServerFiltersTestJSON-1339089997-project-member] Lock "6e0959ac-8fca-47eb-b501-b50a3e9f025a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.640s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 868.147794] env[63028]: DEBUG oslo_vmware.api [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735706, 'name': CopyVirtualDisk_Task} progress is 38%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.362139] env[63028]: DEBUG oslo_vmware.api [None req-c457f1a3-7cf4-4d81-ade9-2592889b1fa6 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2735709, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.385309] env[63028]: DEBUG oslo_vmware.api [None req-477590c7-7359-44a5-a332-6d0b0fe72ca7 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2735710, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.399699] env[63028]: DEBUG nova.network.neutron [None req-50c05c22-9a17-46a2-aa9e-4dd4c15fda05 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: f0ca0d73-d428-4b8c-acac-a80b7b7dd793] Updating instance_info_cache with network_info: [{"id": "f5e9674b-220d-4e59-bec7-5b1512efb29e", "address": "fa:16:3e:99:86:70", "network": {"id": "53257b54-f197-4a4e-94bb-23ad6c6b7353", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-575661432-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e2304ce21bf141cab94fb6c342653812", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0954fad3-d24d-496c-83e6-a09d3cb556fc", "external-id": "nsx-vlan-transportzone-216", "segmentation_id": 216, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf5e9674b-22", "ovs_interfaceid": "f5e9674b-220d-4e59-bec7-5b1512efb29e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 868.466160] env[63028]: DEBUG oslo_vmware.api [None req-4c4ff793-44ac-4d71-af67-67f78b3ae605 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': task-2735711, 'name': Rename_Task, 'duration_secs': 0.188226} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.466439] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c4ff793-44ac-4d71-af67-67f78b3ae605 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: a50e1167-d8ed-4099-83c3-a5066ab0be1f] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 868.466709] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e07dfebf-0666-450b-9e12-5c94a90ec9d0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.479235] env[63028]: DEBUG oslo_vmware.api [None req-4c4ff793-44ac-4d71-af67-67f78b3ae605 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Waiting for the task: (returnval){ [ 868.479235] env[63028]: value = "task-2735712" [ 868.479235] env[63028]: _type = "Task" [ 868.479235] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.495821] env[63028]: DEBUG oslo_vmware.api [None req-4c4ff793-44ac-4d71-af67-67f78b3ae605 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': task-2735712, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.538815] env[63028]: INFO nova.compute.resource_tracker [None req-c4971efa-4f64-4aab-bb96-ae0781bd593b tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Updating resource usage from migration b2428275-4704-4b98-81dd-222c963eb311 [ 868.543799] env[63028]: DEBUG nova.compute.manager [None req-c1571098-c3e1-4a86-83aa-080ecc8a35f5 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: 3b90dbb8-66ce-435f-beae-5464720bfb3e] Start spawning the instance on the hypervisor. {{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 868.582033] env[63028]: DEBUG nova.virt.hardware [None req-c1571098-c3e1-4a86-83aa-080ecc8a35f5 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 868.582033] env[63028]: DEBUG nova.virt.hardware [None req-c1571098-c3e1-4a86-83aa-080ecc8a35f5 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 868.582033] env[63028]: DEBUG nova.virt.hardware [None req-c1571098-c3e1-4a86-83aa-080ecc8a35f5 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 868.582033] env[63028]: DEBUG nova.virt.hardware [None req-c1571098-c3e1-4a86-83aa-080ecc8a35f5 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 868.582033] env[63028]: DEBUG nova.virt.hardware [None req-c1571098-c3e1-4a86-83aa-080ecc8a35f5 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 868.582033] env[63028]: DEBUG nova.virt.hardware [None 
req-c1571098-c3e1-4a86-83aa-080ecc8a35f5 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 868.582033] env[63028]: DEBUG nova.virt.hardware [None req-c1571098-c3e1-4a86-83aa-080ecc8a35f5 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 868.582672] env[63028]: DEBUG nova.virt.hardware [None req-c1571098-c3e1-4a86-83aa-080ecc8a35f5 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 868.583078] env[63028]: DEBUG nova.virt.hardware [None req-c1571098-c3e1-4a86-83aa-080ecc8a35f5 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 868.583399] env[63028]: DEBUG nova.virt.hardware [None req-c1571098-c3e1-4a86-83aa-080ecc8a35f5 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 868.583713] env[63028]: DEBUG nova.virt.hardware [None req-c1571098-c3e1-4a86-83aa-080ecc8a35f5 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 868.585445] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6931b5e-5b99-4677-ae88-f588a66d627a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.603431] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8769989-db50-455d-961b-59f98ae15cab {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.651532] env[63028]: DEBUG oslo_vmware.api [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735706, 'name': CopyVirtualDisk_Task} progress is 57%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.861150] env[63028]: DEBUG oslo_vmware.api [None req-c457f1a3-7cf4-4d81-ade9-2592889b1fa6 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2735709, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.890199] env[63028]: DEBUG oslo_vmware.api [None req-477590c7-7359-44a5-a332-6d0b0fe72ca7 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2735710, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.899339] env[63028]: DEBUG nova.compute.manager [req-72b18436-86bd-4ef7-b60d-4a6a2ea94dc5 req-13f995b9-0987-4ec1-b5c6-9080903ceec4 service nova] [instance: f0ca0d73-d428-4b8c-acac-a80b7b7dd793] Received event network-changed-f5e9674b-220d-4e59-bec7-5b1512efb29e {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 868.899568] env[63028]: DEBUG nova.compute.manager [req-72b18436-86bd-4ef7-b60d-4a6a2ea94dc5 req-13f995b9-0987-4ec1-b5c6-9080903ceec4 service nova] [instance: f0ca0d73-d428-4b8c-acac-a80b7b7dd793] Refreshing instance network info cache due to event network-changed-f5e9674b-220d-4e59-bec7-5b1512efb29e. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 868.899826] env[63028]: DEBUG oslo_concurrency.lockutils [req-72b18436-86bd-4ef7-b60d-4a6a2ea94dc5 req-13f995b9-0987-4ec1-b5c6-9080903ceec4 service nova] Acquiring lock "refresh_cache-f0ca0d73-d428-4b8c-acac-a80b7b7dd793" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 868.902764] env[63028]: DEBUG oslo_concurrency.lockutils [None req-50c05c22-9a17-46a2-aa9e-4dd4c15fda05 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Releasing lock "refresh_cache-f0ca0d73-d428-4b8c-acac-a80b7b7dd793" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 868.903104] env[63028]: DEBUG nova.compute.manager [None req-50c05c22-9a17-46a2-aa9e-4dd4c15fda05 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: f0ca0d73-d428-4b8c-acac-a80b7b7dd793] Instance network_info: |[{"id": "f5e9674b-220d-4e59-bec7-5b1512efb29e", "address": "fa:16:3e:99:86:70", "network": {"id": "53257b54-f197-4a4e-94bb-23ad6c6b7353", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-575661432-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e2304ce21bf141cab94fb6c342653812", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0954fad3-d24d-496c-83e6-a09d3cb556fc", "external-id": "nsx-vlan-transportzone-216", "segmentation_id": 216, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf5e9674b-22", "ovs_interfaceid": "f5e9674b-220d-4e59-bec7-5b1512efb29e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 868.903809] env[63028]: DEBUG 
oslo_concurrency.lockutils [req-72b18436-86bd-4ef7-b60d-4a6a2ea94dc5 req-13f995b9-0987-4ec1-b5c6-9080903ceec4 service nova] Acquired lock "refresh_cache-f0ca0d73-d428-4b8c-acac-a80b7b7dd793" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 868.904051] env[63028]: DEBUG nova.network.neutron [req-72b18436-86bd-4ef7-b60d-4a6a2ea94dc5 req-13f995b9-0987-4ec1-b5c6-9080903ceec4 service nova] [instance: f0ca0d73-d428-4b8c-acac-a80b7b7dd793] Refreshing network info cache for port f5e9674b-220d-4e59-bec7-5b1512efb29e {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 868.905562] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-50c05c22-9a17-46a2-aa9e-4dd4c15fda05 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: f0ca0d73-d428-4b8c-acac-a80b7b7dd793] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:99:86:70', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0954fad3-d24d-496c-83e6-a09d3cb556fc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f5e9674b-220d-4e59-bec7-5b1512efb29e', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 868.913378] env[63028]: DEBUG oslo.service.loopingcall [None req-50c05c22-9a17-46a2-aa9e-4dd4c15fda05 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 868.920548] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f0ca0d73-d428-4b8c-acac-a80b7b7dd793] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 868.921350] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-25b86fc9-f5e9-47c7-bce5-be38637597d1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.951956] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 868.951956] env[63028]: value = "task-2735713" [ 868.951956] env[63028]: _type = "Task" [ 868.951956] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.970843] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735713, 'name': CreateVM_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.000903] env[63028]: DEBUG oslo_vmware.api [None req-4c4ff793-44ac-4d71-af67-67f78b3ae605 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': task-2735712, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.019962] env[63028]: DEBUG nova.network.neutron [-] [instance: 4e859327-ccd3-440e-b884-67f6cdadf97f] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 869.148339] env[63028]: DEBUG oslo_vmware.api [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735706, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.170417] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f05f35ff-1c30-4b85-8e9a-d3853066cf09 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.186854] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d7b6fa3-74bb-4acc-92bd-7c55ad55ba84 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.228156] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e99836be-7902-4acf-a1fa-5133da195946 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.238929] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d2e18b6-b927-4a93-a2dd-6e9eb43660a8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.258399] env[63028]: DEBUG nova.compute.provider_tree [None req-c4971efa-4f64-4aab-bb96-ae0781bd593b tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 869.355781] env[63028]: DEBUG oslo_vmware.api [None req-c457f1a3-7cf4-4d81-ade9-2592889b1fa6 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2735709, 'name': PowerOnVM_Task, 'duration_secs': 1.2307} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.356387] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-c457f1a3-7cf4-4d81-ade9-2592889b1fa6 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 79f4ef22-a589-4d5c-8832-5d5dcdd55561] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 869.356685] env[63028]: INFO nova.compute.manager [None req-c457f1a3-7cf4-4d81-ade9-2592889b1fa6 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 79f4ef22-a589-4d5c-8832-5d5dcdd55561] Took 11.68 seconds to spawn the instance on the hypervisor. 
[ 869.356878] env[63028]: DEBUG nova.compute.manager [None req-c457f1a3-7cf4-4d81-ade9-2592889b1fa6 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 79f4ef22-a589-4d5c-8832-5d5dcdd55561] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 869.358337] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-597cbea0-d1f6-402a-b400-3b47ef9d8ade {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.373103] env[63028]: DEBUG nova.network.neutron [req-72b18436-86bd-4ef7-b60d-4a6a2ea94dc5 req-13f995b9-0987-4ec1-b5c6-9080903ceec4 service nova] [instance: f0ca0d73-d428-4b8c-acac-a80b7b7dd793] Updated VIF entry in instance network info cache for port f5e9674b-220d-4e59-bec7-5b1512efb29e. {{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 869.373103] env[63028]: DEBUG nova.network.neutron [req-72b18436-86bd-4ef7-b60d-4a6a2ea94dc5 req-13f995b9-0987-4ec1-b5c6-9080903ceec4 service nova] [instance: f0ca0d73-d428-4b8c-acac-a80b7b7dd793] Updating instance_info_cache with network_info: [{"id": "f5e9674b-220d-4e59-bec7-5b1512efb29e", "address": "fa:16:3e:99:86:70", "network": {"id": "53257b54-f197-4a4e-94bb-23ad6c6b7353", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-575661432-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e2304ce21bf141cab94fb6c342653812", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0954fad3-d24d-496c-83e6-a09d3cb556fc", "external-id": "nsx-vlan-transportzone-216", "segmentation_id": 216, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf5e9674b-22", "ovs_interfaceid": "f5e9674b-220d-4e59-bec7-5b1512efb29e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 869.386279] env[63028]: DEBUG oslo_vmware.api [None req-477590c7-7359-44a5-a332-6d0b0fe72ca7 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2735710, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.465776] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735713, 'name': CreateVM_Task} progress is 25%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.493013] env[63028]: DEBUG oslo_vmware.api [None req-4c4ff793-44ac-4d71-af67-67f78b3ae605 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': task-2735712, 'name': PowerOnVM_Task, 'duration_secs': 0.593376} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.493302] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c4ff793-44ac-4d71-af67-67f78b3ae605 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: a50e1167-d8ed-4099-83c3-a5066ab0be1f] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 869.493513] env[63028]: INFO nova.compute.manager [None req-4c4ff793-44ac-4d71-af67-67f78b3ae605 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: a50e1167-d8ed-4099-83c3-a5066ab0be1f] Took 9.17 seconds to spawn the instance on the hypervisor. [ 869.493694] env[63028]: DEBUG nova.compute.manager [None req-4c4ff793-44ac-4d71-af67-67f78b3ae605 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: a50e1167-d8ed-4099-83c3-a5066ab0be1f] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 869.494525] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75596613-14ba-4bad-8100-a3eb5ce03c4e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.524984] env[63028]: INFO nova.compute.manager [-] [instance: 4e859327-ccd3-440e-b884-67f6cdadf97f] Took 2.10 seconds to deallocate network for instance. [ 869.566985] env[63028]: DEBUG nova.network.neutron [None req-c1571098-c3e1-4a86-83aa-080ecc8a35f5 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: 3b90dbb8-66ce-435f-beae-5464720bfb3e] Successfully updated port: ae90a55f-7d07-4ee6-b266-85db7d8ebdad {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 869.648462] env[63028]: DEBUG oslo_vmware.api [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735706, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.762671] env[63028]: DEBUG nova.scheduler.client.report [None req-c4971efa-4f64-4aab-bb96-ae0781bd593b tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 869.884593] env[63028]: DEBUG oslo_concurrency.lockutils [req-72b18436-86bd-4ef7-b60d-4a6a2ea94dc5 req-13f995b9-0987-4ec1-b5c6-9080903ceec4 service nova] Releasing lock "refresh_cache-f0ca0d73-d428-4b8c-acac-a80b7b7dd793" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 869.884748] env[63028]: DEBUG nova.compute.manager [req-72b18436-86bd-4ef7-b60d-4a6a2ea94dc5 req-13f995b9-0987-4ec1-b5c6-9080903ceec4 service nova] [instance: 4e859327-ccd3-440e-b884-67f6cdadf97f] Received event network-vif-deleted-e7cad309-0a34-4148-9d0b-e47549d8689d {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 869.885588] env[63028]: INFO nova.compute.manager [req-72b18436-86bd-4ef7-b60d-4a6a2ea94dc5 req-13f995b9-0987-4ec1-b5c6-9080903ceec4 service nova] [instance: 4e859327-ccd3-440e-b884-67f6cdadf97f] Neutron deleted interface e7cad309-0a34-4148-9d0b-e47549d8689d; detaching it from the instance and deleting it from the info cache [ 869.885588] env[63028]: DEBUG nova.network.neutron [req-72b18436-86bd-4ef7-b60d-4a6a2ea94dc5 req-13f995b9-0987-4ec1-b5c6-9080903ceec4 service nova] [instance: 4e859327-ccd3-440e-b884-67f6cdadf97f] Updating instance_info_cache with network_info: [{"id": "a9813eb4-922c-4f70-8c74-e7e5f11caf7d", "address": "fa:16:3e:7b:11:b1", "network": {"id": "56fd7781-6150-4604-b524-dccaff73dc3a", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1118372812", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.59", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "97060d5fb7e8454eadaf5dc9b426a248", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2907cce-d529-4809-af05-d29397bed211", "external-id": "nsx-vlan-transportzone-427", "segmentation_id": 427, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa9813eb4-92", "ovs_interfaceid": "a9813eb4-922c-4f70-8c74-e7e5f11caf7d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 869.890471] env[63028]: INFO nova.compute.manager [None req-c457f1a3-7cf4-4d81-ade9-2592889b1fa6 tempest-ServersTestJSON-699810364 
tempest-ServersTestJSON-699810364-project-member] [instance: 79f4ef22-a589-4d5c-8832-5d5dcdd55561] Took 48.95 seconds to build instance. [ 869.896963] env[63028]: DEBUG oslo_vmware.api [None req-477590c7-7359-44a5-a332-6d0b0fe72ca7 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2735710, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.964619] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735713, 'name': CreateVM_Task} progress is 25%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.017020] env[63028]: INFO nova.compute.manager [None req-4c4ff793-44ac-4d71-af67-67f78b3ae605 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: a50e1167-d8ed-4099-83c3-a5066ab0be1f] Took 46.33 seconds to build instance. [ 870.031327] env[63028]: DEBUG oslo_concurrency.lockutils [None req-b38f1040-866d-4003-80a9-2af0d3f0d4c9 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 870.070264] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c1571098-c3e1-4a86-83aa-080ecc8a35f5 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Acquiring lock "refresh_cache-3b90dbb8-66ce-435f-beae-5464720bfb3e" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 870.070451] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c1571098-c3e1-4a86-83aa-080ecc8a35f5 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Acquired lock "refresh_cache-3b90dbb8-66ce-435f-beae-5464720bfb3e" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 870.070598] env[63028]: DEBUG nova.network.neutron [None req-c1571098-c3e1-4a86-83aa-080ecc8a35f5 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: 3b90dbb8-66ce-435f-beae-5464720bfb3e] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 870.146934] env[63028]: DEBUG oslo_vmware.api [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735706, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.519004} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.147225] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c9823cf3-c81e-4b18-855a-a01f46d8c790/c9823cf3-c81e-4b18-855a-a01f46d8c790.vmdk to [datastore1] 9773ad95-1894-471d-8020-c7952eac4be4/9773ad95-1894-471d-8020-c7952eac4be4.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 870.148038] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4b9148c-ada4-419b-8820-bc93475bf631 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.171304] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: 9773ad95-1894-471d-8020-c7952eac4be4] Reconfiguring VM instance instance-00000043 to attach disk [datastore1] 9773ad95-1894-471d-8020-c7952eac4be4/9773ad95-1894-471d-8020-c7952eac4be4.vmdk or device None with type streamOptimized {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 870.171654] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-042169d0-12b7-4459-92ae-f15bfb0b4e5b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.191985] env[63028]: DEBUG oslo_vmware.api [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Waiting for the task: (returnval){ [ 870.191985] env[63028]: value = "task-2735714" [ 870.191985] env[63028]: _type = "Task" [ 870.191985] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.199982] env[63028]: DEBUG oslo_vmware.api [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735714, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.268788] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c4971efa-4f64-4aab-bb96-ae0781bd593b tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.745s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 870.269082] env[63028]: INFO nova.compute.manager [None req-c4971efa-4f64-4aab-bb96-ae0781bd593b tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Migrating [ 870.277547] env[63028]: DEBUG oslo_concurrency.lockutils [None req-05595fcf-c280-4b38-9166-e6728566c5d0 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.983s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 870.279908] env[63028]: INFO nova.compute.claims [None req-05595fcf-c280-4b38-9166-e6728566c5d0 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: a2f7d7c6-7931-4b21-a29c-bb9965577210] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 870.385858] env[63028]: DEBUG oslo_vmware.api [None req-477590c7-7359-44a5-a332-6d0b0fe72ca7 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2735710, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.124576} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.386160] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-477590c7-7359-44a5-a332-6d0b0fe72ca7 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] 1d008794-3c1a-46c6-b4eb-3d5441efdb22/1d008794-3c1a-46c6-b4eb-3d5441efdb22.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 870.386412] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-477590c7-7359-44a5-a332-6d0b0fe72ca7 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 1d008794-3c1a-46c6-b4eb-3d5441efdb22] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 870.386736] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d142076f-2a0a-44a0-ab1e-dbf0cc8e5f6f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.392148] env[63028]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4f1717c1-4028-46d6-a213-3926ab374c4e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.392190] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c457f1a3-7cf4-4d81-ade9-2592889b1fa6 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Lock "79f4ef22-a589-4d5c-8832-5d5dcdd55561" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 58.880s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 870.397707] env[63028]: DEBUG oslo_vmware.api [None req-477590c7-7359-44a5-a332-6d0b0fe72ca7 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Waiting for the task: (returnval){ [ 870.397707] env[63028]: value = "task-2735715" [ 870.397707] env[63028]: _type = "Task" [ 870.397707] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.404533] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-878964f7-f967-433f-8db0-68cb0b1e82b3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.421623] env[63028]: DEBUG oslo_vmware.api [None req-477590c7-7359-44a5-a332-6d0b0fe72ca7 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2735715, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.443854] env[63028]: DEBUG nova.compute.manager [req-72b18436-86bd-4ef7-b60d-4a6a2ea94dc5 req-13f995b9-0987-4ec1-b5c6-9080903ceec4 service nova] [instance: 4e859327-ccd3-440e-b884-67f6cdadf97f] Detach interface failed, port_id=e7cad309-0a34-4148-9d0b-e47549d8689d, reason: Instance 4e859327-ccd3-440e-b884-67f6cdadf97f could not be found. 
{{(pid=63028) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 870.444160] env[63028]: DEBUG nova.compute.manager [req-72b18436-86bd-4ef7-b60d-4a6a2ea94dc5 req-13f995b9-0987-4ec1-b5c6-9080903ceec4 service nova] [instance: 4e859327-ccd3-440e-b884-67f6cdadf97f] Received event network-vif-deleted-a9813eb4-922c-4f70-8c74-e7e5f11caf7d {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 870.444348] env[63028]: INFO nova.compute.manager [req-72b18436-86bd-4ef7-b60d-4a6a2ea94dc5 req-13f995b9-0987-4ec1-b5c6-9080903ceec4 service nova] [instance: 4e859327-ccd3-440e-b884-67f6cdadf97f] Neutron deleted interface a9813eb4-922c-4f70-8c74-e7e5f11caf7d; detaching it from the instance and deleting it from the info cache [ 870.444533] env[63028]: DEBUG nova.network.neutron [req-72b18436-86bd-4ef7-b60d-4a6a2ea94dc5 req-13f995b9-0987-4ec1-b5c6-9080903ceec4 service nova] [instance: 4e859327-ccd3-440e-b884-67f6cdadf97f] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 870.464613] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735713, 'name': CreateVM_Task, 'duration_secs': 1.206522} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.464705] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f0ca0d73-d428-4b8c-acac-a80b7b7dd793] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 870.465504] env[63028]: DEBUG oslo_concurrency.lockutils [None req-50c05c22-9a17-46a2-aa9e-4dd4c15fda05 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 870.465600] env[63028]: DEBUG oslo_concurrency.lockutils [None req-50c05c22-9a17-46a2-aa9e-4dd4c15fda05 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 870.465960] env[63028]: DEBUG oslo_concurrency.lockutils [None req-50c05c22-9a17-46a2-aa9e-4dd4c15fda05 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 870.466154] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7f15686d-f43b-4a4f-8c95-aec846b921eb {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.471212] env[63028]: DEBUG oslo_vmware.api [None req-50c05c22-9a17-46a2-aa9e-4dd4c15fda05 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Waiting for the task: (returnval){ [ 870.471212] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52ed9ffc-6e81-ee38-bc96-65df17b1336f" [ 870.471212] env[63028]: _type = "Task" [ 870.471212] 
env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.481027] env[63028]: DEBUG oslo_vmware.api [None req-50c05c22-9a17-46a2-aa9e-4dd4c15fda05 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52ed9ffc-6e81-ee38-bc96-65df17b1336f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.519494] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4c4ff793-44ac-4d71-af67-67f78b3ae605 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Lock "a50e1167-d8ed-4099-83c3-a5066ab0be1f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 58.927s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 870.603838] env[63028]: DEBUG nova.network.neutron [None req-c1571098-c3e1-4a86-83aa-080ecc8a35f5 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: 3b90dbb8-66ce-435f-beae-5464720bfb3e] Instance cache missing network info. {{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 870.705761] env[63028]: DEBUG oslo_vmware.api [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735714, 'name': ReconfigVM_Task, 'duration_secs': 0.28749} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.706122] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: 9773ad95-1894-471d-8020-c7952eac4be4] Reconfigured VM instance instance-00000043 to attach disk [datastore1] 9773ad95-1894-471d-8020-c7952eac4be4/9773ad95-1894-471d-8020-c7952eac4be4.vmdk or device None with type streamOptimized {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 870.706877] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-598e923f-f1be-4db2-95e0-0d674a344c2e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.713612] env[63028]: DEBUG oslo_vmware.api [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Waiting for the task: (returnval){ [ 870.713612] env[63028]: value = "task-2735716" [ 870.713612] env[63028]: _type = "Task" [ 870.713612] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.723703] env[63028]: DEBUG oslo_vmware.api [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735716, 'name': Rename_Task} progress is 5%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.801179] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c4971efa-4f64-4aab-bb96-ae0781bd593b tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Acquiring lock "refresh_cache-c06813c4-472d-4bf9-84ec-0d01306bcd48" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 870.801822] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c4971efa-4f64-4aab-bb96-ae0781bd593b tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Acquired lock "refresh_cache-c06813c4-472d-4bf9-84ec-0d01306bcd48" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 870.801822] env[63028]: DEBUG nova.network.neutron [None req-c4971efa-4f64-4aab-bb96-ae0781bd593b tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 870.826812] env[63028]: DEBUG nova.network.neutron [None req-c1571098-c3e1-4a86-83aa-080ecc8a35f5 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: 3b90dbb8-66ce-435f-beae-5464720bfb3e] Updating instance_info_cache with network_info: [{"id": "ae90a55f-7d07-4ee6-b266-85db7d8ebdad", "address": "fa:16:3e:53:db:2a", "network": {"id": "9c32515c-66bf-4a5f-9011-975fe2f6b264", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1101331936-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8efc6d89903c454eb39136a76e0adef5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92fe29b3-0907-453d-aabb-5559c4bd7c0f", "external-id": "nsx-vlan-transportzone-482", "segmentation_id": 482, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapae90a55f-7d", "ovs_interfaceid": "ae90a55f-7d07-4ee6-b266-85db7d8ebdad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 870.910428] env[63028]: DEBUG oslo_vmware.api [None req-477590c7-7359-44a5-a332-6d0b0fe72ca7 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2735715, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074169} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.910720] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-477590c7-7359-44a5-a332-6d0b0fe72ca7 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 1d008794-3c1a-46c6-b4eb-3d5441efdb22] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 870.912847] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a8e943f-6ede-40a1-be4f-f3f590803180 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.935628] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-477590c7-7359-44a5-a332-6d0b0fe72ca7 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 1d008794-3c1a-46c6-b4eb-3d5441efdb22] Reconfiguring VM instance instance-00000046 to attach disk [datastore1] 1d008794-3c1a-46c6-b4eb-3d5441efdb22/1d008794-3c1a-46c6-b4eb-3d5441efdb22.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 870.936027] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d57154ad-3000-4ed3-8393-dfedccdab590 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.953358] env[63028]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-59218f82-1dd6-4217-afc8-a8d931d8d0f6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.964589] env[63028]: DEBUG oslo_vmware.api [None req-477590c7-7359-44a5-a332-6d0b0fe72ca7 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Waiting for the task: (returnval){ [ 870.964589] env[63028]: value = "task-2735717" [ 870.964589] env[63028]: _type = "Task" [ 870.964589] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.967104] env[63028]: DEBUG nova.compute.manager [req-f5ba4e95-23c0-47df-9141-f719664a516d req-a7162ab0-2a2e-4508-9130-fe6046724445 service nova] [instance: 3b90dbb8-66ce-435f-beae-5464720bfb3e] Received event network-vif-plugged-ae90a55f-7d07-4ee6-b266-85db7d8ebdad {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 870.967850] env[63028]: DEBUG oslo_concurrency.lockutils [req-f5ba4e95-23c0-47df-9141-f719664a516d req-a7162ab0-2a2e-4508-9130-fe6046724445 service nova] Acquiring lock "3b90dbb8-66ce-435f-beae-5464720bfb3e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 870.968298] env[63028]: DEBUG oslo_concurrency.lockutils [req-f5ba4e95-23c0-47df-9141-f719664a516d req-a7162ab0-2a2e-4508-9130-fe6046724445 service nova] Lock "3b90dbb8-66ce-435f-beae-5464720bfb3e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 870.968558] env[63028]: DEBUG oslo_concurrency.lockutils [req-f5ba4e95-23c0-47df-9141-f719664a516d req-a7162ab0-2a2e-4508-9130-fe6046724445 service nova] Lock "3b90dbb8-66ce-435f-beae-5464720bfb3e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 870.969423] env[63028]: DEBUG nova.compute.manager [req-f5ba4e95-23c0-47df-9141-f719664a516d req-a7162ab0-2a2e-4508-9130-fe6046724445 service nova] [instance: 3b90dbb8-66ce-435f-beae-5464720bfb3e] No waiting events found dispatching network-vif-plugged-ae90a55f-7d07-4ee6-b266-85db7d8ebdad {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 870.969654] env[63028]: WARNING nova.compute.manager [req-f5ba4e95-23c0-47df-9141-f719664a516d req-a7162ab0-2a2e-4508-9130-fe6046724445 service nova] [instance: 3b90dbb8-66ce-435f-beae-5464720bfb3e] Received unexpected event network-vif-plugged-ae90a55f-7d07-4ee6-b266-85db7d8ebdad for instance with vm_state building and task_state spawning. [ 870.970097] env[63028]: DEBUG nova.compute.manager [req-f5ba4e95-23c0-47df-9141-f719664a516d req-a7162ab0-2a2e-4508-9130-fe6046724445 service nova] [instance: 3b90dbb8-66ce-435f-beae-5464720bfb3e] Received event network-changed-ae90a55f-7d07-4ee6-b266-85db7d8ebdad {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 870.970097] env[63028]: DEBUG nova.compute.manager [req-f5ba4e95-23c0-47df-9141-f719664a516d req-a7162ab0-2a2e-4508-9130-fe6046724445 service nova] [instance: 3b90dbb8-66ce-435f-beae-5464720bfb3e] Refreshing instance network info cache due to event network-changed-ae90a55f-7d07-4ee6-b266-85db7d8ebdad. 
{{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 870.970187] env[63028]: DEBUG oslo_concurrency.lockutils [req-f5ba4e95-23c0-47df-9141-f719664a516d req-a7162ab0-2a2e-4508-9130-fe6046724445 service nova] Acquiring lock "refresh_cache-3b90dbb8-66ce-435f-beae-5464720bfb3e" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 870.973490] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfb826cd-ad51-4dd7-b0d4-3f76bec13cd6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.996918] env[63028]: DEBUG oslo_vmware.api [None req-477590c7-7359-44a5-a332-6d0b0fe72ca7 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2735717, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.000736] env[63028]: DEBUG oslo_vmware.api [None req-50c05c22-9a17-46a2-aa9e-4dd4c15fda05 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52ed9ffc-6e81-ee38-bc96-65df17b1336f, 'name': SearchDatastore_Task, 'duration_secs': 0.017524} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.001061] env[63028]: DEBUG oslo_concurrency.lockutils [None req-50c05c22-9a17-46a2-aa9e-4dd4c15fda05 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 871.002719] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-50c05c22-9a17-46a2-aa9e-4dd4c15fda05 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: f0ca0d73-d428-4b8c-acac-a80b7b7dd793] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 871.002719] env[63028]: DEBUG oslo_concurrency.lockutils [None req-50c05c22-9a17-46a2-aa9e-4dd4c15fda05 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 871.002719] env[63028]: DEBUG oslo_concurrency.lockutils [None req-50c05c22-9a17-46a2-aa9e-4dd4c15fda05 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 871.002719] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-50c05c22-9a17-46a2-aa9e-4dd4c15fda05 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Creating directory with path [datastore1] 
devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 871.002719] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-247addac-8831-47ca-8e23-086da5367af8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.021764] env[63028]: DEBUG nova.compute.manager [req-72b18436-86bd-4ef7-b60d-4a6a2ea94dc5 req-13f995b9-0987-4ec1-b5c6-9080903ceec4 service nova] [instance: 4e859327-ccd3-440e-b884-67f6cdadf97f] Detach interface failed, port_id=a9813eb4-922c-4f70-8c74-e7e5f11caf7d, reason: Instance 4e859327-ccd3-440e-b884-67f6cdadf97f could not be found. {{(pid=63028) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 871.029831] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-50c05c22-9a17-46a2-aa9e-4dd4c15fda05 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 871.030275] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-50c05c22-9a17-46a2-aa9e-4dd4c15fda05 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 871.030830] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e22c393c-63e2-49f9-92d7-131decc65f28 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.038156] env[63028]: DEBUG oslo_vmware.api [None req-50c05c22-9a17-46a2-aa9e-4dd4c15fda05 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Waiting for the task: (returnval){ [ 871.038156] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]526802d2-435f-ba75-cd95-0ed884857e20" [ 871.038156] env[63028]: _type = "Task" [ 871.038156] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.048787] env[63028]: DEBUG oslo_vmware.api [None req-50c05c22-9a17-46a2-aa9e-4dd4c15fda05 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]526802d2-435f-ba75-cd95-0ed884857e20, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.224295] env[63028]: DEBUG oslo_vmware.api [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735716, 'name': Rename_Task, 'duration_secs': 0.15036} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.224557] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: 9773ad95-1894-471d-8020-c7952eac4be4] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 871.224790] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2b95db53-4f70-49a0-aee0-75633f65c633 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.230401] env[63028]: DEBUG oslo_vmware.api [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Waiting for the task: (returnval){ [ 871.230401] env[63028]: value = "task-2735718" [ 871.230401] env[63028]: _type = "Task" [ 871.230401] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.237759] env[63028]: DEBUG oslo_vmware.api [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735718, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.332426] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c1571098-c3e1-4a86-83aa-080ecc8a35f5 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Releasing lock "refresh_cache-3b90dbb8-66ce-435f-beae-5464720bfb3e" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 871.332748] env[63028]: DEBUG nova.compute.manager [None req-c1571098-c3e1-4a86-83aa-080ecc8a35f5 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: 3b90dbb8-66ce-435f-beae-5464720bfb3e] Instance network_info: |[{"id": "ae90a55f-7d07-4ee6-b266-85db7d8ebdad", "address": "fa:16:3e:53:db:2a", "network": {"id": "9c32515c-66bf-4a5f-9011-975fe2f6b264", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1101331936-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8efc6d89903c454eb39136a76e0adef5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92fe29b3-0907-453d-aabb-5559c4bd7c0f", "external-id": "nsx-vlan-transportzone-482", "segmentation_id": 482, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapae90a55f-7d", "ovs_interfaceid": "ae90a55f-7d07-4ee6-b266-85db7d8ebdad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 871.333314] env[63028]: DEBUG oslo_concurrency.lockutils [req-f5ba4e95-23c0-47df-9141-f719664a516d req-a7162ab0-2a2e-4508-9130-fe6046724445 
service nova] Acquired lock "refresh_cache-3b90dbb8-66ce-435f-beae-5464720bfb3e" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 871.333619] env[63028]: DEBUG nova.network.neutron [req-f5ba4e95-23c0-47df-9141-f719664a516d req-a7162ab0-2a2e-4508-9130-fe6046724445 service nova] [instance: 3b90dbb8-66ce-435f-beae-5464720bfb3e] Refreshing network info cache for port ae90a55f-7d07-4ee6-b266-85db7d8ebdad {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 871.334991] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c1571098-c3e1-4a86-83aa-080ecc8a35f5 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: 3b90dbb8-66ce-435f-beae-5464720bfb3e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:53:db:2a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '92fe29b3-0907-453d-aabb-5559c4bd7c0f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ae90a55f-7d07-4ee6-b266-85db7d8ebdad', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 871.344145] env[63028]: DEBUG oslo.service.loopingcall [None req-c1571098-c3e1-4a86-83aa-080ecc8a35f5 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 871.349883] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3b90dbb8-66ce-435f-beae-5464720bfb3e] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 871.350365] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-37ae696a-8277-4613-9776-2d2680608556 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.374319] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 871.374319] env[63028]: value = "task-2735719" [ 871.374319] env[63028]: _type = "Task" [ 871.374319] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.384871] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735719, 'name': CreateVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.483612] env[63028]: DEBUG oslo_vmware.api [None req-477590c7-7359-44a5-a332-6d0b0fe72ca7 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2735717, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.551519] env[63028]: DEBUG oslo_vmware.api [None req-50c05c22-9a17-46a2-aa9e-4dd4c15fda05 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]526802d2-435f-ba75-cd95-0ed884857e20, 'name': SearchDatastore_Task, 'duration_secs': 0.009804} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.557645] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1ed13349-d8f4-4e93-aeef-0d5a6d3fad9b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.567021] env[63028]: DEBUG oslo_vmware.api [None req-50c05c22-9a17-46a2-aa9e-4dd4c15fda05 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Waiting for the task: (returnval){ [ 871.567021] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52acea45-1ca5-4fce-f211-74a864a9ebda" [ 871.567021] env[63028]: _type = "Task" [ 871.567021] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.575581] env[63028]: DEBUG oslo_vmware.api [None req-50c05c22-9a17-46a2-aa9e-4dd4c15fda05 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52acea45-1ca5-4fce-f211-74a864a9ebda, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.581236] env[63028]: DEBUG oslo_concurrency.lockutils [None req-50b48420-9bf6-4b47-bba3-9fd80cfa788c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Acquiring lock "addcf94a-1a56-49ff-8adb-3ce7f2d1e09e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 871.581236] env[63028]: DEBUG oslo_concurrency.lockutils [None req-50b48420-9bf6-4b47-bba3-9fd80cfa788c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Lock "addcf94a-1a56-49ff-8adb-3ce7f2d1e09e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 871.621828] env[63028]: DEBUG nova.network.neutron [None req-c4971efa-4f64-4aab-bb96-ae0781bd593b tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Updating instance_info_cache with network_info: [{"id": "e9be02f8-7ea6-45eb-a1cb-65fb95285caf", "address": "fa:16:3e:cc:b1:42", "network": {"id": "49670a42-3caa-4492-8b32-f16f3fe77f4b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1158317332-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.229", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b4dcaef840f940bda057d0371cdc5adb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4df917f7-847a-4c0e-b0e3-69a52e4a1554", "external-id": "cl2-zone-457", "segmentation_id": 457, 
"bound_drivers": {"0": "nsxv3"}}, "devname": "tape9be02f8-7e", "ovs_interfaceid": "e9be02f8-7ea6-45eb-a1cb-65fb95285caf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 871.743546] env[63028]: DEBUG oslo_vmware.api [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735718, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.865508] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6c5537f-97ad-4c50-afa3-b243ff91eb30 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.874351] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b01147a-9672-4084-a705-5014f8520cbf {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.889801] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735719, 'name': CreateVM_Task, 'duration_secs': 0.320657} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.928186] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3b90dbb8-66ce-435f-beae-5464720bfb3e] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 871.929955] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c1571098-c3e1-4a86-83aa-080ecc8a35f5 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 871.930212] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c1571098-c3e1-4a86-83aa-080ecc8a35f5 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 871.930635] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c1571098-c3e1-4a86-83aa-080ecc8a35f5 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 871.931763] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-841a839b-7059-46c8-a9c8-1f0e087796f9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.935958] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d170de52-e8a1-422a-9c73-2dd3a08995a8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.942141] env[63028]: DEBUG oslo_vmware.api 
[None req-c1571098-c3e1-4a86-83aa-080ecc8a35f5 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Waiting for the task: (returnval){ [ 871.942141] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52ebf43a-8107-f47c-ca54-d3a404ae573e" [ 871.942141] env[63028]: _type = "Task" [ 871.942141] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.951244] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-080c39a2-39d2-437f-b060-3955a32938c2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.964289] env[63028]: DEBUG oslo_vmware.api [None req-c1571098-c3e1-4a86-83aa-080ecc8a35f5 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52ebf43a-8107-f47c-ca54-d3a404ae573e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.977202] env[63028]: DEBUG nova.compute.provider_tree [None req-05595fcf-c280-4b38-9166-e6728566c5d0 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 871.992715] env[63028]: DEBUG oslo_vmware.api [None req-477590c7-7359-44a5-a332-6d0b0fe72ca7 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2735717, 'name': ReconfigVM_Task, 'duration_secs': 0.707258} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.993668] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-477590c7-7359-44a5-a332-6d0b0fe72ca7 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 1d008794-3c1a-46c6-b4eb-3d5441efdb22] Reconfigured VM instance instance-00000046 to attach disk [datastore1] 1d008794-3c1a-46c6-b4eb-3d5441efdb22/1d008794-3c1a-46c6-b4eb-3d5441efdb22.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 871.994322] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3ca1bcd3-0cf8-4bee-bfc0-17669e4fbcda {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.001399] env[63028]: DEBUG oslo_vmware.api [None req-477590c7-7359-44a5-a332-6d0b0fe72ca7 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Waiting for the task: (returnval){ [ 872.001399] env[63028]: value = "task-2735720" [ 872.001399] env[63028]: _type = "Task" [ 872.001399] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.010060] env[63028]: DEBUG oslo_vmware.api [None req-477590c7-7359-44a5-a332-6d0b0fe72ca7 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2735720, 'name': Rename_Task} progress is 5%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.076413] env[63028]: DEBUG oslo_vmware.api [None req-50c05c22-9a17-46a2-aa9e-4dd4c15fda05 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52acea45-1ca5-4fce-f211-74a864a9ebda, 'name': SearchDatastore_Task, 'duration_secs': 0.01169} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.079370] env[63028]: DEBUG oslo_concurrency.lockutils [None req-50c05c22-9a17-46a2-aa9e-4dd4c15fda05 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 872.079670] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-50c05c22-9a17-46a2-aa9e-4dd4c15fda05 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] f0ca0d73-d428-4b8c-acac-a80b7b7dd793/f0ca0d73-d428-4b8c-acac-a80b7b7dd793.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 872.079945] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1fa9f3ad-2071-4fe6-a47d-479dd7d7aa8b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.083272] env[63028]: DEBUG nova.compute.manager [None req-50b48420-9bf6-4b47-bba3-9fd80cfa788c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: addcf94a-1a56-49ff-8adb-3ce7f2d1e09e] Starting instance... {{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 872.087707] env[63028]: DEBUG oslo_vmware.api [None req-50c05c22-9a17-46a2-aa9e-4dd4c15fda05 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Waiting for the task: (returnval){ [ 872.087707] env[63028]: value = "task-2735721" [ 872.087707] env[63028]: _type = "Task" [ 872.087707] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.095808] env[63028]: DEBUG oslo_vmware.api [None req-50c05c22-9a17-46a2-aa9e-4dd4c15fda05 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2735721, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.128662] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c4971efa-4f64-4aab-bb96-ae0781bd593b tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Releasing lock "refresh_cache-c06813c4-472d-4bf9-84ec-0d01306bcd48" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 872.156557] env[63028]: DEBUG nova.network.neutron [req-f5ba4e95-23c0-47df-9141-f719664a516d req-a7162ab0-2a2e-4508-9130-fe6046724445 service nova] [instance: 3b90dbb8-66ce-435f-beae-5464720bfb3e] Updated VIF entry in instance network info cache for port ae90a55f-7d07-4ee6-b266-85db7d8ebdad. {{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 872.156987] env[63028]: DEBUG nova.network.neutron [req-f5ba4e95-23c0-47df-9141-f719664a516d req-a7162ab0-2a2e-4508-9130-fe6046724445 service nova] [instance: 3b90dbb8-66ce-435f-beae-5464720bfb3e] Updating instance_info_cache with network_info: [{"id": "ae90a55f-7d07-4ee6-b266-85db7d8ebdad", "address": "fa:16:3e:53:db:2a", "network": {"id": "9c32515c-66bf-4a5f-9011-975fe2f6b264", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1101331936-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8efc6d89903c454eb39136a76e0adef5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92fe29b3-0907-453d-aabb-5559c4bd7c0f", "external-id": "nsx-vlan-transportzone-482", "segmentation_id": 482, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapae90a55f-7d", "ovs_interfaceid": "ae90a55f-7d07-4ee6-b266-85db7d8ebdad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 872.246418] env[63028]: DEBUG oslo_vmware.api [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735718, 'name': PowerOnVM_Task, 'duration_secs': 0.532129} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.246724] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: 9773ad95-1894-471d-8020-c7952eac4be4] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 872.246949] env[63028]: INFO nova.compute.manager [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: 9773ad95-1894-471d-8020-c7952eac4be4] Took 17.28 seconds to spawn the instance on the hypervisor. 
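Note on the recurring pattern in the records above and below: each VMware operation (CreateVM_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task) is started as a vCenter task and then driven by oslo.vmware, which emits the "Waiting for the task", "_poll_task ... progress is N%" and "completed successfully" lines. The following is a minimal, illustrative sketch of that calling pattern using the public oslo_vmware.api.VMwareAPISession interface only; it is not Nova's vm_util code, and the host, credentials, vm_ref and the helper name power_on are placeholders, not values taken from this log.

    # Sketch only: drive a vCenter task the way the log entries above show.
    from oslo_vmware import api

    def power_on(session, vm_ref):
        """Start PowerOnVM_Task and block until oslo.vmware reports completion.

        Mirrors the 'Invoking VirtualMachine.PowerOnVM_Task' ->
        'Waiting for the task' -> 'completed successfully' sequence.
        """
        # invoke_api issues the SOAP call against the managed object and
        # returns a task reference.
        task_ref = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        # wait_for_task polls the task (the _poll_task records) and raises
        # if vCenter reports an error state; on success it returns task info.
        return session.wait_for_task(task_ref)

    # Placeholder session; task_poll_interval controls how often polling runs.
    # create_session=False avoids an immediate login attempt in this sketch.
    session = api.VMwareAPISession('vc.example.test', 'user', 'secret',
                                   api_retry_count=10, task_poll_interval=0.5,
                                   create_session=False)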
[ 872.247180] env[63028]: DEBUG nova.compute.manager [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: 9773ad95-1894-471d-8020-c7952eac4be4] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 872.248037] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3bb1125-1f21-4fd8-9ef4-9e42e4c64317 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.455580] env[63028]: DEBUG oslo_vmware.api [None req-c1571098-c3e1-4a86-83aa-080ecc8a35f5 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52ebf43a-8107-f47c-ca54-d3a404ae573e, 'name': SearchDatastore_Task, 'duration_secs': 0.025487} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.455855] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c1571098-c3e1-4a86-83aa-080ecc8a35f5 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 872.456106] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c1571098-c3e1-4a86-83aa-080ecc8a35f5 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: 3b90dbb8-66ce-435f-beae-5464720bfb3e] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 872.456346] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c1571098-c3e1-4a86-83aa-080ecc8a35f5 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 872.456496] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c1571098-c3e1-4a86-83aa-080ecc8a35f5 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 872.456673] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-c1571098-c3e1-4a86-83aa-080ecc8a35f5 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 872.456944] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8b4290d8-7c04-445a-a421-23c51ae06481 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.472052] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-c1571098-c3e1-4a86-83aa-080ecc8a35f5 tempest-DeleteServersTestJSON-1541253569 
tempest-DeleteServersTestJSON-1541253569-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 872.472269] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c1571098-c3e1-4a86-83aa-080ecc8a35f5 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 872.472969] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6af7717f-c2da-4b0c-b2e6-ee7d72e9c1af {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.478077] env[63028]: DEBUG oslo_vmware.api [None req-c1571098-c3e1-4a86-83aa-080ecc8a35f5 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Waiting for the task: (returnval){ [ 872.478077] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52ae68a6-c1dc-02f8-90a2-49e89456fbc1" [ 872.478077] env[63028]: _type = "Task" [ 872.478077] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.481687] env[63028]: DEBUG nova.scheduler.client.report [None req-05595fcf-c280-4b38-9166-e6728566c5d0 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 872.489481] env[63028]: DEBUG oslo_vmware.api [None req-c1571098-c3e1-4a86-83aa-080ecc8a35f5 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52ae68a6-c1dc-02f8-90a2-49e89456fbc1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.510244] env[63028]: DEBUG oslo_vmware.api [None req-477590c7-7359-44a5-a332-6d0b0fe72ca7 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2735720, 'name': Rename_Task, 'duration_secs': 0.247099} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.510469] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-477590c7-7359-44a5-a332-6d0b0fe72ca7 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 1d008794-3c1a-46c6-b4eb-3d5441efdb22] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 872.510696] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bbdb45c6-1614-4c17-bf5c-705a8834bff2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.516401] env[63028]: DEBUG oslo_vmware.api [None req-477590c7-7359-44a5-a332-6d0b0fe72ca7 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Waiting for the task: (returnval){ [ 872.516401] env[63028]: value = "task-2735722" [ 872.516401] env[63028]: _type = "Task" [ 872.516401] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.523909] env[63028]: DEBUG oslo_vmware.api [None req-477590c7-7359-44a5-a332-6d0b0fe72ca7 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2735722, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.602788] env[63028]: DEBUG oslo_vmware.api [None req-50c05c22-9a17-46a2-aa9e-4dd4c15fda05 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2735721, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.606306] env[63028]: DEBUG oslo_concurrency.lockutils [None req-50b48420-9bf6-4b47-bba3-9fd80cfa788c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 872.660242] env[63028]: DEBUG oslo_concurrency.lockutils [req-f5ba4e95-23c0-47df-9141-f719664a516d req-a7162ab0-2a2e-4508-9130-fe6046724445 service nova] Releasing lock "refresh_cache-3b90dbb8-66ce-435f-beae-5464720bfb3e" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 872.660593] env[63028]: DEBUG nova.compute.manager [req-f5ba4e95-23c0-47df-9141-f719664a516d req-a7162ab0-2a2e-4508-9130-fe6046724445 service nova] [instance: a50e1167-d8ed-4099-83c3-a5066ab0be1f] Received event network-changed-abc41dea-8b6f-4cf7-b02f-21996a0aaf8d {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 872.660775] env[63028]: DEBUG nova.compute.manager [req-f5ba4e95-23c0-47df-9141-f719664a516d req-a7162ab0-2a2e-4508-9130-fe6046724445 service nova] [instance: a50e1167-d8ed-4099-83c3-a5066ab0be1f] Refreshing instance network info cache due to event network-changed-abc41dea-8b6f-4cf7-b02f-21996a0aaf8d. 
{{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 872.661403] env[63028]: DEBUG oslo_concurrency.lockutils [req-f5ba4e95-23c0-47df-9141-f719664a516d req-a7162ab0-2a2e-4508-9130-fe6046724445 service nova] Acquiring lock "refresh_cache-a50e1167-d8ed-4099-83c3-a5066ab0be1f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 872.661403] env[63028]: DEBUG oslo_concurrency.lockutils [req-f5ba4e95-23c0-47df-9141-f719664a516d req-a7162ab0-2a2e-4508-9130-fe6046724445 service nova] Acquired lock "refresh_cache-a50e1167-d8ed-4099-83c3-a5066ab0be1f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 872.661403] env[63028]: DEBUG nova.network.neutron [req-f5ba4e95-23c0-47df-9141-f719664a516d req-a7162ab0-2a2e-4508-9130-fe6046724445 service nova] [instance: a50e1167-d8ed-4099-83c3-a5066ab0be1f] Refreshing network info cache for port abc41dea-8b6f-4cf7-b02f-21996a0aaf8d {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 872.768268] env[63028]: INFO nova.compute.manager [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: 9773ad95-1894-471d-8020-c7952eac4be4] Took 52.70 seconds to build instance. [ 872.990584] env[63028]: DEBUG oslo_concurrency.lockutils [None req-05595fcf-c280-4b38-9166-e6728566c5d0 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.713s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 872.991157] env[63028]: DEBUG nova.compute.manager [None req-05595fcf-c280-4b38-9166-e6728566c5d0 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: a2f7d7c6-7931-4b21-a29c-bb9965577210] Start building networks asynchronously for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 872.993752] env[63028]: DEBUG oslo_vmware.api [None req-c1571098-c3e1-4a86-83aa-080ecc8a35f5 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52ae68a6-c1dc-02f8-90a2-49e89456fbc1, 'name': SearchDatastore_Task, 'duration_secs': 0.03249} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.994052] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c1f44d9d-ffc6-445a-8ea2-fd14b45f2d79 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.700s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 872.994370] env[63028]: DEBUG nova.objects.instance [None req-c1f44d9d-ffc6-445a-8ea2-fd14b45f2d79 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Lazy-loading 'resources' on Instance uuid 70888889-4965-47ab-ad47-59f1c1286bd8 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 872.996491] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-08686726-94ad-408a-8469-5083b26a124e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.003911] env[63028]: DEBUG oslo_vmware.api [None req-c1571098-c3e1-4a86-83aa-080ecc8a35f5 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Waiting for the task: (returnval){ [ 873.003911] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52fa5e76-0a2d-5562-58b5-de85c5a0d2c8" [ 873.003911] env[63028]: _type = "Task" [ 873.003911] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.014071] env[63028]: DEBUG oslo_vmware.api [None req-c1571098-c3e1-4a86-83aa-080ecc8a35f5 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52fa5e76-0a2d-5562-58b5-de85c5a0d2c8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.027146] env[63028]: DEBUG oslo_vmware.api [None req-477590c7-7359-44a5-a332-6d0b0fe72ca7 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2735722, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.102391] env[63028]: DEBUG oslo_vmware.api [None req-50c05c22-9a17-46a2-aa9e-4dd4c15fda05 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2735721, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.005933} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.102644] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-50c05c22-9a17-46a2-aa9e-4dd4c15fda05 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] f0ca0d73-d428-4b8c-acac-a80b7b7dd793/f0ca0d73-d428-4b8c-acac-a80b7b7dd793.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 873.102856] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-50c05c22-9a17-46a2-aa9e-4dd4c15fda05 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: f0ca0d73-d428-4b8c-acac-a80b7b7dd793] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 873.103119] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e40357a2-8ccd-47ad-810b-a8cb79b33017 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.110040] env[63028]: DEBUG oslo_vmware.api [None req-50c05c22-9a17-46a2-aa9e-4dd4c15fda05 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Waiting for the task: (returnval){ [ 873.110040] env[63028]: value = "task-2735723" [ 873.110040] env[63028]: _type = "Task" [ 873.110040] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.119168] env[63028]: DEBUG oslo_vmware.api [None req-50c05c22-9a17-46a2-aa9e-4dd4c15fda05 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2735723, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.270621] env[63028]: DEBUG oslo_concurrency.lockutils [None req-061db4da-ef03-4a90-9d26-7bc23db02692 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Lock "9773ad95-1894-471d-8020-c7952eac4be4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 69.315s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 873.394296] env[63028]: DEBUG nova.network.neutron [req-f5ba4e95-23c0-47df-9141-f719664a516d req-a7162ab0-2a2e-4508-9130-fe6046724445 service nova] [instance: a50e1167-d8ed-4099-83c3-a5066ab0be1f] Updated VIF entry in instance network info cache for port abc41dea-8b6f-4cf7-b02f-21996a0aaf8d. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 873.394502] env[63028]: DEBUG nova.network.neutron [req-f5ba4e95-23c0-47df-9141-f719664a516d req-a7162ab0-2a2e-4508-9130-fe6046724445 service nova] [instance: a50e1167-d8ed-4099-83c3-a5066ab0be1f] Updating instance_info_cache with network_info: [{"id": "abc41dea-8b6f-4cf7-b02f-21996a0aaf8d", "address": "fa:16:3e:35:71:27", "network": {"id": "063de6d9-43e6-4adb-88dc-d4fe17058488", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-659483097-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.228", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68de7445caeb4381b9e68c685ccb5e0b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b356db78-99c7-4464-822c-fc7e193f7878", "external-id": "nsx-vlan-transportzone-231", "segmentation_id": 231, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapabc41dea-8b", "ovs_interfaceid": "abc41dea-8b6f-4cf7-b02f-21996a0aaf8d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 873.499105] env[63028]: DEBUG nova.compute.utils [None req-05595fcf-c280-4b38-9166-e6728566c5d0 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 873.500458] env[63028]: DEBUG nova.compute.manager [None req-05595fcf-c280-4b38-9166-e6728566c5d0 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: a2f7d7c6-7931-4b21-a29c-bb9965577210] Allocating IP information in the background. {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 873.500632] env[63028]: DEBUG nova.network.neutron [None req-05595fcf-c280-4b38-9166-e6728566c5d0 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: a2f7d7c6-7931-4b21-a29c-bb9965577210] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 873.516569] env[63028]: DEBUG oslo_vmware.api [None req-c1571098-c3e1-4a86-83aa-080ecc8a35f5 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52fa5e76-0a2d-5562-58b5-de85c5a0d2c8, 'name': SearchDatastore_Task, 'duration_secs': 0.02553} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.517592] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c1571098-c3e1-4a86-83aa-080ecc8a35f5 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 873.517592] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1571098-c3e1-4a86-83aa-080ecc8a35f5 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] 3b90dbb8-66ce-435f-beae-5464720bfb3e/3b90dbb8-66ce-435f-beae-5464720bfb3e.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 873.517592] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0e7759cd-c567-42a3-a61c-34226228d3b4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.529339] env[63028]: DEBUG oslo_vmware.api [None req-477590c7-7359-44a5-a332-6d0b0fe72ca7 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2735722, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.533939] env[63028]: DEBUG oslo_vmware.api [None req-c1571098-c3e1-4a86-83aa-080ecc8a35f5 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Waiting for the task: (returnval){ [ 873.533939] env[63028]: value = "task-2735724" [ 873.533939] env[63028]: _type = "Task" [ 873.533939] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.541704] env[63028]: DEBUG oslo_vmware.api [None req-c1571098-c3e1-4a86-83aa-080ecc8a35f5 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2735724, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.543622] env[63028]: DEBUG nova.policy [None req-05595fcf-c280-4b38-9166-e6728566c5d0 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b4dfc1e9e9e74072949517c7b930c147', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '43b7c1be3c4343a4b4f288a355170873', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 873.622146] env[63028]: DEBUG oslo_vmware.api [None req-50c05c22-9a17-46a2-aa9e-4dd4c15fda05 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2735723, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.139685} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.622409] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-50c05c22-9a17-46a2-aa9e-4dd4c15fda05 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: f0ca0d73-d428-4b8c-acac-a80b7b7dd793] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 873.623221] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf0fcc94-7e5e-44e8-b75d-5a1fe9c3e571 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.646153] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-50c05c22-9a17-46a2-aa9e-4dd4c15fda05 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: f0ca0d73-d428-4b8c-acac-a80b7b7dd793] Reconfiguring VM instance instance-00000047 to attach disk [datastore1] f0ca0d73-d428-4b8c-acac-a80b7b7dd793/f0ca0d73-d428-4b8c-acac-a80b7b7dd793.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 873.651156] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-86e02397-a011-4cc0-a1c5-c4e765165cc8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.666663] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef11d6ea-ee5d-427c-b07e-3d5326ef623f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.687358] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c4971efa-4f64-4aab-bb96-ae0781bd593b tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Updating instance 'c06813c4-472d-4bf9-84ec-0d01306bcd48' progress to 0 {{(pid=63028) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 873.694562] env[63028]: DEBUG oslo_vmware.api [None req-50c05c22-9a17-46a2-aa9e-4dd4c15fda05 
tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Waiting for the task: (returnval){ [ 873.694562] env[63028]: value = "task-2735725" [ 873.694562] env[63028]: _type = "Task" [ 873.694562] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.703685] env[63028]: DEBUG oslo_vmware.api [None req-50c05c22-9a17-46a2-aa9e-4dd4c15fda05 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2735725, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.899107] env[63028]: DEBUG oslo_concurrency.lockutils [req-f5ba4e95-23c0-47df-9141-f719664a516d req-a7162ab0-2a2e-4508-9130-fe6046724445 service nova] Releasing lock "refresh_cache-a50e1167-d8ed-4099-83c3-a5066ab0be1f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 873.900402] env[63028]: DEBUG nova.network.neutron [None req-05595fcf-c280-4b38-9166-e6728566c5d0 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: a2f7d7c6-7931-4b21-a29c-bb9965577210] Successfully created port: e8d9e159-a748-455b-ba7e-dad129aab175 {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 873.924816] env[63028]: DEBUG oslo_concurrency.lockutils [None req-35aa04db-5b6b-491e-aca8-e98c4728591d tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Acquiring lock "9773ad95-1894-471d-8020-c7952eac4be4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 873.928354] env[63028]: DEBUG oslo_concurrency.lockutils [None req-35aa04db-5b6b-491e-aca8-e98c4728591d tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Lock "9773ad95-1894-471d-8020-c7952eac4be4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.004s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 873.928579] env[63028]: DEBUG oslo_concurrency.lockutils [None req-35aa04db-5b6b-491e-aca8-e98c4728591d tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Acquiring lock "9773ad95-1894-471d-8020-c7952eac4be4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 873.928773] env[63028]: DEBUG oslo_concurrency.lockutils [None req-35aa04db-5b6b-491e-aca8-e98c4728591d tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Lock "9773ad95-1894-471d-8020-c7952eac4be4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 873.928958] env[63028]: DEBUG oslo_concurrency.lockutils [None req-35aa04db-5b6b-491e-aca8-e98c4728591d tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Lock "9773ad95-1894-471d-8020-c7952eac4be4-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 873.932669] env[63028]: INFO nova.compute.manager [None req-35aa04db-5b6b-491e-aca8-e98c4728591d tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: 9773ad95-1894-471d-8020-c7952eac4be4] Terminating instance [ 874.005919] env[63028]: DEBUG nova.compute.manager [None req-05595fcf-c280-4b38-9166-e6728566c5d0 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: a2f7d7c6-7931-4b21-a29c-bb9965577210] Start building block device mappings for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 874.029540] env[63028]: DEBUG oslo_vmware.api [None req-477590c7-7359-44a5-a332-6d0b0fe72ca7 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2735722, 'name': PowerOnVM_Task, 'duration_secs': 1.040528} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.029807] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-477590c7-7359-44a5-a332-6d0b0fe72ca7 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 1d008794-3c1a-46c6-b4eb-3d5441efdb22] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 874.030035] env[63028]: INFO nova.compute.manager [None req-477590c7-7359-44a5-a332-6d0b0fe72ca7 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 1d008794-3c1a-46c6-b4eb-3d5441efdb22] Took 11.01 seconds to spawn the instance on the hypervisor. [ 874.030225] env[63028]: DEBUG nova.compute.manager [None req-477590c7-7359-44a5-a332-6d0b0fe72ca7 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 1d008794-3c1a-46c6-b4eb-3d5441efdb22] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 874.030997] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-295a2a85-cdc1-42a8-910b-aa7512c259d6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.052219] env[63028]: DEBUG oslo_vmware.api [None req-c1571098-c3e1-4a86-83aa-080ecc8a35f5 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2735724, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.141957] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4ada488-a167-40c7-b437-e4bc7b59d0ab {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.151610] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-417c65fd-05f1-4f09-b773-ac3a754a9dae {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.185297] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6edd3e4b-6f30-4746-965f-2a26d8cf6164 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.193426] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5932577-1c73-4904-acff-c7e09e280cee {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.199075] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4971efa-4f64-4aab-bb96-ae0781bd593b tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 874.199416] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c6d0f396-6eb0-4633-866e-766bcd6091b3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.211614] env[63028]: DEBUG oslo_vmware.api [None req-50c05c22-9a17-46a2-aa9e-4dd4c15fda05 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2735725, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.220695] env[63028]: DEBUG nova.compute.provider_tree [None req-c1f44d9d-ffc6-445a-8ea2-fd14b45f2d79 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 874.222785] env[63028]: DEBUG oslo_vmware.api [None req-c4971efa-4f64-4aab-bb96-ae0781bd593b tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Waiting for the task: (returnval){ [ 874.222785] env[63028]: value = "task-2735726" [ 874.222785] env[63028]: _type = "Task" [ 874.222785] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.232179] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4971efa-4f64-4aab-bb96-ae0781bd593b tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] VM already powered off {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 874.232403] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c4971efa-4f64-4aab-bb96-ae0781bd593b tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Updating instance 'c06813c4-472d-4bf9-84ec-0d01306bcd48' progress to 17 {{(pid=63028) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 874.436640] env[63028]: DEBUG nova.compute.manager [None req-35aa04db-5b6b-491e-aca8-e98c4728591d tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: 9773ad95-1894-471d-8020-c7952eac4be4] Start destroying the instance on the hypervisor. {{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 874.436872] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-35aa04db-5b6b-491e-aca8-e98c4728591d tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: 9773ad95-1894-471d-8020-c7952eac4be4] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 874.437955] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-744aaea0-8d36-4911-aca2-9710f8a62532 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.447395] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-35aa04db-5b6b-491e-aca8-e98c4728591d tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: 9773ad95-1894-471d-8020-c7952eac4be4] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 874.447787] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4bbca6eb-ec17-4e38-8199-24a197e66822 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.455197] env[63028]: DEBUG oslo_vmware.api [None req-35aa04db-5b6b-491e-aca8-e98c4728591d tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Waiting for the task: (returnval){ [ 874.455197] env[63028]: value = "task-2735727" [ 874.455197] env[63028]: _type = "Task" [ 874.455197] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.466172] env[63028]: DEBUG oslo_vmware.api [None req-35aa04db-5b6b-491e-aca8-e98c4728591d tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735727, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.551907] env[63028]: DEBUG oslo_vmware.api [None req-c1571098-c3e1-4a86-83aa-080ecc8a35f5 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2735724, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.851756} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.556105] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1571098-c3e1-4a86-83aa-080ecc8a35f5 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] 3b90dbb8-66ce-435f-beae-5464720bfb3e/3b90dbb8-66ce-435f-beae-5464720bfb3e.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 874.556353] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c1571098-c3e1-4a86-83aa-080ecc8a35f5 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: 3b90dbb8-66ce-435f-beae-5464720bfb3e] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 874.556826] env[63028]: INFO nova.compute.manager [None req-477590c7-7359-44a5-a332-6d0b0fe72ca7 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 1d008794-3c1a-46c6-b4eb-3d5441efdb22] Took 43.87 seconds to build instance. [ 874.560030] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bb99ec61-8fb1-4689-a7b9-e03383a997aa {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.566687] env[63028]: DEBUG oslo_vmware.api [None req-c1571098-c3e1-4a86-83aa-080ecc8a35f5 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Waiting for the task: (returnval){ [ 874.566687] env[63028]: value = "task-2735728" [ 874.566687] env[63028]: _type = "Task" [ 874.566687] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.576758] env[63028]: DEBUG oslo_vmware.api [None req-c1571098-c3e1-4a86-83aa-080ecc8a35f5 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2735728, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.708909] env[63028]: DEBUG oslo_vmware.api [None req-50c05c22-9a17-46a2-aa9e-4dd4c15fda05 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2735725, 'name': ReconfigVM_Task, 'duration_secs': 0.711622} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.709167] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-50c05c22-9a17-46a2-aa9e-4dd4c15fda05 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: f0ca0d73-d428-4b8c-acac-a80b7b7dd793] Reconfigured VM instance instance-00000047 to attach disk [datastore1] f0ca0d73-d428-4b8c-acac-a80b7b7dd793/f0ca0d73-d428-4b8c-acac-a80b7b7dd793.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 874.709833] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-96253a7a-5a41-42c2-b833-c1f442cb6b01 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.716009] env[63028]: DEBUG oslo_vmware.api [None req-50c05c22-9a17-46a2-aa9e-4dd4c15fda05 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Waiting for the task: (returnval){ [ 874.716009] env[63028]: value = "task-2735729" [ 874.716009] env[63028]: _type = "Task" [ 874.716009] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.724832] env[63028]: DEBUG nova.scheduler.client.report [None req-c1f44d9d-ffc6-445a-8ea2-fd14b45f2d79 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 874.727936] env[63028]: DEBUG oslo_vmware.api [None req-50c05c22-9a17-46a2-aa9e-4dd4c15fda05 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2735729, 'name': Rename_Task} progress is 5%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.739037] env[63028]: DEBUG nova.virt.hardware [None req-c4971efa-4f64-4aab-bb96-ae0781bd593b tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 874.739037] env[63028]: DEBUG nova.virt.hardware [None req-c4971efa-4f64-4aab-bb96-ae0781bd593b tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 874.739037] env[63028]: DEBUG nova.virt.hardware [None req-c4971efa-4f64-4aab-bb96-ae0781bd593b tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 874.739294] env[63028]: DEBUG nova.virt.hardware [None req-c4971efa-4f64-4aab-bb96-ae0781bd593b tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 874.739449] env[63028]: DEBUG nova.virt.hardware [None req-c4971efa-4f64-4aab-bb96-ae0781bd593b tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 874.739597] env[63028]: DEBUG nova.virt.hardware [None req-c4971efa-4f64-4aab-bb96-ae0781bd593b tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 874.739800] env[63028]: DEBUG nova.virt.hardware [None req-c4971efa-4f64-4aab-bb96-ae0781bd593b tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 874.739957] env[63028]: DEBUG nova.virt.hardware [None req-c4971efa-4f64-4aab-bb96-ae0781bd593b tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 874.740512] env[63028]: DEBUG nova.virt.hardware [None req-c4971efa-4f64-4aab-bb96-ae0781bd593b tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Got 1 
possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 874.740728] env[63028]: DEBUG nova.virt.hardware [None req-c4971efa-4f64-4aab-bb96-ae0781bd593b tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 874.740914] env[63028]: DEBUG nova.virt.hardware [None req-c4971efa-4f64-4aab-bb96-ae0781bd593b tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 874.746733] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6aecc128-c892-426c-891c-e94ffc8f0b40 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.763120] env[63028]: DEBUG oslo_vmware.api [None req-c4971efa-4f64-4aab-bb96-ae0781bd593b tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Waiting for the task: (returnval){ [ 874.763120] env[63028]: value = "task-2735730" [ 874.763120] env[63028]: _type = "Task" [ 874.763120] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.773929] env[63028]: DEBUG oslo_vmware.api [None req-c4971efa-4f64-4aab-bb96-ae0781bd593b tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2735730, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.965628] env[63028]: DEBUG oslo_vmware.api [None req-35aa04db-5b6b-491e-aca8-e98c4728591d tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735727, 'name': PowerOffVM_Task, 'duration_secs': 0.338877} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.965886] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-35aa04db-5b6b-491e-aca8-e98c4728591d tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: 9773ad95-1894-471d-8020-c7952eac4be4] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 874.966071] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-35aa04db-5b6b-491e-aca8-e98c4728591d tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: 9773ad95-1894-471d-8020-c7952eac4be4] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 874.966328] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-10459874-46af-4da6-a548-ea814d89b2f8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.017422] env[63028]: DEBUG nova.compute.manager [None req-05595fcf-c280-4b38-9166-e6728566c5d0 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: a2f7d7c6-7931-4b21-a29c-bb9965577210] Start spawning the instance on the hypervisor. 
{{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 875.044044] env[63028]: DEBUG nova.virt.hardware [None req-05595fcf-c280-4b38-9166-e6728566c5d0 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 875.044307] env[63028]: DEBUG nova.virt.hardware [None req-05595fcf-c280-4b38-9166-e6728566c5d0 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 875.044467] env[63028]: DEBUG nova.virt.hardware [None req-05595fcf-c280-4b38-9166-e6728566c5d0 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 875.044649] env[63028]: DEBUG nova.virt.hardware [None req-05595fcf-c280-4b38-9166-e6728566c5d0 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 875.044866] env[63028]: DEBUG nova.virt.hardware [None req-05595fcf-c280-4b38-9166-e6728566c5d0 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 875.045053] env[63028]: DEBUG nova.virt.hardware [None req-05595fcf-c280-4b38-9166-e6728566c5d0 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 875.045278] env[63028]: DEBUG nova.virt.hardware [None req-05595fcf-c280-4b38-9166-e6728566c5d0 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 875.045459] env[63028]: DEBUG nova.virt.hardware [None req-05595fcf-c280-4b38-9166-e6728566c5d0 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 875.045631] env[63028]: DEBUG nova.virt.hardware [None req-05595fcf-c280-4b38-9166-e6728566c5d0 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 875.045797] env[63028]: DEBUG nova.virt.hardware [None req-05595fcf-c280-4b38-9166-e6728566c5d0 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 875.046052] env[63028]: DEBUG nova.virt.hardware [None req-05595fcf-c280-4b38-9166-e6728566c5d0 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 875.046909] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f8a5862-107f-4e98-ad6c-e33860f8df04 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.054895] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ca6daee-7f67-4ca1-9711-ce5f843cca95 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.070354] env[63028]: DEBUG oslo_concurrency.lockutils [None req-477590c7-7359-44a5-a332-6d0b0fe72ca7 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Lock "1d008794-3c1a-46c6-b4eb-3d5441efdb22" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 62.661s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 875.078730] env[63028]: DEBUG oslo_vmware.api [None req-c1571098-c3e1-4a86-83aa-080ecc8a35f5 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2735728, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.156126} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.079065] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c1571098-c3e1-4a86-83aa-080ecc8a35f5 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: 3b90dbb8-66ce-435f-beae-5464720bfb3e] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 875.079938] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-512acffc-46f8-4828-ade1-668ba933782b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.102902] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-c1571098-c3e1-4a86-83aa-080ecc8a35f5 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: 3b90dbb8-66ce-435f-beae-5464720bfb3e] Reconfiguring VM instance instance-00000048 to attach disk [datastore2] 3b90dbb8-66ce-435f-beae-5464720bfb3e/3b90dbb8-66ce-435f-beae-5464720bfb3e.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 875.103233] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a6df8d3d-0dce-476a-893a-eb6c71775793 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.122748] env[63028]: DEBUG oslo_vmware.api [None req-c1571098-c3e1-4a86-83aa-080ecc8a35f5 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Waiting for the task: (returnval){ [ 875.122748] env[63028]: value = "task-2735732" [ 875.122748] env[63028]: _type = "Task" [ 875.122748] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.133412] env[63028]: DEBUG oslo_vmware.api [None req-c1571098-c3e1-4a86-83aa-080ecc8a35f5 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2735732, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.225353] env[63028]: DEBUG oslo_vmware.api [None req-50c05c22-9a17-46a2-aa9e-4dd4c15fda05 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2735729, 'name': Rename_Task, 'duration_secs': 0.193519} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.225678] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-50c05c22-9a17-46a2-aa9e-4dd4c15fda05 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: f0ca0d73-d428-4b8c-acac-a80b7b7dd793] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 875.225953] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e9681b9b-e159-4d93-9a94-5ae161c073dd {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.229461] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c1f44d9d-ffc6-445a-8ea2-fd14b45f2d79 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.235s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 875.232716] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c2c8e9d3-c866-44bd-b81e-c77e80e04316 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.858s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 875.234214] env[63028]: INFO nova.compute.claims [None req-c2c8e9d3-c866-44bd-b81e-c77e80e04316 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 70147f2f-0b5e-4343-84e4-8bc195a5485d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 875.237237] env[63028]: DEBUG oslo_vmware.api [None req-50c05c22-9a17-46a2-aa9e-4dd4c15fda05 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Waiting for the task: (returnval){ [ 875.237237] env[63028]: value = "task-2735733" [ 875.237237] env[63028]: _type = "Task" [ 875.237237] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.247885] env[63028]: DEBUG oslo_vmware.api [None req-50c05c22-9a17-46a2-aa9e-4dd4c15fda05 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2735733, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.261714] env[63028]: INFO nova.scheduler.client.report [None req-c1f44d9d-ffc6-445a-8ea2-fd14b45f2d79 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Deleted allocations for instance 70888889-4965-47ab-ad47-59f1c1286bd8 [ 875.278600] env[63028]: DEBUG oslo_vmware.api [None req-c4971efa-4f64-4aab-bb96-ae0781bd593b tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2735730, 'name': ReconfigVM_Task, 'duration_secs': 0.368504} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.278600] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c4971efa-4f64-4aab-bb96-ae0781bd593b tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Updating instance 'c06813c4-472d-4bf9-84ec-0d01306bcd48' progress to 33 {{(pid=63028) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 875.388486] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-35aa04db-5b6b-491e-aca8-e98c4728591d tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: 9773ad95-1894-471d-8020-c7952eac4be4] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 875.388642] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-35aa04db-5b6b-491e-aca8-e98c4728591d tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: 9773ad95-1894-471d-8020-c7952eac4be4] Deleting contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 875.388935] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-35aa04db-5b6b-491e-aca8-e98c4728591d tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Deleting the datastore file [datastore1] 9773ad95-1894-471d-8020-c7952eac4be4 {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 875.389310] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e8c2b696-529b-4b7b-abad-6c9572643995 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.396604] env[63028]: DEBUG oslo_vmware.api [None req-35aa04db-5b6b-491e-aca8-e98c4728591d tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Waiting for the task: (returnval){ [ 875.396604] env[63028]: value = "task-2735734" [ 875.396604] env[63028]: _type = "Task" [ 875.396604] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.404606] env[63028]: DEBUG oslo_vmware.api [None req-35aa04db-5b6b-491e-aca8-e98c4728591d tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735734, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.639423] env[63028]: DEBUG oslo_vmware.api [None req-c1571098-c3e1-4a86-83aa-080ecc8a35f5 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2735732, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.706057] env[63028]: DEBUG nova.compute.manager [req-050dfc66-d869-4349-a544-9fda593a94db req-28c6542c-a0f3-4c9a-8ac8-691ecaa2a6ed service nova] [instance: a2f7d7c6-7931-4b21-a29c-bb9965577210] Received event network-vif-plugged-e8d9e159-a748-455b-ba7e-dad129aab175 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 875.706303] env[63028]: DEBUG oslo_concurrency.lockutils [req-050dfc66-d869-4349-a544-9fda593a94db req-28c6542c-a0f3-4c9a-8ac8-691ecaa2a6ed service nova] Acquiring lock "a2f7d7c6-7931-4b21-a29c-bb9965577210-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 875.706525] env[63028]: DEBUG oslo_concurrency.lockutils [req-050dfc66-d869-4349-a544-9fda593a94db req-28c6542c-a0f3-4c9a-8ac8-691ecaa2a6ed service nova] Lock "a2f7d7c6-7931-4b21-a29c-bb9965577210-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 875.706694] env[63028]: DEBUG oslo_concurrency.lockutils [req-050dfc66-d869-4349-a544-9fda593a94db req-28c6542c-a0f3-4c9a-8ac8-691ecaa2a6ed service nova] Lock "a2f7d7c6-7931-4b21-a29c-bb9965577210-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 875.706854] env[63028]: DEBUG nova.compute.manager [req-050dfc66-d869-4349-a544-9fda593a94db req-28c6542c-a0f3-4c9a-8ac8-691ecaa2a6ed service nova] [instance: a2f7d7c6-7931-4b21-a29c-bb9965577210] No waiting events found dispatching network-vif-plugged-e8d9e159-a748-455b-ba7e-dad129aab175 {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 875.707027] env[63028]: WARNING nova.compute.manager [req-050dfc66-d869-4349-a544-9fda593a94db req-28c6542c-a0f3-4c9a-8ac8-691ecaa2a6ed service nova] [instance: a2f7d7c6-7931-4b21-a29c-bb9965577210] Received unexpected event network-vif-plugged-e8d9e159-a748-455b-ba7e-dad129aab175 for instance with vm_state building and task_state spawning. [ 875.754929] env[63028]: DEBUG oslo_vmware.api [None req-50c05c22-9a17-46a2-aa9e-4dd4c15fda05 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2735733, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.774910] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c1f44d9d-ffc6-445a-8ea2-fd14b45f2d79 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Lock "70888889-4965-47ab-ad47-59f1c1286bd8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.868s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 875.790290] env[63028]: DEBUG nova.virt.hardware [None req-c4971efa-4f64-4aab-bb96-ae0781bd593b tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 875.790290] env[63028]: DEBUG nova.virt.hardware [None req-c4971efa-4f64-4aab-bb96-ae0781bd593b tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 875.790290] env[63028]: DEBUG nova.virt.hardware [None req-c4971efa-4f64-4aab-bb96-ae0781bd593b tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 875.790290] env[63028]: DEBUG nova.virt.hardware [None req-c4971efa-4f64-4aab-bb96-ae0781bd593b tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 875.790290] env[63028]: DEBUG nova.virt.hardware [None req-c4971efa-4f64-4aab-bb96-ae0781bd593b tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 875.790290] env[63028]: DEBUG nova.virt.hardware [None req-c4971efa-4f64-4aab-bb96-ae0781bd593b tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 875.790290] env[63028]: DEBUG nova.virt.hardware [None req-c4971efa-4f64-4aab-bb96-ae0781bd593b tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 875.790290] env[63028]: DEBUG nova.virt.hardware [None 
req-c4971efa-4f64-4aab-bb96-ae0781bd593b tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 875.790290] env[63028]: DEBUG nova.virt.hardware [None req-c4971efa-4f64-4aab-bb96-ae0781bd593b tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 875.790290] env[63028]: DEBUG nova.virt.hardware [None req-c4971efa-4f64-4aab-bb96-ae0781bd593b tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 875.790290] env[63028]: DEBUG nova.virt.hardware [None req-c4971efa-4f64-4aab-bb96-ae0781bd593b tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 875.796117] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-c4971efa-4f64-4aab-bb96-ae0781bd593b tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Reconfiguring VM instance instance-00000029 to detach disk 2000 {{(pid=63028) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 875.797514] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7236d3d2-0058-4cda-affa-0f171ef7c474 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.821591] env[63028]: DEBUG oslo_vmware.api [None req-c4971efa-4f64-4aab-bb96-ae0781bd593b tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Waiting for the task: (returnval){ [ 875.821591] env[63028]: value = "task-2735735" [ 875.821591] env[63028]: _type = "Task" [ 875.821591] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.832901] env[63028]: DEBUG oslo_vmware.api [None req-c4971efa-4f64-4aab-bb96-ae0781bd593b tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2735735, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.834793] env[63028]: DEBUG nova.network.neutron [None req-05595fcf-c280-4b38-9166-e6728566c5d0 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: a2f7d7c6-7931-4b21-a29c-bb9965577210] Successfully updated port: e8d9e159-a748-455b-ba7e-dad129aab175 {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 875.906706] env[63028]: DEBUG oslo_vmware.api [None req-35aa04db-5b6b-491e-aca8-e98c4728591d tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735734, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.263957} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.910041] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-35aa04db-5b6b-491e-aca8-e98c4728591d tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 875.910041] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-35aa04db-5b6b-491e-aca8-e98c4728591d tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: 9773ad95-1894-471d-8020-c7952eac4be4] Deleted contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 875.910041] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-35aa04db-5b6b-491e-aca8-e98c4728591d tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: 9773ad95-1894-471d-8020-c7952eac4be4] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 875.910041] env[63028]: INFO nova.compute.manager [None req-35aa04db-5b6b-491e-aca8-e98c4728591d tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: 9773ad95-1894-471d-8020-c7952eac4be4] Took 1.47 seconds to destroy the instance on the hypervisor. [ 875.910041] env[63028]: DEBUG oslo.service.loopingcall [None req-35aa04db-5b6b-491e-aca8-e98c4728591d tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 875.910041] env[63028]: DEBUG nova.compute.manager [-] [instance: 9773ad95-1894-471d-8020-c7952eac4be4] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 875.910041] env[63028]: DEBUG nova.network.neutron [-] [instance: 9773ad95-1894-471d-8020-c7952eac4be4] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 876.135769] env[63028]: DEBUG oslo_vmware.api [None req-c1571098-c3e1-4a86-83aa-080ecc8a35f5 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2735732, 'name': ReconfigVM_Task, 'duration_secs': 0.552224} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.136118] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-c1571098-c3e1-4a86-83aa-080ecc8a35f5 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: 3b90dbb8-66ce-435f-beae-5464720bfb3e] Reconfigured VM instance instance-00000048 to attach disk [datastore2] 3b90dbb8-66ce-435f-beae-5464720bfb3e/3b90dbb8-66ce-435f-beae-5464720bfb3e.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 876.136933] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-86e81094-ff0e-4c90-b194-4abe1a4bda95 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.144522] env[63028]: DEBUG oslo_vmware.api [None req-c1571098-c3e1-4a86-83aa-080ecc8a35f5 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Waiting for the task: (returnval){ [ 876.144522] env[63028]: value = "task-2735736" [ 876.144522] env[63028]: _type = "Task" [ 876.144522] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.153216] env[63028]: DEBUG oslo_vmware.api [None req-c1571098-c3e1-4a86-83aa-080ecc8a35f5 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2735736, 'name': Rename_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.250802] env[63028]: DEBUG oslo_vmware.api [None req-50c05c22-9a17-46a2-aa9e-4dd4c15fda05 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2735733, 'name': PowerOnVM_Task, 'duration_secs': 0.747573} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.251328] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-50c05c22-9a17-46a2-aa9e-4dd4c15fda05 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: f0ca0d73-d428-4b8c-acac-a80b7b7dd793] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 876.251530] env[63028]: INFO nova.compute.manager [None req-50c05c22-9a17-46a2-aa9e-4dd4c15fda05 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: f0ca0d73-d428-4b8c-acac-a80b7b7dd793] Took 10.48 seconds to spawn the instance on the hypervisor. 
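The ReconfigVM_Task, Rename_Task and PowerOnVM_Task entries above all follow the same oslo.vmware pattern: call a vSphere method that returns a Task, then let the session poll it until it finishes (the "progress is N%" lines come from _poll_task). A minimal sketch of that pattern follows; the vCenter host, credentials and vm_ref are placeholders, not values from this log.

    from oslo_vmware import api

    # Placeholder endpoint and credentials; the real driver reads these from nova.conf.
    session = api.VMwareAPISession(
        'vc.example.test',   # vCenter host (placeholder)
        'administrator',     # API username (placeholder)
        's3cret',            # API password (placeholder)
        10,                  # api_retry_count: retries for failed API calls
        0.5)                 # task_poll_interval: seconds between progress polls

    def power_on(session, vm_ref):
        """Start a VM and block until vCenter reports the task as finished."""
        # invoke_api() issues the SOAP request and returns the Task moref;
        # wait_for_task() polls it (the "_poll_task ... progress is N%" lines)
        # and raises if the task ends in an error state.
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        session.wait_for_task(task)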
[ 876.251710] env[63028]: DEBUG nova.compute.manager [None req-50c05c22-9a17-46a2-aa9e-4dd4c15fda05 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: f0ca0d73-d428-4b8c-acac-a80b7b7dd793] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 876.252757] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d32c214b-7389-4e14-8bfa-ca56cfd0def7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.338628] env[63028]: DEBUG oslo_concurrency.lockutils [None req-05595fcf-c280-4b38-9166-e6728566c5d0 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Acquiring lock "refresh_cache-a2f7d7c6-7931-4b21-a29c-bb9965577210" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 876.338628] env[63028]: DEBUG oslo_concurrency.lockutils [None req-05595fcf-c280-4b38-9166-e6728566c5d0 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Acquired lock "refresh_cache-a2f7d7c6-7931-4b21-a29c-bb9965577210" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 876.338628] env[63028]: DEBUG nova.network.neutron [None req-05595fcf-c280-4b38-9166-e6728566c5d0 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: a2f7d7c6-7931-4b21-a29c-bb9965577210] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 876.339380] env[63028]: DEBUG oslo_vmware.api [None req-c4971efa-4f64-4aab-bb96-ae0781bd593b tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2735735, 'name': ReconfigVM_Task, 'duration_secs': 0.329635} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.340205] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-c4971efa-4f64-4aab-bb96-ae0781bd593b tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Reconfigured VM instance instance-00000029 to detach disk 2000 {{(pid=63028) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 876.342282] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e87b86d-1d9e-4996-b2d8-069df73125f9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.367479] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-c4971efa-4f64-4aab-bb96-ae0781bd593b tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Reconfiguring VM instance instance-00000029 to attach disk [datastore2] c06813c4-472d-4bf9-84ec-0d01306bcd48/c06813c4-472d-4bf9-84ec-0d01306bcd48.vmdk or device None with type thin {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 876.371184] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-95db0a07-8d87-4d1c-9f0d-06081e7c1b5d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.391293] env[63028]: DEBUG oslo_vmware.api [None req-c4971efa-4f64-4aab-bb96-ae0781bd593b tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Waiting for the task: (returnval){ [ 876.391293] env[63028]: value = "task-2735737" [ 876.391293] env[63028]: _type = "Task" [ 876.391293] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.403336] env[63028]: DEBUG oslo_vmware.api [None req-c4971efa-4f64-4aab-bb96-ae0781bd593b tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2735737, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.655938] env[63028]: DEBUG oslo_vmware.api [None req-c1571098-c3e1-4a86-83aa-080ecc8a35f5 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2735736, 'name': Rename_Task, 'duration_secs': 0.199164} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.656866] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1571098-c3e1-4a86-83aa-080ecc8a35f5 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: 3b90dbb8-66ce-435f-beae-5464720bfb3e] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 876.656945] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-31db4594-858d-4129-8fb6-2a4c3b781053 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.663136] env[63028]: DEBUG oslo_vmware.api [None req-c1571098-c3e1-4a86-83aa-080ecc8a35f5 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Waiting for the task: (returnval){ [ 876.663136] env[63028]: value = "task-2735738" [ 876.663136] env[63028]: _type = "Task" [ 876.663136] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.671308] env[63028]: DEBUG oslo_vmware.api [None req-c1571098-c3e1-4a86-83aa-080ecc8a35f5 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2735738, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.706483] env[63028]: DEBUG nova.network.neutron [-] [instance: 9773ad95-1894-471d-8020-c7952eac4be4] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 876.777498] env[63028]: INFO nova.compute.manager [None req-50c05c22-9a17-46a2-aa9e-4dd4c15fda05 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: f0ca0d73-d428-4b8c-acac-a80b7b7dd793] Took 44.13 seconds to build instance. 
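The "Acquiring lock ... / acquired ... waited Ns / released ... held Ns" entries (the per-instance terminate lock and the refresh_cache-* locks above, for instance) are emitted by oslo.concurrency's lockutils wrapper. A rough sketch of the same pattern, using lockutils directly; nova goes through its own synchronized helper, and the lock name and function here are illustrative only.

    from oslo_concurrency import lockutils

    # The decorator serializes callers on a named semaphore and logs the
    # acquire / waited / held timings seen around the wrapped call.
    @lockutils.synchronized('refresh_cache-<instance-uuid>')
    def refresh_network_cache():
        ...  # runs with the lock held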
[ 876.820796] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b393eda2-a2f5-411d-b7ef-07664f36b8e9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.832666] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a28b583-db87-4dc6-9d81-0134ba4ef057 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.868157] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7397e82-be4d-4ad7-b544-8e7144eb520f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.879020] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84863291-e7a9-4640-a5db-1dc03241d158 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.894385] env[63028]: DEBUG nova.compute.provider_tree [None req-c2c8e9d3-c866-44bd-b81e-c77e80e04316 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 876.905705] env[63028]: DEBUG oslo_vmware.api [None req-c4971efa-4f64-4aab-bb96-ae0781bd593b tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2735737, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.908417] env[63028]: DEBUG nova.network.neutron [None req-05595fcf-c280-4b38-9166-e6728566c5d0 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: a2f7d7c6-7931-4b21-a29c-bb9965577210] Instance cache missing network info. 
{{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 877.097599] env[63028]: DEBUG nova.network.neutron [None req-05595fcf-c280-4b38-9166-e6728566c5d0 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: a2f7d7c6-7931-4b21-a29c-bb9965577210] Updating instance_info_cache with network_info: [{"id": "e8d9e159-a748-455b-ba7e-dad129aab175", "address": "fa:16:3e:f5:22:50", "network": {"id": "103ef17f-0425-48ae-98df-250d0d0dd66a", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-13124710-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43b7c1be3c4343a4b4f288a355170873", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3261e15f-7e45-4516-acfd-341bab16e3cf", "external-id": "nsx-vlan-transportzone-783", "segmentation_id": 783, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape8d9e159-a7", "ovs_interfaceid": "e8d9e159-a748-455b-ba7e-dad129aab175", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 877.174454] env[63028]: DEBUG oslo_vmware.api [None req-c1571098-c3e1-4a86-83aa-080ecc8a35f5 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2735738, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.209638] env[63028]: INFO nova.compute.manager [-] [instance: 9773ad95-1894-471d-8020-c7952eac4be4] Took 1.30 seconds to deallocate network for instance. 
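The recurring "Invoking PropertyCollector.RetrievePropertiesEx" entries (for example while "Checking state" after a power-on) are property reads against vCenter. oslo.vmware exposes a helper for the common single-property case; a sketch, assuming the same session object and a previously looked-up vm_ref as in the earlier snippet.

    from oslo_vmware import vim_util

    # RetrievePropertiesEx is what this becomes on the wire, which is why
    # those entries show up around "Checking state".
    power_state = session.invoke_api(
        vim_util, 'get_object_property', session.vim, vm_ref,
        'runtime.powerState')   # e.g. 'poweredOn'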
[ 877.282682] env[63028]: DEBUG oslo_concurrency.lockutils [None req-50c05c22-9a17-46a2-aa9e-4dd4c15fda05 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Lock "f0ca0d73-d428-4b8c-acac-a80b7b7dd793" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 64.239s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 877.400730] env[63028]: DEBUG nova.scheduler.client.report [None req-c2c8e9d3-c866-44bd-b81e-c77e80e04316 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 877.408093] env[63028]: DEBUG oslo_vmware.api [None req-c4971efa-4f64-4aab-bb96-ae0781bd593b tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2735737, 'name': ReconfigVM_Task, 'duration_secs': 0.575227} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.408093] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-c4971efa-4f64-4aab-bb96-ae0781bd593b tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Reconfigured VM instance instance-00000029 to attach disk [datastore2] c06813c4-472d-4bf9-84ec-0d01306bcd48/c06813c4-472d-4bf9-84ec-0d01306bcd48.vmdk or device None with type thin {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 877.408093] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c4971efa-4f64-4aab-bb96-ae0781bd593b tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Updating instance 'c06813c4-472d-4bf9-84ec-0d01306bcd48' progress to 50 {{(pid=63028) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 877.600922] env[63028]: DEBUG oslo_concurrency.lockutils [None req-05595fcf-c280-4b38-9166-e6728566c5d0 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Releasing lock "refresh_cache-a2f7d7c6-7931-4b21-a29c-bb9965577210" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 877.601358] env[63028]: DEBUG nova.compute.manager [None req-05595fcf-c280-4b38-9166-e6728566c5d0 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: a2f7d7c6-7931-4b21-a29c-bb9965577210] Instance network_info: |[{"id": "e8d9e159-a748-455b-ba7e-dad129aab175", "address": "fa:16:3e:f5:22:50", "network": {"id": "103ef17f-0425-48ae-98df-250d0d0dd66a", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-13124710-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], 
"gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43b7c1be3c4343a4b4f288a355170873", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3261e15f-7e45-4516-acfd-341bab16e3cf", "external-id": "nsx-vlan-transportzone-783", "segmentation_id": 783, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape8d9e159-a7", "ovs_interfaceid": "e8d9e159-a748-455b-ba7e-dad129aab175", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 877.601802] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-05595fcf-c280-4b38-9166-e6728566c5d0 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: a2f7d7c6-7931-4b21-a29c-bb9965577210] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f5:22:50', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3261e15f-7e45-4516-acfd-341bab16e3cf', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e8d9e159-a748-455b-ba7e-dad129aab175', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 877.609276] env[63028]: DEBUG oslo.service.loopingcall [None req-05595fcf-c280-4b38-9166-e6728566c5d0 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 877.609416] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a2f7d7c6-7931-4b21-a29c-bb9965577210] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 877.610021] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f3acad48-e02e-4f59-898e-22a153984506 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.629467] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 877.629467] env[63028]: value = "task-2735739" [ 877.629467] env[63028]: _type = "Task" [ 877.629467] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.637177] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735739, 'name': CreateVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.673628] env[63028]: DEBUG oslo_vmware.api [None req-c1571098-c3e1-4a86-83aa-080ecc8a35f5 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2735738, 'name': PowerOnVM_Task, 'duration_secs': 0.917408} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.674501] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1571098-c3e1-4a86-83aa-080ecc8a35f5 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: 3b90dbb8-66ce-435f-beae-5464720bfb3e] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 877.674721] env[63028]: INFO nova.compute.manager [None req-c1571098-c3e1-4a86-83aa-080ecc8a35f5 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: 3b90dbb8-66ce-435f-beae-5464720bfb3e] Took 9.13 seconds to spawn the instance on the hypervisor. [ 877.674950] env[63028]: DEBUG nova.compute.manager [None req-c1571098-c3e1-4a86-83aa-080ecc8a35f5 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: 3b90dbb8-66ce-435f-beae-5464720bfb3e] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 877.675798] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-871ac94a-7f45-4537-82a8-a77106e87ee3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.718110] env[63028]: DEBUG oslo_concurrency.lockutils [None req-35aa04db-5b6b-491e-aca8-e98c4728591d tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 877.747969] env[63028]: DEBUG nova.compute.manager [req-07fbecf0-7e24-4332-903e-06778220e2b6 req-ea4e55e1-c1cf-4f08-8845-ea7c36dc5413 service nova] [instance: a2f7d7c6-7931-4b21-a29c-bb9965577210] Received event network-changed-e8d9e159-a748-455b-ba7e-dad129aab175 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 877.748218] env[63028]: DEBUG nova.compute.manager [req-07fbecf0-7e24-4332-903e-06778220e2b6 req-ea4e55e1-c1cf-4f08-8845-ea7c36dc5413 service nova] [instance: a2f7d7c6-7931-4b21-a29c-bb9965577210] Refreshing instance network info cache due to event network-changed-e8d9e159-a748-455b-ba7e-dad129aab175. 
{{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 877.748448] env[63028]: DEBUG oslo_concurrency.lockutils [req-07fbecf0-7e24-4332-903e-06778220e2b6 req-ea4e55e1-c1cf-4f08-8845-ea7c36dc5413 service nova] Acquiring lock "refresh_cache-a2f7d7c6-7931-4b21-a29c-bb9965577210" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 877.749514] env[63028]: DEBUG oslo_concurrency.lockutils [req-07fbecf0-7e24-4332-903e-06778220e2b6 req-ea4e55e1-c1cf-4f08-8845-ea7c36dc5413 service nova] Acquired lock "refresh_cache-a2f7d7c6-7931-4b21-a29c-bb9965577210" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 877.750898] env[63028]: DEBUG nova.network.neutron [req-07fbecf0-7e24-4332-903e-06778220e2b6 req-ea4e55e1-c1cf-4f08-8845-ea7c36dc5413 service nova] [instance: a2f7d7c6-7931-4b21-a29c-bb9965577210] Refreshing network info cache for port e8d9e159-a748-455b-ba7e-dad129aab175 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 877.781512] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d0adac76-4bdd-4403-8347-182796e98476 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Acquiring lock "46dc76bc-854f-46ad-9db5-21cf6f40fb21" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 877.781512] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d0adac76-4bdd-4403-8347-182796e98476 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Lock "46dc76bc-854f-46ad-9db5-21cf6f40fb21" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 877.908595] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c2c8e9d3-c866-44bd-b81e-c77e80e04316 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.676s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 877.909156] env[63028]: DEBUG nova.compute.manager [None req-c2c8e9d3-c866-44bd-b81e-c77e80e04316 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 70147f2f-0b5e-4343-84e4-8bc195a5485d] Start building networks asynchronously for instance. 
{{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 877.912170] env[63028]: DEBUG oslo_concurrency.lockutils [None req-541b8fc7-3b9c-4592-9ae6-6bc4eccb0a4c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 35.446s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 877.912360] env[63028]: DEBUG nova.objects.instance [None req-541b8fc7-3b9c-4592-9ae6-6bc4eccb0a4c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 514c83d1-4fb1-435c-8c25-aa112c744131] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63028) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 877.917205] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47da428e-fac9-41af-b4b6-09ea37325df3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.937867] env[63028]: INFO nova.compute.manager [None req-b1d98946-1bac-414d-b354-aa24f213fb3a tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: f0ca0d73-d428-4b8c-acac-a80b7b7dd793] Rescuing [ 877.938195] env[63028]: DEBUG oslo_concurrency.lockutils [None req-b1d98946-1bac-414d-b354-aa24f213fb3a tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Acquiring lock "refresh_cache-f0ca0d73-d428-4b8c-acac-a80b7b7dd793" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 877.938359] env[63028]: DEBUG oslo_concurrency.lockutils [None req-b1d98946-1bac-414d-b354-aa24f213fb3a tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Acquired lock "refresh_cache-f0ca0d73-d428-4b8c-acac-a80b7b7dd793" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 877.938542] env[63028]: DEBUG nova.network.neutron [None req-b1d98946-1bac-414d-b354-aa24f213fb3a tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: f0ca0d73-d428-4b8c-acac-a80b7b7dd793] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 877.940428] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e45416f2-cc93-4fab-b16c-b385565db00c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.966043] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c4971efa-4f64-4aab-bb96-ae0781bd593b tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Updating instance 'c06813c4-472d-4bf9-84ec-0d01306bcd48' progress to 67 {{(pid=63028) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 878.139819] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735739, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.194861] env[63028]: INFO nova.compute.manager [None req-c1571098-c3e1-4a86-83aa-080ecc8a35f5 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: 3b90dbb8-66ce-435f-beae-5464720bfb3e] Took 42.03 seconds to build instance. [ 878.282906] env[63028]: DEBUG nova.compute.manager [None req-d0adac76-4bdd-4403-8347-182796e98476 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Starting instance... {{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 878.418111] env[63028]: DEBUG nova.compute.utils [None req-c2c8e9d3-c866-44bd-b81e-c77e80e04316 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 878.426982] env[63028]: DEBUG nova.compute.manager [None req-c2c8e9d3-c866-44bd-b81e-c77e80e04316 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 70147f2f-0b5e-4343-84e4-8bc195a5485d] Allocating IP information in the background. {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 878.426982] env[63028]: DEBUG nova.network.neutron [None req-c2c8e9d3-c866-44bd-b81e-c77e80e04316 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 70147f2f-0b5e-4343-84e4-8bc195a5485d] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 878.509482] env[63028]: DEBUG nova.network.neutron [None req-c4971efa-4f64-4aab-bb96-ae0781bd593b tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Port e9be02f8-7ea6-45eb-a1cb-65fb95285caf binding to destination host cpu-1 is already ACTIVE {{(pid=63028) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 878.519731] env[63028]: DEBUG nova.policy [None req-c2c8e9d3-c866-44bd-b81e-c77e80e04316 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '88001cd873b841918c7849408e98ac7a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '98d3fdfda1694b2f9f5985831ea77a21', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 878.606965] env[63028]: DEBUG nova.network.neutron [req-07fbecf0-7e24-4332-903e-06778220e2b6 req-ea4e55e1-c1cf-4f08-8845-ea7c36dc5413 service nova] [instance: a2f7d7c6-7931-4b21-a29c-bb9965577210] Updated VIF entry in instance network info cache for port e8d9e159-a748-455b-ba7e-dad129aab175. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 878.607362] env[63028]: DEBUG nova.network.neutron [req-07fbecf0-7e24-4332-903e-06778220e2b6 req-ea4e55e1-c1cf-4f08-8845-ea7c36dc5413 service nova] [instance: a2f7d7c6-7931-4b21-a29c-bb9965577210] Updating instance_info_cache with network_info: [{"id": "e8d9e159-a748-455b-ba7e-dad129aab175", "address": "fa:16:3e:f5:22:50", "network": {"id": "103ef17f-0425-48ae-98df-250d0d0dd66a", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-13124710-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43b7c1be3c4343a4b4f288a355170873", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3261e15f-7e45-4516-acfd-341bab16e3cf", "external-id": "nsx-vlan-transportzone-783", "segmentation_id": 783, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape8d9e159-a7", "ovs_interfaceid": "e8d9e159-a748-455b-ba7e-dad129aab175", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 878.640338] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735739, 'name': CreateVM_Task, 'duration_secs': 0.512599} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.640551] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a2f7d7c6-7931-4b21-a29c-bb9965577210] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 878.641311] env[63028]: DEBUG oslo_concurrency.lockutils [None req-05595fcf-c280-4b38-9166-e6728566c5d0 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 878.641371] env[63028]: DEBUG oslo_concurrency.lockutils [None req-05595fcf-c280-4b38-9166-e6728566c5d0 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 878.641690] env[63028]: DEBUG oslo_concurrency.lockutils [None req-05595fcf-c280-4b38-9166-e6728566c5d0 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 878.641944] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0d28b3b1-f345-40ef-80cb-9882725b50d3 {{(pid=63028) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.646858] env[63028]: DEBUG oslo_vmware.api [None req-05595fcf-c280-4b38-9166-e6728566c5d0 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Waiting for the task: (returnval){ [ 878.646858] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52b21f03-4a86-46bb-6749-ac8352736534" [ 878.646858] env[63028]: _type = "Task" [ 878.646858] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.657976] env[63028]: DEBUG oslo_vmware.api [None req-05595fcf-c280-4b38-9166-e6728566c5d0 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52b21f03-4a86-46bb-6749-ac8352736534, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.698347] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c1571098-c3e1-4a86-83aa-080ecc8a35f5 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Lock "3b90dbb8-66ce-435f-beae-5464720bfb3e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 63.515s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 878.811960] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d0adac76-4bdd-4403-8347-182796e98476 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 878.886878] env[63028]: DEBUG nova.network.neutron [None req-b1d98946-1bac-414d-b354-aa24f213fb3a tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: f0ca0d73-d428-4b8c-acac-a80b7b7dd793] Updating instance_info_cache with network_info: [{"id": "f5e9674b-220d-4e59-bec7-5b1512efb29e", "address": "fa:16:3e:99:86:70", "network": {"id": "53257b54-f197-4a4e-94bb-23ad6c6b7353", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-575661432-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e2304ce21bf141cab94fb6c342653812", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0954fad3-d24d-496c-83e6-a09d3cb556fc", "external-id": "nsx-vlan-transportzone-216", "segmentation_id": 216, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf5e9674b-22", "ovs_interfaceid": "f5e9674b-220d-4e59-bec7-5b1512efb29e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 878.925920] env[63028]: DEBUG nova.compute.manager [None req-c2c8e9d3-c866-44bd-b81e-c77e80e04316 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 70147f2f-0b5e-4343-84e4-8bc195a5485d] Start building block device mappings for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 878.932937] env[63028]: DEBUG oslo_concurrency.lockutils [None req-541b8fc7-3b9c-4592-9ae6-6bc4eccb0a4c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.021s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 878.938277] env[63028]: DEBUG oslo_concurrency.lockutils [None req-0af539be-ae50-4675-9cfd-f11dbcbf3335 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 34.798s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 878.938277] env[63028]: DEBUG nova.objects.instance [None req-0af539be-ae50-4675-9cfd-f11dbcbf3335 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: ba57ed92-aaef-460c-bd45-d0cbe09e4615] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63028) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 879.067798] env[63028]: DEBUG nova.network.neutron [None req-c2c8e9d3-c866-44bd-b81e-c77e80e04316 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 70147f2f-0b5e-4343-84e4-8bc195a5485d] Successfully created port: 88cffe20-d5e9-44e6-8180-39722a305d2e {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 879.109633] env[63028]: DEBUG oslo_concurrency.lockutils [req-07fbecf0-7e24-4332-903e-06778220e2b6 req-ea4e55e1-c1cf-4f08-8845-ea7c36dc5413 service nova] Releasing lock "refresh_cache-a2f7d7c6-7931-4b21-a29c-bb9965577210" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 879.109903] env[63028]: DEBUG nova.compute.manager [req-07fbecf0-7e24-4332-903e-06778220e2b6 req-ea4e55e1-c1cf-4f08-8845-ea7c36dc5413 service nova] [instance: 9773ad95-1894-471d-8020-c7952eac4be4] Received event network-vif-deleted-995d1950-1169-43bf-8afe-427bdcb37b9d {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 879.159593] env[63028]: DEBUG oslo_vmware.api [None req-05595fcf-c280-4b38-9166-e6728566c5d0 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52b21f03-4a86-46bb-6749-ac8352736534, 'name': SearchDatastore_Task, 'duration_secs': 0.026532} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.159879] env[63028]: DEBUG oslo_concurrency.lockutils [None req-05595fcf-c280-4b38-9166-e6728566c5d0 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 879.160117] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-05595fcf-c280-4b38-9166-e6728566c5d0 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: a2f7d7c6-7931-4b21-a29c-bb9965577210] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 879.160417] env[63028]: DEBUG oslo_concurrency.lockutils [None req-05595fcf-c280-4b38-9166-e6728566c5d0 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 879.160581] env[63028]: DEBUG oslo_concurrency.lockutils [None req-05595fcf-c280-4b38-9166-e6728566c5d0 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 879.160764] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-05595fcf-c280-4b38-9166-e6728566c5d0 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 879.161020] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c3db814e-a776-43d6-a08c-ef90fb6a90e1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.173474] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-05595fcf-c280-4b38-9166-e6728566c5d0 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 879.173594] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-05595fcf-c280-4b38-9166-e6728566c5d0 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 879.174312] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b9390111-a865-41a3-9d72-2bd851ad5282 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.179071] env[63028]: DEBUG oslo_vmware.api [None req-05595fcf-c280-4b38-9166-e6728566c5d0 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Waiting for the task: (returnval){ [ 879.179071] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52e1eb12-d940-7fea-3b32-6461fe84e048" [ 879.179071] env[63028]: _type = "Task" [ 879.179071] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.187648] env[63028]: DEBUG oslo_vmware.api [None req-05595fcf-c280-4b38-9166-e6728566c5d0 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52e1eb12-d940-7fea-3b32-6461fe84e048, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.388574] env[63028]: DEBUG oslo_concurrency.lockutils [None req-b1d98946-1bac-414d-b354-aa24f213fb3a tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Releasing lock "refresh_cache-f0ca0d73-d428-4b8c-acac-a80b7b7dd793" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 879.535557] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c4971efa-4f64-4aab-bb96-ae0781bd593b tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Acquiring lock "c06813c4-472d-4bf9-84ec-0d01306bcd48-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 879.535807] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c4971efa-4f64-4aab-bb96-ae0781bd593b tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Lock "c06813c4-472d-4bf9-84ec-0d01306bcd48-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 879.536138] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c4971efa-4f64-4aab-bb96-ae0781bd593b tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Lock "c06813c4-472d-4bf9-84ec-0d01306bcd48-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 879.692081] env[63028]: DEBUG oslo_vmware.api [None req-05595fcf-c280-4b38-9166-e6728566c5d0 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52e1eb12-d940-7fea-3b32-6461fe84e048, 'name': SearchDatastore_Task, 'duration_secs': 0.035347} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.692845] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-25adf441-8d62-4f86-9625-96e16608b372 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.698456] env[63028]: DEBUG oslo_vmware.api [None req-05595fcf-c280-4b38-9166-e6728566c5d0 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Waiting for the task: (returnval){ [ 879.698456] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52d42842-d22e-b42f-7f5d-214596017186" [ 879.698456] env[63028]: _type = "Task" [ 879.698456] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.706409] env[63028]: DEBUG oslo_vmware.api [None req-05595fcf-c280-4b38-9166-e6728566c5d0 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52d42842-d22e-b42f-7f5d-214596017186, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.944804] env[63028]: DEBUG oslo_concurrency.lockutils [None req-0af539be-ae50-4675-9cfd-f11dbcbf3335 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.009s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 879.946699] env[63028]: DEBUG nova.compute.manager [None req-c2c8e9d3-c866-44bd-b81e-c77e80e04316 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 70147f2f-0b5e-4343-84e4-8bc195a5485d] Start spawning the instance on the hypervisor. 
{{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 879.948841] env[63028]: DEBUG oslo_concurrency.lockutils [None req-e73039fe-033f-4adc-a8b7-958d7706819e tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.898s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 879.949081] env[63028]: DEBUG nova.objects.instance [None req-e73039fe-033f-4adc-a8b7-958d7706819e tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Lazy-loading 'resources' on Instance uuid 514c83d1-4fb1-435c-8c25-aa112c744131 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 879.974355] env[63028]: DEBUG nova.virt.hardware [None req-c2c8e9d3-c866-44bd-b81e-c77e80e04316 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 879.974597] env[63028]: DEBUG nova.virt.hardware [None req-c2c8e9d3-c866-44bd-b81e-c77e80e04316 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 879.974753] env[63028]: DEBUG nova.virt.hardware [None req-c2c8e9d3-c866-44bd-b81e-c77e80e04316 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 879.974933] env[63028]: DEBUG nova.virt.hardware [None req-c2c8e9d3-c866-44bd-b81e-c77e80e04316 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 879.975113] env[63028]: DEBUG nova.virt.hardware [None req-c2c8e9d3-c866-44bd-b81e-c77e80e04316 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 879.975270] env[63028]: DEBUG nova.virt.hardware [None req-c2c8e9d3-c866-44bd-b81e-c77e80e04316 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 
879.975482] env[63028]: DEBUG nova.virt.hardware [None req-c2c8e9d3-c866-44bd-b81e-c77e80e04316 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 879.975641] env[63028]: DEBUG nova.virt.hardware [None req-c2c8e9d3-c866-44bd-b81e-c77e80e04316 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 879.975809] env[63028]: DEBUG nova.virt.hardware [None req-c2c8e9d3-c866-44bd-b81e-c77e80e04316 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 879.975969] env[63028]: DEBUG nova.virt.hardware [None req-c2c8e9d3-c866-44bd-b81e-c77e80e04316 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 879.976157] env[63028]: DEBUG nova.virt.hardware [None req-c2c8e9d3-c866-44bd-b81e-c77e80e04316 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 879.977115] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c501da27-bc2a-4655-9016-c946b06c6932 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.986504] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6de66b43-445b-430f-bc90-cf3f985f7704 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.067089] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a8b25c06-f049-419d-8aac-06b17b16552d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Acquiring lock "3b90dbb8-66ce-435f-beae-5464720bfb3e" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 880.067387] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a8b25c06-f049-419d-8aac-06b17b16552d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Lock "3b90dbb8-66ce-435f-beae-5464720bfb3e" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 880.211064] env[63028]: DEBUG oslo_vmware.api [None req-05595fcf-c280-4b38-9166-e6728566c5d0 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52d42842-d22e-b42f-7f5d-214596017186, 'name': SearchDatastore_Task, 
'duration_secs': 0.022516} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.211341] env[63028]: DEBUG oslo_concurrency.lockutils [None req-05595fcf-c280-4b38-9166-e6728566c5d0 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 880.211594] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-05595fcf-c280-4b38-9166-e6728566c5d0 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] a2f7d7c6-7931-4b21-a29c-bb9965577210/a2f7d7c6-7931-4b21-a29c-bb9965577210.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 880.211855] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b9d2bdbe-dbaf-4283-8c51-37f2ea5e2eb9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.218336] env[63028]: DEBUG oslo_vmware.api [None req-05595fcf-c280-4b38-9166-e6728566c5d0 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Waiting for the task: (returnval){ [ 880.218336] env[63028]: value = "task-2735740" [ 880.218336] env[63028]: _type = "Task" [ 880.218336] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.225688] env[63028]: DEBUG oslo_vmware.api [None req-05595fcf-c280-4b38-9166-e6728566c5d0 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Task: {'id': task-2735740, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.570864] env[63028]: DEBUG nova.compute.utils [None req-a8b25c06-f049-419d-8aac-06b17b16552d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 880.592200] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c4971efa-4f64-4aab-bb96-ae0781bd593b tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Acquiring lock "refresh_cache-c06813c4-472d-4bf9-84ec-0d01306bcd48" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 880.592421] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c4971efa-4f64-4aab-bb96-ae0781bd593b tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Acquired lock "refresh_cache-c06813c4-472d-4bf9-84ec-0d01306bcd48" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 880.592601] env[63028]: DEBUG nova.network.neutron [None req-c4971efa-4f64-4aab-bb96-ae0781bd593b tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 880.724223] env[63028]: DEBUG nova.network.neutron [None req-c2c8e9d3-c866-44bd-b81e-c77e80e04316 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 70147f2f-0b5e-4343-84e4-8bc195a5485d] Successfully updated port: 88cffe20-d5e9-44e6-8180-39722a305d2e {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 880.731720] env[63028]: DEBUG oslo_vmware.api [None req-05595fcf-c280-4b38-9166-e6728566c5d0 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Task: {'id': task-2735740, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.485653} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.734059] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-05595fcf-c280-4b38-9166-e6728566c5d0 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] a2f7d7c6-7931-4b21-a29c-bb9965577210/a2f7d7c6-7931-4b21-a29c-bb9965577210.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 880.734868] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-05595fcf-c280-4b38-9166-e6728566c5d0 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: a2f7d7c6-7931-4b21-a29c-bb9965577210] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 880.735062] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-251ea019-5e7d-4dc0-b98b-5770dece6b30 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.742754] env[63028]: DEBUG oslo_vmware.api [None req-05595fcf-c280-4b38-9166-e6728566c5d0 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Waiting for the task: (returnval){ [ 880.742754] env[63028]: value = "task-2735741" [ 880.742754] env[63028]: _type = "Task" [ 880.742754] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.755747] env[63028]: DEBUG oslo_vmware.api [None req-05595fcf-c280-4b38-9166-e6728566c5d0 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Task: {'id': task-2735741, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.870775] env[63028]: DEBUG nova.compute.manager [req-a0886ca0-18f7-4afa-b634-16b759d2a536 req-d606e800-8911-40c9-964d-af935731d503 service nova] [instance: 70147f2f-0b5e-4343-84e4-8bc195a5485d] Received event network-vif-plugged-88cffe20-d5e9-44e6-8180-39722a305d2e {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 880.870944] env[63028]: DEBUG oslo_concurrency.lockutils [req-a0886ca0-18f7-4afa-b634-16b759d2a536 req-d606e800-8911-40c9-964d-af935731d503 service nova] Acquiring lock "70147f2f-0b5e-4343-84e4-8bc195a5485d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 880.871231] env[63028]: DEBUG oslo_concurrency.lockutils [req-a0886ca0-18f7-4afa-b634-16b759d2a536 req-d606e800-8911-40c9-964d-af935731d503 service nova] Lock "70147f2f-0b5e-4343-84e4-8bc195a5485d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 880.871365] env[63028]: DEBUG oslo_concurrency.lockutils [req-a0886ca0-18f7-4afa-b634-16b759d2a536 req-d606e800-8911-40c9-964d-af935731d503 service nova] Lock "70147f2f-0b5e-4343-84e4-8bc195a5485d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 880.871531] env[63028]: DEBUG nova.compute.manager [req-a0886ca0-18f7-4afa-b634-16b759d2a536 req-d606e800-8911-40c9-964d-af935731d503 service nova] [instance: 70147f2f-0b5e-4343-84e4-8bc195a5485d] No waiting events found dispatching network-vif-plugged-88cffe20-d5e9-44e6-8180-39722a305d2e {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 880.871706] env[63028]: WARNING nova.compute.manager [req-a0886ca0-18f7-4afa-b634-16b759d2a536 req-d606e800-8911-40c9-964d-af935731d503 service nova] [instance: 70147f2f-0b5e-4343-84e4-8bc195a5485d] Received unexpected event network-vif-plugged-88cffe20-d5e9-44e6-8180-39722a305d2e for instance with vm_state building and task_state spawning. [ 880.925492] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1d98946-1bac-414d-b354-aa24f213fb3a tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: f0ca0d73-d428-4b8c-acac-a80b7b7dd793] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 880.925968] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f6ee00e2-9d48-490f-838f-2b28837f0104 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.933663] env[63028]: DEBUG oslo_vmware.api [None req-b1d98946-1bac-414d-b354-aa24f213fb3a tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Waiting for the task: (returnval){ [ 880.933663] env[63028]: value = "task-2735742" [ 880.933663] env[63028]: _type = "Task" [ 880.933663] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.946973] env[63028]: DEBUG oslo_vmware.api [None req-b1d98946-1bac-414d-b354-aa24f213fb3a tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2735742, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.996542] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c29ce933-dac8-4c17-9174-1e1cc08b3728 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.005197] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ca238c8-21c7-489f-9fe5-a6f8686a3a95 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.037387] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d6179dd-c178-4aee-b959-81d9f127ef03 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.046007] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee662d3a-08e3-460a-889d-c760d890aa75 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.059767] env[63028]: DEBUG nova.compute.provider_tree [None req-e73039fe-033f-4adc-a8b7-958d7706819e tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 881.077285] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a8b25c06-f049-419d-8aac-06b17b16552d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Lock "3b90dbb8-66ce-435f-beae-5464720bfb3e" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.010s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 881.228189] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c2c8e9d3-c866-44bd-b81e-c77e80e04316 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Acquiring lock "refresh_cache-70147f2f-0b5e-4343-84e4-8bc195a5485d" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 881.228189] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c2c8e9d3-c866-44bd-b81e-c77e80e04316 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Acquired lock "refresh_cache-70147f2f-0b5e-4343-84e4-8bc195a5485d" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 881.228189] env[63028]: DEBUG nova.network.neutron [None req-c2c8e9d3-c866-44bd-b81e-c77e80e04316 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 70147f2f-0b5e-4343-84e4-8bc195a5485d] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 881.252310] 
env[63028]: DEBUG oslo_vmware.api [None req-05595fcf-c280-4b38-9166-e6728566c5d0 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Task: {'id': task-2735741, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.061032} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.252582] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-05595fcf-c280-4b38-9166-e6728566c5d0 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: a2f7d7c6-7931-4b21-a29c-bb9965577210] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 881.253394] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae0060d0-5389-4138-bad7-0f808d073892 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.279871] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-05595fcf-c280-4b38-9166-e6728566c5d0 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: a2f7d7c6-7931-4b21-a29c-bb9965577210] Reconfiguring VM instance instance-00000049 to attach disk [datastore2] a2f7d7c6-7931-4b21-a29c-bb9965577210/a2f7d7c6-7931-4b21-a29c-bb9965577210.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 881.280585] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-48fa939b-5efd-4722-956b-6b05a52d409f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.310102] env[63028]: DEBUG oslo_vmware.api [None req-05595fcf-c280-4b38-9166-e6728566c5d0 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Waiting for the task: (returnval){ [ 881.310102] env[63028]: value = "task-2735743" [ 881.310102] env[63028]: _type = "Task" [ 881.310102] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.321457] env[63028]: DEBUG oslo_vmware.api [None req-05595fcf-c280-4b38-9166-e6728566c5d0 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Task: {'id': task-2735743, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.382043] env[63028]: DEBUG nova.network.neutron [None req-c4971efa-4f64-4aab-bb96-ae0781bd593b tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Updating instance_info_cache with network_info: [{"id": "e9be02f8-7ea6-45eb-a1cb-65fb95285caf", "address": "fa:16:3e:cc:b1:42", "network": {"id": "49670a42-3caa-4492-8b32-f16f3fe77f4b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1158317332-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.229", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b4dcaef840f940bda057d0371cdc5adb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4df917f7-847a-4c0e-b0e3-69a52e4a1554", "external-id": "cl2-zone-457", "segmentation_id": 457, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape9be02f8-7e", "ovs_interfaceid": "e9be02f8-7ea6-45eb-a1cb-65fb95285caf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 881.444122] env[63028]: DEBUG oslo_vmware.api [None req-b1d98946-1bac-414d-b354-aa24f213fb3a tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2735742, 'name': PowerOffVM_Task, 'duration_secs': 0.490305} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.444403] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1d98946-1bac-414d-b354-aa24f213fb3a tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: f0ca0d73-d428-4b8c-acac-a80b7b7dd793] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 881.445269] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25d2cca4-edb4-4298-8d7d-d63f281471f8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.463313] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95f38a3d-07fc-446f-9850-7ad4ded58ca6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.496964] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1d98946-1bac-414d-b354-aa24f213fb3a tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: f0ca0d73-d428-4b8c-acac-a80b7b7dd793] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 881.497320] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-35c36677-1473-4427-a8ca-b8305425f796 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.504211] env[63028]: DEBUG oslo_vmware.api [None req-b1d98946-1bac-414d-b354-aa24f213fb3a tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Waiting for the task: (returnval){ [ 881.504211] env[63028]: value = "task-2735744" [ 881.504211] env[63028]: _type = "Task" [ 881.504211] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.511676] env[63028]: DEBUG oslo_vmware.api [None req-b1d98946-1bac-414d-b354-aa24f213fb3a tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2735744, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.563383] env[63028]: DEBUG nova.scheduler.client.report [None req-e73039fe-033f-4adc-a8b7-958d7706819e tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 881.758875] env[63028]: DEBUG nova.network.neutron [None req-c2c8e9d3-c866-44bd-b81e-c77e80e04316 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 70147f2f-0b5e-4343-84e4-8bc195a5485d] Instance cache missing network info. 
{{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 881.822672] env[63028]: DEBUG oslo_vmware.api [None req-05595fcf-c280-4b38-9166-e6728566c5d0 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Task: {'id': task-2735743, 'name': ReconfigVM_Task, 'duration_secs': 0.311797} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.822933] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-05595fcf-c280-4b38-9166-e6728566c5d0 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: a2f7d7c6-7931-4b21-a29c-bb9965577210] Reconfigured VM instance instance-00000049 to attach disk [datastore2] a2f7d7c6-7931-4b21-a29c-bb9965577210/a2f7d7c6-7931-4b21-a29c-bb9965577210.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 881.823574] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d9d1740a-935a-4984-90b3-6b7c4f3e199a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.829926] env[63028]: DEBUG oslo_vmware.api [None req-05595fcf-c280-4b38-9166-e6728566c5d0 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Waiting for the task: (returnval){ [ 881.829926] env[63028]: value = "task-2735745" [ 881.829926] env[63028]: _type = "Task" [ 881.829926] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.839462] env[63028]: DEBUG oslo_vmware.api [None req-05595fcf-c280-4b38-9166-e6728566c5d0 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Task: {'id': task-2735745, 'name': Rename_Task} progress is 5%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.887270] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c4971efa-4f64-4aab-bb96-ae0781bd593b tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Releasing lock "refresh_cache-c06813c4-472d-4bf9-84ec-0d01306bcd48" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 881.941072] env[63028]: DEBUG nova.network.neutron [None req-c2c8e9d3-c866-44bd-b81e-c77e80e04316 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 70147f2f-0b5e-4343-84e4-8bc195a5485d] Updating instance_info_cache with network_info: [{"id": "88cffe20-d5e9-44e6-8180-39722a305d2e", "address": "fa:16:3e:63:52:4a", "network": {"id": "37013470-5bda-41a6-826c-03ac0d423c85", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1214578902-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "98d3fdfda1694b2f9f5985831ea77a21", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8868dc2-7767-49c0-a2ed-e611fcbf8414", "external-id": "nsx-vlan-transportzone-158", "segmentation_id": 158, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap88cffe20-d5", "ovs_interfaceid": "88cffe20-d5e9-44e6-8180-39722a305d2e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 882.015046] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1d98946-1bac-414d-b354-aa24f213fb3a tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: f0ca0d73-d428-4b8c-acac-a80b7b7dd793] VM already powered off {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 882.015046] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-b1d98946-1bac-414d-b354-aa24f213fb3a tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: f0ca0d73-d428-4b8c-acac-a80b7b7dd793] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 882.015245] env[63028]: DEBUG oslo_concurrency.lockutils [None req-b1d98946-1bac-414d-b354-aa24f213fb3a tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 882.015245] env[63028]: DEBUG oslo_concurrency.lockutils [None req-b1d98946-1bac-414d-b354-aa24f213fb3a tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Acquired lock "[datastore1] 
devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 882.015383] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-b1d98946-1bac-414d-b354-aa24f213fb3a tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 882.015669] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b6eba7e4-3431-4253-91af-dd4b95516dba {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.023920] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-b1d98946-1bac-414d-b354-aa24f213fb3a tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 882.024037] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-b1d98946-1bac-414d-b354-aa24f213fb3a tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 882.024728] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-56f95504-d715-41ae-b4c1-c381e3614d4a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.029849] env[63028]: DEBUG oslo_vmware.api [None req-b1d98946-1bac-414d-b354-aa24f213fb3a tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Waiting for the task: (returnval){ [ 882.029849] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5258c49f-ec6b-57fa-a3fe-fc6f475abb61" [ 882.029849] env[63028]: _type = "Task" [ 882.029849] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.037385] env[63028]: DEBUG oslo_vmware.api [None req-b1d98946-1bac-414d-b354-aa24f213fb3a tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5258c49f-ec6b-57fa-a3fe-fc6f475abb61, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.068659] env[63028]: DEBUG oslo_concurrency.lockutils [None req-e73039fe-033f-4adc-a8b7-958d7706819e tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.119s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 882.070839] env[63028]: DEBUG oslo_concurrency.lockutils [None req-376e64d1-6feb-42d8-98f2-d6ae07252d2d tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.890s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 882.071103] env[63028]: DEBUG nova.objects.instance [None req-376e64d1-6feb-42d8-98f2-d6ae07252d2d tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Lazy-loading 'resources' on Instance uuid 56d39801-f3e7-4cfe-a038-6a5e762bfda8 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 882.091127] env[63028]: INFO nova.scheduler.client.report [None req-e73039fe-033f-4adc-a8b7-958d7706819e tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Deleted allocations for instance 514c83d1-4fb1-435c-8c25-aa112c744131 [ 882.145687] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a8b25c06-f049-419d-8aac-06b17b16552d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Acquiring lock "3b90dbb8-66ce-435f-beae-5464720bfb3e" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 882.145916] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a8b25c06-f049-419d-8aac-06b17b16552d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Lock "3b90dbb8-66ce-435f-beae-5464720bfb3e" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 882.146417] env[63028]: INFO nova.compute.manager [None req-a8b25c06-f049-419d-8aac-06b17b16552d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: 3b90dbb8-66ce-435f-beae-5464720bfb3e] Attaching volume 0bceb8a6-ea11-4bf7-9f44-f543c4a23bb0 to /dev/sdb [ 882.182994] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-869bf0f8-f54a-418f-8b71-7cf81dd14ed9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.190279] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faeeee9f-06ed-462e-a003-7d7594344b9f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.203287] env[63028]: DEBUG nova.virt.block_device [None req-a8b25c06-f049-419d-8aac-06b17b16552d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: 
3b90dbb8-66ce-435f-beae-5464720bfb3e] Updating existing volume attachment record: 36d48735-48f7-409a-a719-58391906aa7c {{(pid=63028) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 882.339605] env[63028]: DEBUG oslo_vmware.api [None req-05595fcf-c280-4b38-9166-e6728566c5d0 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Task: {'id': task-2735745, 'name': Rename_Task, 'duration_secs': 0.171799} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.340056] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-05595fcf-c280-4b38-9166-e6728566c5d0 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: a2f7d7c6-7931-4b21-a29c-bb9965577210] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 882.340219] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7d8fb883-f283-467f-9c71-d8eba3372f17 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.346361] env[63028]: DEBUG oslo_vmware.api [None req-05595fcf-c280-4b38-9166-e6728566c5d0 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Waiting for the task: (returnval){ [ 882.346361] env[63028]: value = "task-2735747" [ 882.346361] env[63028]: _type = "Task" [ 882.346361] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.356390] env[63028]: DEBUG oslo_vmware.api [None req-05595fcf-c280-4b38-9166-e6728566c5d0 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Task: {'id': task-2735747, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.406098] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb8bec7a-3978-49e5-85ba-16d441a93fb1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.424197] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3c9903d-1a43-4a5d-b814-191b80d248e4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.431406] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c4971efa-4f64-4aab-bb96-ae0781bd593b tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Updating instance 'c06813c4-472d-4bf9-84ec-0d01306bcd48' progress to 83 {{(pid=63028) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 882.445815] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c2c8e9d3-c866-44bd-b81e-c77e80e04316 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Releasing lock "refresh_cache-70147f2f-0b5e-4343-84e4-8bc195a5485d" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 882.446135] env[63028]: DEBUG nova.compute.manager [None req-c2c8e9d3-c866-44bd-b81e-c77e80e04316 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 70147f2f-0b5e-4343-84e4-8bc195a5485d] Instance network_info: |[{"id": "88cffe20-d5e9-44e6-8180-39722a305d2e", "address": "fa:16:3e:63:52:4a", "network": {"id": "37013470-5bda-41a6-826c-03ac0d423c85", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1214578902-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "98d3fdfda1694b2f9f5985831ea77a21", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8868dc2-7767-49c0-a2ed-e611fcbf8414", "external-id": "nsx-vlan-transportzone-158", "segmentation_id": 158, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap88cffe20-d5", "ovs_interfaceid": "88cffe20-d5e9-44e6-8180-39722a305d2e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 882.446527] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c2c8e9d3-c866-44bd-b81e-c77e80e04316 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 70147f2f-0b5e-4343-84e4-8bc195a5485d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:63:52:4a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c8868dc2-7767-49c0-a2ed-e611fcbf8414', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '88cffe20-d5e9-44e6-8180-39722a305d2e', 'vif_model': 'vmxnet3'}] 
{{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 882.453933] env[63028]: DEBUG oslo.service.loopingcall [None req-c2c8e9d3-c866-44bd-b81e-c77e80e04316 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 882.454468] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 70147f2f-0b5e-4343-84e4-8bc195a5485d] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 882.454696] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-eb18c490-ec4f-4033-b298-afb8df9d8962 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.473453] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 882.473453] env[63028]: value = "task-2735748" [ 882.473453] env[63028]: _type = "Task" [ 882.473453] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.482274] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735748, 'name': CreateVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.540942] env[63028]: DEBUG oslo_vmware.api [None req-b1d98946-1bac-414d-b354-aa24f213fb3a tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5258c49f-ec6b-57fa-a3fe-fc6f475abb61, 'name': SearchDatastore_Task, 'duration_secs': 0.008564} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.541826] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-089aeaa9-a0a4-4b60-8cb9-d50f9567e17a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.548757] env[63028]: DEBUG oslo_vmware.api [None req-b1d98946-1bac-414d-b354-aa24f213fb3a tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Waiting for the task: (returnval){ [ 882.548757] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52ebe795-e29f-8344-7ea2-7207a8bc12e9" [ 882.548757] env[63028]: _type = "Task" [ 882.548757] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.559664] env[63028]: DEBUG oslo_vmware.api [None req-b1d98946-1bac-414d-b354-aa24f213fb3a tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52ebe795-e29f-8344-7ea2-7207a8bc12e9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.601376] env[63028]: DEBUG oslo_concurrency.lockutils [None req-e73039fe-033f-4adc-a8b7-958d7706819e tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Lock "514c83d1-4fb1-435c-8c25-aa112c744131" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 39.079s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 882.856879] env[63028]: DEBUG oslo_vmware.api [None req-05595fcf-c280-4b38-9166-e6728566c5d0 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Task: {'id': task-2735747, 'name': PowerOnVM_Task, 'duration_secs': 0.466413} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.859879] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-05595fcf-c280-4b38-9166-e6728566c5d0 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: a2f7d7c6-7931-4b21-a29c-bb9965577210] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 882.860142] env[63028]: INFO nova.compute.manager [None req-05595fcf-c280-4b38-9166-e6728566c5d0 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: a2f7d7c6-7931-4b21-a29c-bb9965577210] Took 7.84 seconds to spawn the instance on the hypervisor. [ 882.860378] env[63028]: DEBUG nova.compute.manager [None req-05595fcf-c280-4b38-9166-e6728566c5d0 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: a2f7d7c6-7931-4b21-a29c-bb9965577210] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 882.861489] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07a2cb15-7e66-4ec4-9bbd-bc5585da68da {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.899189] env[63028]: DEBUG nova.compute.manager [req-a8675245-caf5-4068-ab5f-d26b3363eaa1 req-c05107c7-f16b-4ec7-ba47-2f2fb9217485 service nova] [instance: 70147f2f-0b5e-4343-84e4-8bc195a5485d] Received event network-changed-88cffe20-d5e9-44e6-8180-39722a305d2e {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 882.899189] env[63028]: DEBUG nova.compute.manager [req-a8675245-caf5-4068-ab5f-d26b3363eaa1 req-c05107c7-f16b-4ec7-ba47-2f2fb9217485 service nova] [instance: 70147f2f-0b5e-4343-84e4-8bc195a5485d] Refreshing instance network info cache due to event network-changed-88cffe20-d5e9-44e6-8180-39722a305d2e. 
{{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 882.899189] env[63028]: DEBUG oslo_concurrency.lockutils [req-a8675245-caf5-4068-ab5f-d26b3363eaa1 req-c05107c7-f16b-4ec7-ba47-2f2fb9217485 service nova] Acquiring lock "refresh_cache-70147f2f-0b5e-4343-84e4-8bc195a5485d" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 882.899189] env[63028]: DEBUG oslo_concurrency.lockutils [req-a8675245-caf5-4068-ab5f-d26b3363eaa1 req-c05107c7-f16b-4ec7-ba47-2f2fb9217485 service nova] Acquired lock "refresh_cache-70147f2f-0b5e-4343-84e4-8bc195a5485d" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 882.899189] env[63028]: DEBUG nova.network.neutron [req-a8675245-caf5-4068-ab5f-d26b3363eaa1 req-c05107c7-f16b-4ec7-ba47-2f2fb9217485 service nova] [instance: 70147f2f-0b5e-4343-84e4-8bc195a5485d] Refreshing network info cache for port 88cffe20-d5e9-44e6-8180-39722a305d2e {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 882.940961] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c4971efa-4f64-4aab-bb96-ae0781bd593b tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Updating instance 'c06813c4-472d-4bf9-84ec-0d01306bcd48' progress to 100 {{(pid=63028) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 882.983655] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735748, 'name': CreateVM_Task, 'duration_secs': 0.351722} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.986325] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 70147f2f-0b5e-4343-84e4-8bc195a5485d] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 882.987892] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c2c8e9d3-c866-44bd-b81e-c77e80e04316 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 882.987987] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c2c8e9d3-c866-44bd-b81e-c77e80e04316 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 882.988306] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c2c8e9d3-c866-44bd-b81e-c77e80e04316 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 882.988798] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1cf964bc-e2f6-4590-a289-3c43ce28e25f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.993634] env[63028]: DEBUG oslo_vmware.api [None 
req-c2c8e9d3-c866-44bd-b81e-c77e80e04316 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Waiting for the task: (returnval){ [ 882.993634] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]520f5323-7efc-52ab-a0e7-a859244f8d6b" [ 882.993634] env[63028]: _type = "Task" [ 882.993634] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.007055] env[63028]: DEBUG oslo_vmware.api [None req-c2c8e9d3-c866-44bd-b81e-c77e80e04316 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]520f5323-7efc-52ab-a0e7-a859244f8d6b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.058410] env[63028]: DEBUG oslo_vmware.api [None req-b1d98946-1bac-414d-b354-aa24f213fb3a tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52ebe795-e29f-8344-7ea2-7207a8bc12e9, 'name': SearchDatastore_Task, 'duration_secs': 0.01117} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.061159] env[63028]: DEBUG oslo_concurrency.lockutils [None req-b1d98946-1bac-414d-b354-aa24f213fb3a tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 883.061474] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-b1d98946-1bac-414d-b354-aa24f213fb3a tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] f0ca0d73-d428-4b8c-acac-a80b7b7dd793/f2ba2026-3f4b-431c-97c1-c4ba582a9907-rescue.vmdk. {{(pid=63028) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 883.062402] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d5b24702-b673-45ca-8638-7c05a90ff1cb {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.070215] env[63028]: DEBUG oslo_vmware.api [None req-b1d98946-1bac-414d-b354-aa24f213fb3a tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Waiting for the task: (returnval){ [ 883.070215] env[63028]: value = "task-2735751" [ 883.070215] env[63028]: _type = "Task" [ 883.070215] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.082093] env[63028]: DEBUG oslo_vmware.api [None req-b1d98946-1bac-414d-b354-aa24f213fb3a tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2735751, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.094760] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bba71d8f-324a-4939-aaa8-abdf61f987de {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.103080] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7589382a-acbc-4e3a-87a1-d26d2a867fe6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.135897] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-625d3a89-a470-4ec7-82c9-e83fe19e92f0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.144593] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fc72e65-4f10-4baa-9c04-270014f900f0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.159576] env[63028]: DEBUG nova.compute.provider_tree [None req-376e64d1-6feb-42d8-98f2-d6ae07252d2d tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 883.383230] env[63028]: INFO nova.compute.manager [None req-05595fcf-c280-4b38-9166-e6728566c5d0 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: a2f7d7c6-7931-4b21-a29c-bb9965577210] Took 43.10 seconds to build instance. [ 883.508829] env[63028]: DEBUG oslo_vmware.api [None req-c2c8e9d3-c866-44bd-b81e-c77e80e04316 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]520f5323-7efc-52ab-a0e7-a859244f8d6b, 'name': SearchDatastore_Task, 'duration_secs': 0.016392} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.509216] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c2c8e9d3-c866-44bd-b81e-c77e80e04316 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 883.509486] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c2c8e9d3-c866-44bd-b81e-c77e80e04316 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 70147f2f-0b5e-4343-84e4-8bc195a5485d] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 883.509752] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c2c8e9d3-c866-44bd-b81e-c77e80e04316 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 883.509960] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c2c8e9d3-c866-44bd-b81e-c77e80e04316 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 883.510173] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-c2c8e9d3-c866-44bd-b81e-c77e80e04316 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 883.510582] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1dadabf7-a8a9-4d5b-976e-c662919f0248 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.520226] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-c2c8e9d3-c866-44bd-b81e-c77e80e04316 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 883.520330] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c2c8e9d3-c866-44bd-b81e-c77e80e04316 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 883.521153] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2aecebf5-89d2-4e3e-b645-f06d2986518e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.526549] env[63028]: DEBUG oslo_vmware.api [None req-c2c8e9d3-c866-44bd-b81e-c77e80e04316 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Waiting for the task: (returnval){ [ 883.526549] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]525df439-65c2-8d4d-27c1-f41a3cb85f8c" [ 883.526549] env[63028]: _type = "Task" [ 883.526549] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.535140] env[63028]: DEBUG oslo_vmware.api [None req-c2c8e9d3-c866-44bd-b81e-c77e80e04316 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]525df439-65c2-8d4d-27c1-f41a3cb85f8c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.579828] env[63028]: DEBUG oslo_vmware.api [None req-b1d98946-1bac-414d-b354-aa24f213fb3a tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2735751, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.634579] env[63028]: DEBUG nova.network.neutron [req-a8675245-caf5-4068-ab5f-d26b3363eaa1 req-c05107c7-f16b-4ec7-ba47-2f2fb9217485 service nova] [instance: 70147f2f-0b5e-4343-84e4-8bc195a5485d] Updated VIF entry in instance network info cache for port 88cffe20-d5e9-44e6-8180-39722a305d2e. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 883.634963] env[63028]: DEBUG nova.network.neutron [req-a8675245-caf5-4068-ab5f-d26b3363eaa1 req-c05107c7-f16b-4ec7-ba47-2f2fb9217485 service nova] [instance: 70147f2f-0b5e-4343-84e4-8bc195a5485d] Updating instance_info_cache with network_info: [{"id": "88cffe20-d5e9-44e6-8180-39722a305d2e", "address": "fa:16:3e:63:52:4a", "network": {"id": "37013470-5bda-41a6-826c-03ac0d423c85", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1214578902-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "98d3fdfda1694b2f9f5985831ea77a21", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8868dc2-7767-49c0-a2ed-e611fcbf8414", "external-id": "nsx-vlan-transportzone-158", "segmentation_id": 158, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap88cffe20-d5", "ovs_interfaceid": "88cffe20-d5e9-44e6-8180-39722a305d2e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 883.663847] env[63028]: DEBUG nova.scheduler.client.report [None req-376e64d1-6feb-42d8-98f2-d6ae07252d2d tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 883.885152] env[63028]: DEBUG oslo_concurrency.lockutils [None req-05595fcf-c280-4b38-9166-e6728566c5d0 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Lock "a2f7d7c6-7931-4b21-a29c-bb9965577210" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 65.883s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 884.037163] env[63028]: DEBUG oslo_vmware.api [None req-c2c8e9d3-c866-44bd-b81e-c77e80e04316 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]525df439-65c2-8d4d-27c1-f41a3cb85f8c, 'name': SearchDatastore_Task, 'duration_secs': 0.017937} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.038160] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b4a483ae-513a-42bd-8208-9241c7a729c9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.043016] env[63028]: DEBUG oslo_vmware.api [None req-c2c8e9d3-c866-44bd-b81e-c77e80e04316 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Waiting for the task: (returnval){ [ 884.043016] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52bf543a-d9aa-2c80-9af8-066e2cd49f54" [ 884.043016] env[63028]: _type = "Task" [ 884.043016] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.050156] env[63028]: DEBUG oslo_vmware.api [None req-c2c8e9d3-c866-44bd-b81e-c77e80e04316 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52bf543a-d9aa-2c80-9af8-066e2cd49f54, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.079671] env[63028]: DEBUG oslo_vmware.api [None req-b1d98946-1bac-414d-b354-aa24f213fb3a tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2735751, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.595274} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.079923] env[63028]: INFO nova.virt.vmwareapi.ds_util [None req-b1d98946-1bac-414d-b354-aa24f213fb3a tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] f0ca0d73-d428-4b8c-acac-a80b7b7dd793/f2ba2026-3f4b-431c-97c1-c4ba582a9907-rescue.vmdk. 
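The CopyVirtualDisk_Task sequence above (invoke the task through the VirtualDiskManager, then poll it with wait_for_task until "completed successfully") follows the standard oslo.vmware pattern. A minimal sketch of that pattern, assuming an already-established oslo_vmware.api.VMwareAPISession (`session`) and a datacenter managed-object reference (`dc_ref`); the helper name and datastore paths below are illustrative, not taken from the Nova source:

```python
from oslo_vmware import api as vmware_api  # noqa: F401  (session type assumed)


def copy_disk_sketch(session, dc_ref, src_path, dst_path):
    """Copy a VMDK and block until the vCenter task finishes.

    Mirrors the log flow above: CopyVirtualDisk_Task is invoked against the
    VirtualDiskManager, then wait_for_task() polls it ("progress is 0% ...
    77% ... completed successfully") and raises if the task errors out.
    """
    vdm = session.vim.service_content.virtualDiskManager
    task = session.invoke_api(
        session.vim, 'CopyVirtualDisk_Task', vdm,
        sourceName=src_path, sourceDatacenter=dc_ref,
        destName=dst_path, destDatacenter=dc_ref)
    return session.wait_for_task(task)


# Illustrative usage (paths are placeholders, not values from this log):
# copy_disk_sketch(session, dc_ref,
#                  '[datastore1] cache/base.vmdk',
#                  '[datastore1] instance/instance-rescue.vmdk')
```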
[ 884.080670] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5296a4e-501d-4b4d-b746-6460b33258de {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.106090] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-b1d98946-1bac-414d-b354-aa24f213fb3a tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: f0ca0d73-d428-4b8c-acac-a80b7b7dd793] Reconfiguring VM instance instance-00000047 to attach disk [datastore1] f0ca0d73-d428-4b8c-acac-a80b7b7dd793/f2ba2026-3f4b-431c-97c1-c4ba582a9907-rescue.vmdk or device None with type thin {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 884.106412] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5adba358-82ca-4f43-b6fb-005bff2ca7aa {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.125095] env[63028]: DEBUG oslo_vmware.api [None req-b1d98946-1bac-414d-b354-aa24f213fb3a tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Waiting for the task: (returnval){ [ 884.125095] env[63028]: value = "task-2735752" [ 884.125095] env[63028]: _type = "Task" [ 884.125095] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.133127] env[63028]: DEBUG oslo_vmware.api [None req-b1d98946-1bac-414d-b354-aa24f213fb3a tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2735752, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.137893] env[63028]: DEBUG oslo_concurrency.lockutils [req-a8675245-caf5-4068-ab5f-d26b3363eaa1 req-c05107c7-f16b-4ec7-ba47-2f2fb9217485 service nova] Releasing lock "refresh_cache-70147f2f-0b5e-4343-84e4-8bc195a5485d" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 884.167911] env[63028]: DEBUG oslo_concurrency.lockutils [None req-376e64d1-6feb-42d8-98f2-d6ae07252d2d tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.097s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 884.170284] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f456b00f-8e30-4002-90c9-4a456674e99d tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 36.346s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 884.171877] env[63028]: INFO nova.compute.claims [None req-f456b00f-8e30-4002-90c9-4a456674e99d tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: b77ba7d6-305e-4b60-a4b7-9353c12c3920] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 884.192206] env[63028]: INFO nova.scheduler.client.report [None req-376e64d1-6feb-42d8-98f2-d6ae07252d2d tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Deleted allocations for instance 56d39801-f3e7-4cfe-a038-6a5e762bfda8 [ 884.267411] env[63028]: DEBUG oslo_concurrency.lockutils [None req-43a912bd-53cd-4738-bc4d-db229f7635f9 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Acquiring lock "a2f7d7c6-7931-4b21-a29c-bb9965577210" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 884.267679] env[63028]: DEBUG oslo_concurrency.lockutils [None req-43a912bd-53cd-4738-bc4d-db229f7635f9 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Lock "a2f7d7c6-7931-4b21-a29c-bb9965577210" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 884.267884] env[63028]: DEBUG oslo_concurrency.lockutils [None req-43a912bd-53cd-4738-bc4d-db229f7635f9 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Acquiring lock "a2f7d7c6-7931-4b21-a29c-bb9965577210-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 884.268076] env[63028]: DEBUG oslo_concurrency.lockutils [None req-43a912bd-53cd-4738-bc4d-db229f7635f9 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Lock 
"a2f7d7c6-7931-4b21-a29c-bb9965577210-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 884.268249] env[63028]: DEBUG oslo_concurrency.lockutils [None req-43a912bd-53cd-4738-bc4d-db229f7635f9 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Lock "a2f7d7c6-7931-4b21-a29c-bb9965577210-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 884.270542] env[63028]: INFO nova.compute.manager [None req-43a912bd-53cd-4738-bc4d-db229f7635f9 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: a2f7d7c6-7931-4b21-a29c-bb9965577210] Terminating instance [ 884.554085] env[63028]: DEBUG oslo_vmware.api [None req-c2c8e9d3-c866-44bd-b81e-c77e80e04316 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52bf543a-d9aa-2c80-9af8-066e2cd49f54, 'name': SearchDatastore_Task, 'duration_secs': 0.052503} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.554361] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c2c8e9d3-c866-44bd-b81e-c77e80e04316 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 884.554618] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2c8e9d3-c866-44bd-b81e-c77e80e04316 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] 70147f2f-0b5e-4343-84e4-8bc195a5485d/70147f2f-0b5e-4343-84e4-8bc195a5485d.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 884.554901] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-32bd43ec-7264-4de1-8f52-affe7bfce306 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.561934] env[63028]: DEBUG oslo_vmware.api [None req-c2c8e9d3-c866-44bd-b81e-c77e80e04316 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Waiting for the task: (returnval){ [ 884.561934] env[63028]: value = "task-2735753" [ 884.561934] env[63028]: _type = "Task" [ 884.561934] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.569984] env[63028]: DEBUG oslo_vmware.api [None req-c2c8e9d3-c866-44bd-b81e-c77e80e04316 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Task: {'id': task-2735753, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.635118] env[63028]: DEBUG oslo_vmware.api [None req-b1d98946-1bac-414d-b354-aa24f213fb3a tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2735752, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.704225] env[63028]: DEBUG oslo_concurrency.lockutils [None req-376e64d1-6feb-42d8-98f2-d6ae07252d2d tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Lock "56d39801-f3e7-4cfe-a038-6a5e762bfda8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 40.984s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 884.777329] env[63028]: DEBUG nova.compute.manager [None req-43a912bd-53cd-4738-bc4d-db229f7635f9 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: a2f7d7c6-7931-4b21-a29c-bb9965577210] Start destroying the instance on the hypervisor. {{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 884.777681] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-43a912bd-53cd-4738-bc4d-db229f7635f9 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: a2f7d7c6-7931-4b21-a29c-bb9965577210] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 884.778791] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-455e3236-7d74-43f4-91d7-93e5fad04e22 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.787459] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-43a912bd-53cd-4738-bc4d-db229f7635f9 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: a2f7d7c6-7931-4b21-a29c-bb9965577210] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 884.787530] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a63d7505-e574-46f5-92b9-dac80f5bdda0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.795212] env[63028]: DEBUG oslo_vmware.api [None req-43a912bd-53cd-4738-bc4d-db229f7635f9 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Waiting for the task: (returnval){ [ 884.795212] env[63028]: value = "task-2735756" [ 884.795212] env[63028]: _type = "Task" [ 884.795212] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.804894] env[63028]: DEBUG oslo_vmware.api [None req-43a912bd-53cd-4738-bc4d-db229f7635f9 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Task: {'id': task-2735756, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.072114] env[63028]: DEBUG oslo_vmware.api [None req-c2c8e9d3-c866-44bd-b81e-c77e80e04316 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Task: {'id': task-2735753, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.136553] env[63028]: DEBUG oslo_vmware.api [None req-b1d98946-1bac-414d-b354-aa24f213fb3a tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2735752, 'name': ReconfigVM_Task, 'duration_secs': 0.943948} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.136903] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-b1d98946-1bac-414d-b354-aa24f213fb3a tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: f0ca0d73-d428-4b8c-acac-a80b7b7dd793] Reconfigured VM instance instance-00000047 to attach disk [datastore1] f0ca0d73-d428-4b8c-acac-a80b7b7dd793/f2ba2026-3f4b-431c-97c1-c4ba582a9907-rescue.vmdk or device None with type thin {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 885.137859] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-118bef03-0519-48fc-abff-8d8ff63ec416 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.165576] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8cdafd6e-f95d-4e92-97eb-57789bae6df6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.189042] env[63028]: DEBUG oslo_vmware.api [None req-b1d98946-1bac-414d-b354-aa24f213fb3a tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Waiting for the task: (returnval){ [ 885.189042] env[63028]: value = "task-2735757" [ 885.189042] env[63028]: _type = "Task" [ 885.189042] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.199743] env[63028]: DEBUG oslo_vmware.api [None req-b1d98946-1bac-414d-b354-aa24f213fb3a tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2735757, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.306327] env[63028]: DEBUG oslo_vmware.api [None req-43a912bd-53cd-4738-bc4d-db229f7635f9 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Task: {'id': task-2735756, 'name': PowerOffVM_Task, 'duration_secs': 0.216062} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.308983] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-43a912bd-53cd-4738-bc4d-db229f7635f9 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: a2f7d7c6-7931-4b21-a29c-bb9965577210] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 885.309179] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-43a912bd-53cd-4738-bc4d-db229f7635f9 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: a2f7d7c6-7931-4b21-a29c-bb9965577210] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 885.309604] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e9d37797-7472-4905-8de7-b447b230496b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.370229] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-43a912bd-53cd-4738-bc4d-db229f7635f9 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: a2f7d7c6-7931-4b21-a29c-bb9965577210] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 885.370439] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-43a912bd-53cd-4738-bc4d-db229f7635f9 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: a2f7d7c6-7931-4b21-a29c-bb9965577210] Deleting contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 885.370627] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-43a912bd-53cd-4738-bc4d-db229f7635f9 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Deleting the datastore file [datastore2] a2f7d7c6-7931-4b21-a29c-bb9965577210 {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 885.373491] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e45313d9-aaa5-4ac8-b9b3-8c3aad129d63 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.381304] env[63028]: DEBUG oslo_vmware.api [None req-43a912bd-53cd-4738-bc4d-db229f7635f9 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Waiting for the task: (returnval){ [ 885.381304] env[63028]: value = "task-2735759" [ 885.381304] env[63028]: _type = "Task" [ 885.381304] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.390967] env[63028]: DEBUG oslo_vmware.api [None req-43a912bd-53cd-4738-bc4d-db229f7635f9 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Task: {'id': task-2735759, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.571807] env[63028]: DEBUG oslo_vmware.api [None req-c2c8e9d3-c866-44bd-b81e-c77e80e04316 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Task: {'id': task-2735753, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.677855} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.574787] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2c8e9d3-c866-44bd-b81e-c77e80e04316 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] 70147f2f-0b5e-4343-84e4-8bc195a5485d/70147f2f-0b5e-4343-84e4-8bc195a5485d.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 885.575089] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c2c8e9d3-c866-44bd-b81e-c77e80e04316 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 70147f2f-0b5e-4343-84e4-8bc195a5485d] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 885.575763] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d3764765-f575-49c5-9883-06a6e64ab972 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.582143] env[63028]: DEBUG oslo_vmware.api [None req-c2c8e9d3-c866-44bd-b81e-c77e80e04316 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Waiting for the task: (returnval){ [ 885.582143] env[63028]: value = "task-2735760" [ 885.582143] env[63028]: _type = "Task" [ 885.582143] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.593793] env[63028]: DEBUG oslo_vmware.api [None req-c2c8e9d3-c866-44bd-b81e-c77e80e04316 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Task: {'id': task-2735760, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.682862] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1cfb8bb-fe0a-44ff-a626-54a10a223b84 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.685750] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7fd88a7b-0c2e-4205-9b5e-08f7b653f3a2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Acquiring lock "c06813c4-472d-4bf9-84ec-0d01306bcd48" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 885.685948] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7fd88a7b-0c2e-4205-9b5e-08f7b653f3a2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Lock "c06813c4-472d-4bf9-84ec-0d01306bcd48" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 885.686142] env[63028]: DEBUG nova.compute.manager [None req-7fd88a7b-0c2e-4205-9b5e-08f7b653f3a2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Going to confirm migration 3 {{(pid=63028) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 885.695227] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05f033b3-fc39-4d15-94b0-4587526f01c9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.703027] env[63028]: DEBUG oslo_vmware.api [None req-b1d98946-1bac-414d-b354-aa24f213fb3a tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2735757, 'name': ReconfigVM_Task, 'duration_secs': 0.232893} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.726530] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1d98946-1bac-414d-b354-aa24f213fb3a tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: f0ca0d73-d428-4b8c-acac-a80b7b7dd793] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 885.727885] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1fbf2089-5324-41b5-96e9-4a5d3c833e49 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.730008] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41943b84-b787-4945-8915-aebed16bcb9a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.738366] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-437d232e-a9d3-422e-b3c0-4c3910403255 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.743151] env[63028]: DEBUG oslo_vmware.api [None req-b1d98946-1bac-414d-b354-aa24f213fb3a tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Waiting for the task: (returnval){ [ 885.743151] env[63028]: value = "task-2735761" [ 885.743151] env[63028]: _type = "Task" [ 885.743151] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.755755] env[63028]: DEBUG nova.compute.provider_tree [None req-f456b00f-8e30-4002-90c9-4a456674e99d tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 885.761964] env[63028]: DEBUG oslo_vmware.api [None req-b1d98946-1bac-414d-b354-aa24f213fb3a tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2735761, 'name': PowerOnVM_Task} progress is 33%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.890290] env[63028]: DEBUG oslo_vmware.api [None req-43a912bd-53cd-4738-bc4d-db229f7635f9 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Task: {'id': task-2735759, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.178335} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.890534] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-43a912bd-53cd-4738-bc4d-db229f7635f9 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 885.890720] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-43a912bd-53cd-4738-bc4d-db229f7635f9 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: a2f7d7c6-7931-4b21-a29c-bb9965577210] Deleted contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 885.890895] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-43a912bd-53cd-4738-bc4d-db229f7635f9 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: a2f7d7c6-7931-4b21-a29c-bb9965577210] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 885.891119] env[63028]: INFO nova.compute.manager [None req-43a912bd-53cd-4738-bc4d-db229f7635f9 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] [instance: a2f7d7c6-7931-4b21-a29c-bb9965577210] Took 1.11 seconds to destroy the instance on the hypervisor. [ 885.891399] env[63028]: DEBUG oslo.service.loopingcall [None req-43a912bd-53cd-4738-bc4d-db229f7635f9 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 885.891596] env[63028]: DEBUG nova.compute.manager [-] [instance: a2f7d7c6-7931-4b21-a29c-bb9965577210] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 885.891689] env[63028]: DEBUG nova.network.neutron [-] [instance: a2f7d7c6-7931-4b21-a29c-bb9965577210] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 886.092532] env[63028]: DEBUG oslo_vmware.api [None req-c2c8e9d3-c866-44bd-b81e-c77e80e04316 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Task: {'id': task-2735760, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067826} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.092819] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c2c8e9d3-c866-44bd-b81e-c77e80e04316 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 70147f2f-0b5e-4343-84e4-8bc195a5485d] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 886.093646] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-552f0671-be2b-4ad2-9ad7-b58fce790585 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.117756] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-c2c8e9d3-c866-44bd-b81e-c77e80e04316 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 70147f2f-0b5e-4343-84e4-8bc195a5485d] Reconfiguring VM instance instance-0000004a to attach disk [datastore2] 70147f2f-0b5e-4343-84e4-8bc195a5485d/70147f2f-0b5e-4343-84e4-8bc195a5485d.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 886.118123] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c49653f6-f753-4bd4-82c7-72d7f64bf7df {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.139392] env[63028]: DEBUG oslo_vmware.api [None req-c2c8e9d3-c866-44bd-b81e-c77e80e04316 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Waiting for the task: (returnval){ [ 886.139392] env[63028]: value = "task-2735762" [ 886.139392] env[63028]: _type = "Task" [ 886.139392] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.148892] env[63028]: DEBUG oslo_vmware.api [None req-c2c8e9d3-c866-44bd-b81e-c77e80e04316 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Task: {'id': task-2735762, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.179510] env[63028]: DEBUG nova.compute.manager [req-bd389898-0582-450d-833c-eb21f4412e6b req-2f9cf32e-3762-48a3-a961-f23d9c38cc36 service nova] [instance: a2f7d7c6-7931-4b21-a29c-bb9965577210] Received event network-vif-deleted-e8d9e159-a748-455b-ba7e-dad129aab175 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 886.179799] env[63028]: INFO nova.compute.manager [req-bd389898-0582-450d-833c-eb21f4412e6b req-2f9cf32e-3762-48a3-a961-f23d9c38cc36 service nova] [instance: a2f7d7c6-7931-4b21-a29c-bb9965577210] Neutron deleted interface e8d9e159-a748-455b-ba7e-dad129aab175; detaching it from the instance and deleting it from the info cache [ 886.180037] env[63028]: DEBUG nova.network.neutron [req-bd389898-0582-450d-833c-eb21f4412e6b req-2f9cf32e-3762-48a3-a961-f23d9c38cc36 service nova] [instance: a2f7d7c6-7931-4b21-a29c-bb9965577210] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 886.221976] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7fd88a7b-0c2e-4205-9b5e-08f7b653f3a2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Acquiring lock "refresh_cache-c06813c4-472d-4bf9-84ec-0d01306bcd48" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 886.222166] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7fd88a7b-0c2e-4205-9b5e-08f7b653f3a2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Acquired lock "refresh_cache-c06813c4-472d-4bf9-84ec-0d01306bcd48" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 886.222355] env[63028]: DEBUG nova.network.neutron [None req-7fd88a7b-0c2e-4205-9b5e-08f7b653f3a2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 886.222549] env[63028]: DEBUG nova.objects.instance [None req-7fd88a7b-0c2e-4205-9b5e-08f7b653f3a2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Lazy-loading 'info_cache' on Instance uuid c06813c4-472d-4bf9-84ec-0d01306bcd48 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 886.253819] env[63028]: DEBUG oslo_vmware.api [None req-b1d98946-1bac-414d-b354-aa24f213fb3a tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2735761, 'name': PowerOnVM_Task, 'duration_secs': 0.416004} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.254322] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1d98946-1bac-414d-b354-aa24f213fb3a tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: f0ca0d73-d428-4b8c-acac-a80b7b7dd793] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 886.256892] env[63028]: DEBUG nova.compute.manager [None req-b1d98946-1bac-414d-b354-aa24f213fb3a tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: f0ca0d73-d428-4b8c-acac-a80b7b7dd793] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 886.257763] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f820a518-77af-409b-9ba9-4ae3e389c9d7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.260924] env[63028]: DEBUG nova.scheduler.client.report [None req-f456b00f-8e30-4002-90c9-4a456674e99d tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 886.650087] env[63028]: DEBUG oslo_vmware.api [None req-c2c8e9d3-c866-44bd-b81e-c77e80e04316 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Task: {'id': task-2735762, 'name': ReconfigVM_Task, 'duration_secs': 0.280651} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.650404] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-c2c8e9d3-c866-44bd-b81e-c77e80e04316 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 70147f2f-0b5e-4343-84e4-8bc195a5485d] Reconfigured VM instance instance-0000004a to attach disk [datastore2] 70147f2f-0b5e-4343-84e4-8bc195a5485d/70147f2f-0b5e-4343-84e4-8bc195a5485d.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 886.651041] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-baf10c36-206c-4c97-80c8-3e9916165c78 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.657494] env[63028]: DEBUG oslo_vmware.api [None req-c2c8e9d3-c866-44bd-b81e-c77e80e04316 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Waiting for the task: (returnval){ [ 886.657494] env[63028]: value = "task-2735763" [ 886.657494] env[63028]: _type = "Task" [ 886.657494] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.662067] env[63028]: DEBUG nova.network.neutron [-] [instance: a2f7d7c6-7931-4b21-a29c-bb9965577210] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 886.667730] env[63028]: DEBUG oslo_vmware.api [None req-c2c8e9d3-c866-44bd-b81e-c77e80e04316 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Task: {'id': task-2735763, 'name': Rename_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.683152] env[63028]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9ac7178b-4f24-4281-b0da-25fbf3c011d8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.694780] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7afecd4b-94fe-4fdc-b258-d8d1d0f12bce {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.742065] env[63028]: DEBUG nova.compute.manager [req-bd389898-0582-450d-833c-eb21f4412e6b req-2f9cf32e-3762-48a3-a961-f23d9c38cc36 service nova] [instance: a2f7d7c6-7931-4b21-a29c-bb9965577210] Detach interface failed, port_id=e8d9e159-a748-455b-ba7e-dad129aab175, reason: Instance a2f7d7c6-7931-4b21-a29c-bb9965577210 could not be found. {{(pid=63028) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 886.766988] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f456b00f-8e30-4002-90c9-4a456674e99d tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.596s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 886.767249] env[63028]: DEBUG nova.compute.manager [None req-f456b00f-8e30-4002-90c9-4a456674e99d tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: b77ba7d6-305e-4b60-a4b7-9353c12c3920] Start building networks asynchronously for instance. 
{{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 886.770478] env[63028]: DEBUG oslo_concurrency.lockutils [None req-e3c5b66a-8429-42f3-9cb6-fcb4bd521431 tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 34.884s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 886.770607] env[63028]: DEBUG nova.objects.instance [None req-e3c5b66a-8429-42f3-9cb6-fcb4bd521431 tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] [instance: 13e0ca05-3ab3-43e2-8b0d-8045e26d6723] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63028) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 887.167563] env[63028]: DEBUG oslo_vmware.api [None req-c2c8e9d3-c866-44bd-b81e-c77e80e04316 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Task: {'id': task-2735763, 'name': Rename_Task, 'duration_secs': 0.15431} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.167845] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2c8e9d3-c866-44bd-b81e-c77e80e04316 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 70147f2f-0b5e-4343-84e4-8bc195a5485d] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 887.168337] env[63028]: INFO nova.compute.manager [-] [instance: a2f7d7c6-7931-4b21-a29c-bb9965577210] Took 1.28 seconds to deallocate network for instance. [ 887.168554] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-de051cd2-4c1d-4b59-b87b-881a1f6daf4e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.175640] env[63028]: DEBUG oslo_vmware.api [None req-c2c8e9d3-c866-44bd-b81e-c77e80e04316 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Waiting for the task: (returnval){ [ 887.175640] env[63028]: value = "task-2735764" [ 887.175640] env[63028]: _type = "Task" [ 887.175640] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.183274] env[63028]: DEBUG oslo_vmware.api [None req-c2c8e9d3-c866-44bd-b81e-c77e80e04316 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Task: {'id': task-2735764, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.250212] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-a8b25c06-f049-419d-8aac-06b17b16552d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: 3b90dbb8-66ce-435f-beae-5464720bfb3e] Volume attach. 
Driver type: vmdk {{(pid=63028) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 887.250473] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-a8b25c06-f049-419d-8aac-06b17b16552d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: 3b90dbb8-66ce-435f-beae-5464720bfb3e] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-550793', 'volume_id': '0bceb8a6-ea11-4bf7-9f44-f543c4a23bb0', 'name': 'volume-0bceb8a6-ea11-4bf7-9f44-f543c4a23bb0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '3b90dbb8-66ce-435f-beae-5464720bfb3e', 'attached_at': '', 'detached_at': '', 'volume_id': '0bceb8a6-ea11-4bf7-9f44-f543c4a23bb0', 'serial': '0bceb8a6-ea11-4bf7-9f44-f543c4a23bb0'} {{(pid=63028) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 887.251351] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d48fc733-7cf9-4068-9397-b424d7fbccc1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.269113] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94bebfd8-71ea-4651-b0ab-ecc64b94ef59 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.275442] env[63028]: DEBUG nova.compute.utils [None req-f456b00f-8e30-4002-90c9-4a456674e99d tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 887.292715] env[63028]: DEBUG nova.compute.manager [None req-f456b00f-8e30-4002-90c9-4a456674e99d tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: b77ba7d6-305e-4b60-a4b7-9353c12c3920] Allocating IP information in the background. {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 887.292914] env[63028]: DEBUG nova.network.neutron [None req-f456b00f-8e30-4002-90c9-4a456674e99d tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: b77ba7d6-305e-4b60-a4b7-9353c12c3920] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 887.306278] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-a8b25c06-f049-419d-8aac-06b17b16552d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: 3b90dbb8-66ce-435f-beae-5464720bfb3e] Reconfiguring VM instance instance-00000048 to attach disk [datastore2] volume-0bceb8a6-ea11-4bf7-9f44-f543c4a23bb0/volume-0bceb8a6-ea11-4bf7-9f44-f543c4a23bb0.vmdk or device None with type thin {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 887.307133] env[63028]: DEBUG nova.compute.manager [None req-f456b00f-8e30-4002-90c9-4a456674e99d tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: b77ba7d6-305e-4b60-a4b7-9353c12c3920] Start building block device mappings for instance. 
{{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 887.310886] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-860efe80-f933-4840-9a62-ce813775ff19 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.330390] env[63028]: DEBUG oslo_vmware.api [None req-a8b25c06-f049-419d-8aac-06b17b16552d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Waiting for the task: (returnval){ [ 887.330390] env[63028]: value = "task-2735765" [ 887.330390] env[63028]: _type = "Task" [ 887.330390] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.339055] env[63028]: DEBUG oslo_vmware.api [None req-a8b25c06-f049-419d-8aac-06b17b16552d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2735765, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.377983] env[63028]: DEBUG nova.policy [None req-f456b00f-8e30-4002-90c9-4a456674e99d tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1b48f3f2a85945379bdb33bf153bde9c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '25a6457f62d149629c09589feb1a550c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 887.675586] env[63028]: DEBUG oslo_concurrency.lockutils [None req-43a912bd-53cd-4738-bc4d-db229f7635f9 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 887.685722] env[63028]: DEBUG oslo_vmware.api [None req-c2c8e9d3-c866-44bd-b81e-c77e80e04316 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Task: {'id': task-2735764, 'name': PowerOnVM_Task, 'duration_secs': 0.461823} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.686009] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2c8e9d3-c866-44bd-b81e-c77e80e04316 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 70147f2f-0b5e-4343-84e4-8bc195a5485d] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 887.686381] env[63028]: INFO nova.compute.manager [None req-c2c8e9d3-c866-44bd-b81e-c77e80e04316 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 70147f2f-0b5e-4343-84e4-8bc195a5485d] Took 7.74 seconds to spawn the instance on the hypervisor. 
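Annotation: the Rename_Task / PowerOnVM_Task entries above show the driver kicking off a vCenter task and then polling it until it reaches a terminal state ("progress is 0%" ... "completed successfully", with a reported duration_secs). A minimal, self-contained sketch of that poll-until-done loop is below; get_task_info, the state names, and the timeout handling are hypothetical placeholders for illustration, not the real oslo.vmware wait_for_task API.

    import time

    TERMINAL_OK = "success"
    TERMINAL_ERR = "error"

    def wait_for_task(get_task_info, task_id, poll_interval=0.5, timeout=300.0):
        """Poll a task until it finishes, mirroring the 'progress is N%' /
        'completed successfully' lines in the log above."""
        deadline = time.monotonic() + timeout
        while True:
            info = get_task_info(task_id)      # e.g. {'state': 'running', 'progress': 40}
            if info["state"] == TERMINAL_OK:
                return info
            if info["state"] == TERMINAL_ERR:
                raise RuntimeError(f"task {task_id} failed: {info.get('error')}")
            if time.monotonic() > deadline:
                raise TimeoutError(f"task {task_id} still {info['state']} after {timeout}s")
            time.sleep(poll_interval)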
[ 887.686658] env[63028]: DEBUG nova.compute.manager [None req-c2c8e9d3-c866-44bd-b81e-c77e80e04316 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 70147f2f-0b5e-4343-84e4-8bc195a5485d] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 887.687447] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64239db4-9e54-420d-ac3e-e645f92a3cac {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.749358] env[63028]: DEBUG nova.network.neutron [None req-7fd88a7b-0c2e-4205-9b5e-08f7b653f3a2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Updating instance_info_cache with network_info: [{"id": "e9be02f8-7ea6-45eb-a1cb-65fb95285caf", "address": "fa:16:3e:cc:b1:42", "network": {"id": "49670a42-3caa-4492-8b32-f16f3fe77f4b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1158317332-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.229", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b4dcaef840f940bda057d0371cdc5adb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4df917f7-847a-4c0e-b0e3-69a52e4a1554", "external-id": "cl2-zone-457", "segmentation_id": 457, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape9be02f8-7e", "ovs_interfaceid": "e9be02f8-7ea6-45eb-a1cb-65fb95285caf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 887.808820] env[63028]: DEBUG oslo_concurrency.lockutils [None req-e3c5b66a-8429-42f3-9cb6-fcb4bd521431 tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.038s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 887.809998] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c70c58a8-01fc-41f4-ab97-625b838707cf tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.654s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 887.810264] env[63028]: DEBUG nova.objects.instance [None req-c70c58a8-01fc-41f4-ab97-625b838707cf tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Lazy-loading 'resources' on Instance uuid 022125c4-2b0c-4a2c-ae63-18968887316e {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 887.840888] env[63028]: DEBUG oslo_vmware.api [None 
req-a8b25c06-f049-419d-8aac-06b17b16552d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2735765, 'name': ReconfigVM_Task, 'duration_secs': 0.376912} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.843383] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-a8b25c06-f049-419d-8aac-06b17b16552d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: 3b90dbb8-66ce-435f-beae-5464720bfb3e] Reconfigured VM instance instance-00000048 to attach disk [datastore2] volume-0bceb8a6-ea11-4bf7-9f44-f543c4a23bb0/volume-0bceb8a6-ea11-4bf7-9f44-f543c4a23bb0.vmdk or device None with type thin {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 887.848878] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e45524e5-cf90-4ce9-a5b8-b8d35d05fb02 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.865643] env[63028]: DEBUG oslo_vmware.api [None req-a8b25c06-f049-419d-8aac-06b17b16552d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Waiting for the task: (returnval){ [ 887.865643] env[63028]: value = "task-2735766" [ 887.865643] env[63028]: _type = "Task" [ 887.865643] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.876324] env[63028]: DEBUG oslo_vmware.api [None req-a8b25c06-f049-419d-8aac-06b17b16552d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2735766, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.984345] env[63028]: DEBUG nova.network.neutron [None req-f456b00f-8e30-4002-90c9-4a456674e99d tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: b77ba7d6-305e-4b60-a4b7-9353c12c3920] Successfully created port: 9efd2ef2-d319-4038-ab28-44a46bd597d8 {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 888.210929] env[63028]: INFO nova.compute.manager [None req-c2c8e9d3-c866-44bd-b81e-c77e80e04316 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 70147f2f-0b5e-4343-84e4-8bc195a5485d] Took 45.85 seconds to build instance. 
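Annotation: the lockutils lines around here record how long each request waited for and then held the shared "compute_resources" lock (e.g. "waited 34.654s", "held 1.038s"), which is why long claim/update_usage sections back up behind one another. A small sketch of that wait/hold accounting using oslo.concurrency's internal lock context manager follows; it is an illustration of the pattern, assuming the lock name and timing output, not Nova's actual resource tracker code.

    import time
    from oslo_concurrency import lockutils

    def timed_critical_section(name, fn, *args, **kwargs):
        """Run fn under an internal oslo.concurrency lock and report how long we
        waited for the lock and how long we held it, mimicking the
        'acquired ... waited Ns' / 'released ... held Ns' lines above."""
        requested = time.monotonic()
        with lockutils.lock(name):
            acquired = time.monotonic()
            try:
                return fn(*args, **kwargs)
            finally:
                released = time.monotonic()
                print(f'Lock "{name}": waited {acquired - requested:.3f}s, '
                      f'held {released - acquired:.3f}s')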
[ 888.252514] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7fd88a7b-0c2e-4205-9b5e-08f7b653f3a2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Releasing lock "refresh_cache-c06813c4-472d-4bf9-84ec-0d01306bcd48" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 888.253016] env[63028]: DEBUG nova.objects.instance [None req-7fd88a7b-0c2e-4205-9b5e-08f7b653f3a2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Lazy-loading 'migration_context' on Instance uuid c06813c4-472d-4bf9-84ec-0d01306bcd48 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 888.333044] env[63028]: DEBUG nova.compute.manager [None req-f456b00f-8e30-4002-90c9-4a456674e99d tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: b77ba7d6-305e-4b60-a4b7-9353c12c3920] Start spawning the instance on the hypervisor. {{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 888.367313] env[63028]: DEBUG nova.virt.hardware [None req-f456b00f-8e30-4002-90c9-4a456674e99d tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 888.367566] env[63028]: DEBUG nova.virt.hardware [None req-f456b00f-8e30-4002-90c9-4a456674e99d tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 888.367726] env[63028]: DEBUG nova.virt.hardware [None req-f456b00f-8e30-4002-90c9-4a456674e99d tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 888.367911] env[63028]: DEBUG nova.virt.hardware [None req-f456b00f-8e30-4002-90c9-4a456674e99d tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 888.368070] env[63028]: DEBUG nova.virt.hardware [None req-f456b00f-8e30-4002-90c9-4a456674e99d tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 888.368223] env[63028]: DEBUG nova.virt.hardware [None req-f456b00f-8e30-4002-90c9-4a456674e99d 
tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 888.368438] env[63028]: DEBUG nova.virt.hardware [None req-f456b00f-8e30-4002-90c9-4a456674e99d tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 888.368593] env[63028]: DEBUG nova.virt.hardware [None req-f456b00f-8e30-4002-90c9-4a456674e99d tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 888.368758] env[63028]: DEBUG nova.virt.hardware [None req-f456b00f-8e30-4002-90c9-4a456674e99d tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 888.368920] env[63028]: DEBUG nova.virt.hardware [None req-f456b00f-8e30-4002-90c9-4a456674e99d tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 888.369116] env[63028]: DEBUG nova.virt.hardware [None req-f456b00f-8e30-4002-90c9-4a456674e99d tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 888.370808] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2cc24fc-855e-4ad6-bcba-73c24f737198 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.386353] env[63028]: DEBUG oslo_vmware.api [None req-a8b25c06-f049-419d-8aac-06b17b16552d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2735766, 'name': ReconfigVM_Task, 'duration_secs': 0.138597} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.386767] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-a8b25c06-f049-419d-8aac-06b17b16552d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: 3b90dbb8-66ce-435f-beae-5464720bfb3e] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-550793', 'volume_id': '0bceb8a6-ea11-4bf7-9f44-f543c4a23bb0', 'name': 'volume-0bceb8a6-ea11-4bf7-9f44-f543c4a23bb0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '3b90dbb8-66ce-435f-beae-5464720bfb3e', 'attached_at': '', 'detached_at': '', 'volume_id': '0bceb8a6-ea11-4bf7-9f44-f543c4a23bb0', 'serial': '0bceb8a6-ea11-4bf7-9f44-f543c4a23bb0'} {{(pid=63028) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 888.389244] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-428d7687-d102-4201-919a-eeeadd6f76ea {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.445289] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8542636e-9564-4f6b-a973-f1b3ae0f71e1 tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Acquiring lock "d663c2df-ae54-4c50-a70f-e2180700c700" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 888.445548] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8542636e-9564-4f6b-a973-f1b3ae0f71e1 tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Lock "d663c2df-ae54-4c50-a70f-e2180700c700" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 888.445756] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8542636e-9564-4f6b-a973-f1b3ae0f71e1 tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Acquiring lock "d663c2df-ae54-4c50-a70f-e2180700c700-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 888.445937] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8542636e-9564-4f6b-a973-f1b3ae0f71e1 tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Lock "d663c2df-ae54-4c50-a70f-e2180700c700-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 888.446126] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8542636e-9564-4f6b-a973-f1b3ae0f71e1 tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Lock "d663c2df-ae54-4c50-a70f-e2180700c700-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 888.448105] env[63028]: INFO nova.compute.manager [None req-8542636e-9564-4f6b-a973-f1b3ae0f71e1 tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] [instance: d663c2df-ae54-4c50-a70f-e2180700c700] Terminating instance [ 888.599445] env[63028]: INFO nova.compute.manager [None req-5d7cab5c-352a-4bc4-a564-cb4b3ffefd9f tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 1d008794-3c1a-46c6-b4eb-3d5441efdb22] Rescuing [ 888.599850] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5d7cab5c-352a-4bc4-a564-cb4b3ffefd9f tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Acquiring lock "refresh_cache-1d008794-3c1a-46c6-b4eb-3d5441efdb22" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 888.599955] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5d7cab5c-352a-4bc4-a564-cb4b3ffefd9f tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Acquired lock "refresh_cache-1d008794-3c1a-46c6-b4eb-3d5441efdb22" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 888.600059] env[63028]: DEBUG nova.network.neutron [None req-5d7cab5c-352a-4bc4-a564-cb4b3ffefd9f tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 1d008794-3c1a-46c6-b4eb-3d5441efdb22] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 888.713852] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c2c8e9d3-c866-44bd-b81e-c77e80e04316 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Lock "70147f2f-0b5e-4343-84e4-8bc195a5485d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 68.173s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 888.755841] env[63028]: DEBUG nova.objects.base [None req-7fd88a7b-0c2e-4205-9b5e-08f7b653f3a2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=63028) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 888.756782] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a21dd23c-395e-4505-ad8f-3bc0ab42dd83 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.780431] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fbbe656d-6890-4a5e-80c6-7bdf0c2383f0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.785933] env[63028]: DEBUG oslo_vmware.api [None req-7fd88a7b-0c2e-4205-9b5e-08f7b653f3a2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Waiting for the task: (returnval){ [ 888.785933] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52546fa5-2352-e2f0-f3a5-e01874daf21b" [ 888.785933] env[63028]: _type = "Task" [ 888.785933] 
env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.793513] env[63028]: DEBUG oslo_vmware.api [None req-7fd88a7b-0c2e-4205-9b5e-08f7b653f3a2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52546fa5-2352-e2f0-f3a5-e01874daf21b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.882304] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dae50e99-5b1d-4892-a7e4-8c43e575a1ba {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.889960] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89064f99-e9b6-4c7e-889b-4b9dec136728 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.923277] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a031ed3-a89c-47fb-ba0a-7a299e8f661f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.931632] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f729705d-2975-4290-8e57-b488483f98f1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.946801] env[63028]: DEBUG nova.compute.provider_tree [None req-c70c58a8-01fc-41f4-ab97-625b838707cf tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 888.951261] env[63028]: DEBUG nova.compute.manager [None req-8542636e-9564-4f6b-a973-f1b3ae0f71e1 tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] [instance: d663c2df-ae54-4c50-a70f-e2180700c700] Start destroying the instance on the hypervisor. 
{{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 888.951485] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-8542636e-9564-4f6b-a973-f1b3ae0f71e1 tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] [instance: d663c2df-ae54-4c50-a70f-e2180700c700] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 888.952304] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c1b23bb-4b84-4740-80cf-cde1e99dfdac {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.960296] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-8542636e-9564-4f6b-a973-f1b3ae0f71e1 tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] [instance: d663c2df-ae54-4c50-a70f-e2180700c700] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 888.960694] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-97d696cd-d153-46f8-9cfa-b71b3e829344 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.968080] env[63028]: DEBUG oslo_vmware.api [None req-8542636e-9564-4f6b-a973-f1b3ae0f71e1 tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Waiting for the task: (returnval){ [ 888.968080] env[63028]: value = "task-2735767" [ 888.968080] env[63028]: _type = "Task" [ 888.968080] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.977515] env[63028]: DEBUG oslo_vmware.api [None req-8542636e-9564-4f6b-a973-f1b3ae0f71e1 tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Task: {'id': task-2735767, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.298987] env[63028]: DEBUG oslo_vmware.api [None req-7fd88a7b-0c2e-4205-9b5e-08f7b653f3a2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52546fa5-2352-e2f0-f3a5-e01874daf21b, 'name': SearchDatastore_Task, 'duration_secs': 0.008558} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.299737] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7fd88a7b-0c2e-4205-9b5e-08f7b653f3a2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 889.372954] env[63028]: DEBUG nova.network.neutron [None req-5d7cab5c-352a-4bc4-a564-cb4b3ffefd9f tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 1d008794-3c1a-46c6-b4eb-3d5441efdb22] Updating instance_info_cache with network_info: [{"id": "4da14eb6-411a-4cdd-afe0-bd34e474882f", "address": "fa:16:3e:43:b4:9d", "network": {"id": "53257b54-f197-4a4e-94bb-23ad6c6b7353", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-575661432-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e2304ce21bf141cab94fb6c342653812", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0954fad3-d24d-496c-83e6-a09d3cb556fc", "external-id": "nsx-vlan-transportzone-216", "segmentation_id": 216, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4da14eb6-41", "ovs_interfaceid": "4da14eb6-411a-4cdd-afe0-bd34e474882f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 889.449865] env[63028]: DEBUG nova.scheduler.client.report [None req-c70c58a8-01fc-41f4-ab97-625b838707cf tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 889.461484] env[63028]: DEBUG nova.objects.instance [None req-a8b25c06-f049-419d-8aac-06b17b16552d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Lazy-loading 'flavor' on Instance uuid 3b90dbb8-66ce-435f-beae-5464720bfb3e {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 889.479707] env[63028]: DEBUG oslo_vmware.api [None req-8542636e-9564-4f6b-a973-f1b3ae0f71e1 tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Task: {'id': task-2735767, 'name': PowerOffVM_Task, 'duration_secs': 0.380799} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.479996] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-8542636e-9564-4f6b-a973-f1b3ae0f71e1 tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] [instance: d663c2df-ae54-4c50-a70f-e2180700c700] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 889.480237] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-8542636e-9564-4f6b-a973-f1b3ae0f71e1 tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] [instance: d663c2df-ae54-4c50-a70f-e2180700c700] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 889.480486] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4b734fe9-6b0b-4426-b5e7-afbf7ee941c2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.511407] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f4e9a3cc-4341-4479-be04-a674123e04ce tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Acquiring lock "70147f2f-0b5e-4343-84e4-8bc195a5485d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 889.511634] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f4e9a3cc-4341-4479-be04-a674123e04ce tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Lock "70147f2f-0b5e-4343-84e4-8bc195a5485d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 889.511840] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f4e9a3cc-4341-4479-be04-a674123e04ce tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Acquiring lock "70147f2f-0b5e-4343-84e4-8bc195a5485d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 889.512030] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f4e9a3cc-4341-4479-be04-a674123e04ce tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Lock "70147f2f-0b5e-4343-84e4-8bc195a5485d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 889.512201] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f4e9a3cc-4341-4479-be04-a674123e04ce tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Lock "70147f2f-0b5e-4343-84e4-8bc195a5485d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 889.514229] env[63028]: INFO nova.compute.manager [None req-f4e9a3cc-4341-4479-be04-a674123e04ce tempest-ServersNegativeTestJSON-852925760 
tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 70147f2f-0b5e-4343-84e4-8bc195a5485d] Terminating instance [ 889.547099] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-8542636e-9564-4f6b-a973-f1b3ae0f71e1 tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] [instance: d663c2df-ae54-4c50-a70f-e2180700c700] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 889.547289] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-8542636e-9564-4f6b-a973-f1b3ae0f71e1 tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] [instance: d663c2df-ae54-4c50-a70f-e2180700c700] Deleting contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 889.547472] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-8542636e-9564-4f6b-a973-f1b3ae0f71e1 tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Deleting the datastore file [datastore1] d663c2df-ae54-4c50-a70f-e2180700c700 {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 889.547737] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f306f903-3533-4c7b-b35d-1bcdb1ae5883 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.553786] env[63028]: DEBUG oslo_vmware.api [None req-8542636e-9564-4f6b-a973-f1b3ae0f71e1 tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Waiting for the task: (returnval){ [ 889.553786] env[63028]: value = "task-2735769" [ 889.553786] env[63028]: _type = "Task" [ 889.553786] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.561739] env[63028]: DEBUG oslo_vmware.api [None req-8542636e-9564-4f6b-a973-f1b3ae0f71e1 tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Task: {'id': task-2735769, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.666484] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d9c4e743-0ad3-452d-acd1-8a340ea80d72 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Acquiring lock "3b90dbb8-66ce-435f-beae-5464720bfb3e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 889.747229] env[63028]: DEBUG nova.compute.manager [req-7a048531-1f13-4e97-8a6e-87cbe2dab23d req-06e1b1c9-30d4-475e-849e-94353276fe77 service nova] [instance: b77ba7d6-305e-4b60-a4b7-9353c12c3920] Received event network-vif-plugged-9efd2ef2-d319-4038-ab28-44a46bd597d8 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 889.747229] env[63028]: DEBUG oslo_concurrency.lockutils [req-7a048531-1f13-4e97-8a6e-87cbe2dab23d req-06e1b1c9-30d4-475e-849e-94353276fe77 service nova] Acquiring lock "b77ba7d6-305e-4b60-a4b7-9353c12c3920-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 889.747229] env[63028]: DEBUG oslo_concurrency.lockutils [req-7a048531-1f13-4e97-8a6e-87cbe2dab23d req-06e1b1c9-30d4-475e-849e-94353276fe77 service nova] Lock "b77ba7d6-305e-4b60-a4b7-9353c12c3920-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 889.747229] env[63028]: DEBUG oslo_concurrency.lockutils [req-7a048531-1f13-4e97-8a6e-87cbe2dab23d req-06e1b1c9-30d4-475e-849e-94353276fe77 service nova] Lock "b77ba7d6-305e-4b60-a4b7-9353c12c3920-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 889.747229] env[63028]: DEBUG nova.compute.manager [req-7a048531-1f13-4e97-8a6e-87cbe2dab23d req-06e1b1c9-30d4-475e-849e-94353276fe77 service nova] [instance: b77ba7d6-305e-4b60-a4b7-9353c12c3920] No waiting events found dispatching network-vif-plugged-9efd2ef2-d319-4038-ab28-44a46bd597d8 {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 889.747517] env[63028]: WARNING nova.compute.manager [req-7a048531-1f13-4e97-8a6e-87cbe2dab23d req-06e1b1c9-30d4-475e-849e-94353276fe77 service nova] [instance: b77ba7d6-305e-4b60-a4b7-9353c12c3920] Received unexpected event network-vif-plugged-9efd2ef2-d319-4038-ab28-44a46bd597d8 for instance with vm_state building and task_state spawning. 
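Annotation: the network-vif-plugged lines above show the external-event handshake: neutron reports the port as plugged, the compute manager looks for a registered waiter for that (instance, event) pair, and because the spawning request has not registered one yet it logs "No waiting events found" followed by the "Received unexpected event ..." warning. A tiny, hypothetical sketch of that registry (not Nova's InstanceEvents implementation) is below.

    import threading

    class InstanceEvents:
        """Expected-event handshake: spawners register, dispatchers wake them."""

        def __init__(self):
            self._lock = threading.Lock()
            self._waiters = {}        # (instance_uuid, event_name) -> threading.Event

        def prepare_for(self, instance_uuid, event_name):
            ev = threading.Event()
            with self._lock:
                self._waiters[(instance_uuid, event_name)] = ev
            return ev                 # the spawner later calls ev.wait(timeout=...)

        def dispatch(self, instance_uuid, event_name):
            with self._lock:
                ev = self._waiters.pop((instance_uuid, event_name), None)
            if ev is None:
                # Nobody is waiting yet; this corresponds to the
                # "Received unexpected event ..." warning seen above.
                print(f"unexpected event {event_name} for {instance_uuid}")
                return
            ev.set()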
[ 889.870992] env[63028]: DEBUG nova.network.neutron [None req-f456b00f-8e30-4002-90c9-4a456674e99d tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: b77ba7d6-305e-4b60-a4b7-9353c12c3920] Successfully updated port: 9efd2ef2-d319-4038-ab28-44a46bd597d8 {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 889.874478] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5d7cab5c-352a-4bc4-a564-cb4b3ffefd9f tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Releasing lock "refresh_cache-1d008794-3c1a-46c6-b4eb-3d5441efdb22" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 889.954825] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c70c58a8-01fc-41f4-ab97-625b838707cf tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.145s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 889.957142] env[63028]: DEBUG oslo_concurrency.lockutils [None req-3885cc69-0e8c-4570-88b4-1466a9dc892d tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.312s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 889.957426] env[63028]: DEBUG nova.objects.instance [None req-3885cc69-0e8c-4570-88b4-1466a9dc892d tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Lazy-loading 'resources' on Instance uuid 1316318e-8dcf-4ac2-b40a-6a3ab6964997 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 889.966732] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a8b25c06-f049-419d-8aac-06b17b16552d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Lock "3b90dbb8-66ce-435f-beae-5464720bfb3e" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.820s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 889.967392] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d9c4e743-0ad3-452d-acd1-8a340ea80d72 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Lock "3b90dbb8-66ce-435f-beae-5464720bfb3e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.301s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 889.967676] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d9c4e743-0ad3-452d-acd1-8a340ea80d72 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Acquiring lock "3b90dbb8-66ce-435f-beae-5464720bfb3e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 889.967881] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d9c4e743-0ad3-452d-acd1-8a340ea80d72 tempest-DeleteServersTestJSON-1541253569 
tempest-DeleteServersTestJSON-1541253569-project-member] Lock "3b90dbb8-66ce-435f-beae-5464720bfb3e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 889.968073] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d9c4e743-0ad3-452d-acd1-8a340ea80d72 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Lock "3b90dbb8-66ce-435f-beae-5464720bfb3e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 889.970846] env[63028]: INFO nova.compute.manager [None req-d9c4e743-0ad3-452d-acd1-8a340ea80d72 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: 3b90dbb8-66ce-435f-beae-5464720bfb3e] Terminating instance [ 889.980722] env[63028]: INFO nova.scheduler.client.report [None req-c70c58a8-01fc-41f4-ab97-625b838707cf tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Deleted allocations for instance 022125c4-2b0c-4a2c-ae63-18968887316e [ 890.018068] env[63028]: DEBUG nova.compute.manager [None req-f4e9a3cc-4341-4479-be04-a674123e04ce tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 70147f2f-0b5e-4343-84e4-8bc195a5485d] Start destroying the instance on the hypervisor. {{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 890.018357] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-f4e9a3cc-4341-4479-be04-a674123e04ce tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 70147f2f-0b5e-4343-84e4-8bc195a5485d] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 890.019739] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b20b3615-21e3-4c9e-97a0-52e13af5431c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.027693] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4e9a3cc-4341-4479-be04-a674123e04ce tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 70147f2f-0b5e-4343-84e4-8bc195a5485d] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 890.027948] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f073afcb-6e30-47d1-9ae9-80637fcbea3c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.036068] env[63028]: DEBUG oslo_vmware.api [None req-f4e9a3cc-4341-4479-be04-a674123e04ce tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Waiting for the task: (returnval){ [ 890.036068] env[63028]: value = "task-2735770" [ 890.036068] env[63028]: _type = "Task" [ 890.036068] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.044766] env[63028]: DEBUG oslo_vmware.api [None req-f4e9a3cc-4341-4479-be04-a674123e04ce tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Task: {'id': task-2735770, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.063649] env[63028]: DEBUG oslo_vmware.api [None req-8542636e-9564-4f6b-a973-f1b3ae0f71e1 tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Task: {'id': task-2735769, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.302837} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.063866] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-8542636e-9564-4f6b-a973-f1b3ae0f71e1 tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 890.064030] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-8542636e-9564-4f6b-a973-f1b3ae0f71e1 tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] [instance: d663c2df-ae54-4c50-a70f-e2180700c700] Deleted contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 890.064644] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-8542636e-9564-4f6b-a973-f1b3ae0f71e1 tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] [instance: d663c2df-ae54-4c50-a70f-e2180700c700] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 890.064644] env[63028]: INFO nova.compute.manager [None req-8542636e-9564-4f6b-a973-f1b3ae0f71e1 tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] [instance: d663c2df-ae54-4c50-a70f-e2180700c700] Took 1.11 seconds to destroy the instance on the hypervisor. [ 890.064752] env[63028]: DEBUG oslo.service.loopingcall [None req-8542636e-9564-4f6b-a973-f1b3ae0f71e1 tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 890.064900] env[63028]: DEBUG nova.compute.manager [-] [instance: d663c2df-ae54-4c50-a70f-e2180700c700] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 890.064995] env[63028]: DEBUG nova.network.neutron [-] [instance: d663c2df-ae54-4c50-a70f-e2180700c700] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 890.374172] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f456b00f-8e30-4002-90c9-4a456674e99d tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Acquiring lock "refresh_cache-b77ba7d6-305e-4b60-a4b7-9353c12c3920" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 890.374473] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f456b00f-8e30-4002-90c9-4a456674e99d tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Acquired lock "refresh_cache-b77ba7d6-305e-4b60-a4b7-9353c12c3920" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 890.374473] env[63028]: DEBUG nova.network.neutron [None req-f456b00f-8e30-4002-90c9-4a456674e99d tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: b77ba7d6-305e-4b60-a4b7-9353c12c3920] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 890.474619] env[63028]: DEBUG nova.compute.manager [None req-d9c4e743-0ad3-452d-acd1-8a340ea80d72 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: 3b90dbb8-66ce-435f-beae-5464720bfb3e] Start destroying the instance on the hypervisor. {{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 890.474888] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9c4e743-0ad3-452d-acd1-8a340ea80d72 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: 3b90dbb8-66ce-435f-beae-5464720bfb3e] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 890.475166] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4d35eda9-c7a4-4816-8cfc-7a50eb53eb6b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.490324] env[63028]: DEBUG oslo_vmware.api [None req-d9c4e743-0ad3-452d-acd1-8a340ea80d72 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Waiting for the task: (returnval){ [ 890.490324] env[63028]: value = "task-2735771" [ 890.490324] env[63028]: _type = "Task" [ 890.490324] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.490519] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c70c58a8-01fc-41f4-ab97-625b838707cf tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Lock "022125c4-2b0c-4a2c-ae63-18968887316e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 40.384s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 890.500046] env[63028]: DEBUG oslo_vmware.api [None req-d9c4e743-0ad3-452d-acd1-8a340ea80d72 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2735771, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.551603] env[63028]: DEBUG oslo_vmware.api [None req-f4e9a3cc-4341-4479-be04-a674123e04ce tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Task: {'id': task-2735770, 'name': PowerOffVM_Task, 'duration_secs': 0.245435} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.551879] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4e9a3cc-4341-4479-be04-a674123e04ce tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 70147f2f-0b5e-4343-84e4-8bc195a5485d] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 890.552515] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-f4e9a3cc-4341-4479-be04-a674123e04ce tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 70147f2f-0b5e-4343-84e4-8bc195a5485d] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 890.552515] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-982695e9-1501-4133-b08e-d4ddda89f5d9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.615420] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-f4e9a3cc-4341-4479-be04-a674123e04ce tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 70147f2f-0b5e-4343-84e4-8bc195a5485d] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 890.616470] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-f4e9a3cc-4341-4479-be04-a674123e04ce tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 70147f2f-0b5e-4343-84e4-8bc195a5485d] Deleting contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 890.616470] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4e9a3cc-4341-4479-be04-a674123e04ce tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Deleting the datastore file [datastore2] 70147f2f-0b5e-4343-84e4-8bc195a5485d {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 890.616470] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with 
opID=oslo.vmware-b4e93fd5-7ac5-4fb6-847f-fe89d8dd8bf0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.622691] env[63028]: DEBUG oslo_vmware.api [None req-f4e9a3cc-4341-4479-be04-a674123e04ce tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Waiting for the task: (returnval){ [ 890.622691] env[63028]: value = "task-2735773" [ 890.622691] env[63028]: _type = "Task" [ 890.622691] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.633092] env[63028]: DEBUG oslo_vmware.api [None req-f4e9a3cc-4341-4479-be04-a674123e04ce tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Task: {'id': task-2735773, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.942600] env[63028]: DEBUG nova.network.neutron [None req-f456b00f-8e30-4002-90c9-4a456674e99d tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: b77ba7d6-305e-4b60-a4b7-9353c12c3920] Instance cache missing network info. {{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 890.954721] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78123fcc-bb1d-477c-b571-87abcb7a3668 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.963783] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4a1ef0e-44d6-47a3-9d94-169147679aa2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.002549] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-990068c0-cffb-454d-a174-64f8af006267 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.011758] env[63028]: DEBUG oslo_vmware.api [None req-d9c4e743-0ad3-452d-acd1-8a340ea80d72 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2735771, 'name': PowerOffVM_Task, 'duration_secs': 0.200995} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.014871] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9c4e743-0ad3-452d-acd1-8a340ea80d72 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: 3b90dbb8-66ce-435f-beae-5464720bfb3e] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 891.014871] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-d9c4e743-0ad3-452d-acd1-8a340ea80d72 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: 3b90dbb8-66ce-435f-beae-5464720bfb3e] Volume detach. 
Driver type: vmdk {{(pid=63028) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 891.015125] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-d9c4e743-0ad3-452d-acd1-8a340ea80d72 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: 3b90dbb8-66ce-435f-beae-5464720bfb3e] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-550793', 'volume_id': '0bceb8a6-ea11-4bf7-9f44-f543c4a23bb0', 'name': 'volume-0bceb8a6-ea11-4bf7-9f44-f543c4a23bb0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '3b90dbb8-66ce-435f-beae-5464720bfb3e', 'attached_at': '', 'detached_at': '', 'volume_id': '0bceb8a6-ea11-4bf7-9f44-f543c4a23bb0', 'serial': '0bceb8a6-ea11-4bf7-9f44-f543c4a23bb0'} {{(pid=63028) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 891.016116] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2e621d3-56e0-4513-b08d-4945ceac496f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.019416] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-749e804b-da3d-4604-bdbe-07c0d379c911 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.036202] env[63028]: DEBUG nova.compute.provider_tree [None req-3885cc69-0e8c-4570-88b4-1466a9dc892d tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 891.056904] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07364fb2-7067-4fea-93d4-ceafcbe1cd0f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.064860] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0219de0c-c97b-4db6-b42b-0db0a966e69a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.067492] env[63028]: DEBUG nova.network.neutron [-] [instance: d663c2df-ae54-4c50-a70f-e2180700c700] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 891.088468] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d70865b7-096c-4dca-9d7b-74e9b26180eb {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.107786] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-d9c4e743-0ad3-452d-acd1-8a340ea80d72 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] The volume has not been displaced from its original location: [datastore2] volume-0bceb8a6-ea11-4bf7-9f44-f543c4a23bb0/volume-0bceb8a6-ea11-4bf7-9f44-f543c4a23bb0.vmdk. No consolidation needed. 
{{(pid=63028) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 891.110835] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-d9c4e743-0ad3-452d-acd1-8a340ea80d72 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: 3b90dbb8-66ce-435f-beae-5464720bfb3e] Reconfiguring VM instance instance-00000048 to detach disk 2001 {{(pid=63028) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 891.111508] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-96311f14-e629-47d7-8959-32acecaa6c15 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.138923] env[63028]: DEBUG oslo_vmware.api [None req-f4e9a3cc-4341-4479-be04-a674123e04ce tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Task: {'id': task-2735773, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.254022} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.139097] env[63028]: DEBUG oslo_vmware.api [None req-d9c4e743-0ad3-452d-acd1-8a340ea80d72 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Waiting for the task: (returnval){ [ 891.139097] env[63028]: value = "task-2735774" [ 891.139097] env[63028]: _type = "Task" [ 891.139097] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.139434] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4e9a3cc-4341-4479-be04-a674123e04ce tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 891.139609] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-f4e9a3cc-4341-4479-be04-a674123e04ce tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 70147f2f-0b5e-4343-84e4-8bc195a5485d] Deleted contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 891.140078] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-f4e9a3cc-4341-4479-be04-a674123e04ce tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 70147f2f-0b5e-4343-84e4-8bc195a5485d] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 891.140078] env[63028]: INFO nova.compute.manager [None req-f4e9a3cc-4341-4479-be04-a674123e04ce tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 70147f2f-0b5e-4343-84e4-8bc195a5485d] Took 1.12 seconds to destroy the instance on the hypervisor. [ 891.140259] env[63028]: DEBUG oslo.service.loopingcall [None req-f4e9a3cc-4341-4479-be04-a674123e04ce tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 891.140500] env[63028]: DEBUG nova.compute.manager [-] [instance: 70147f2f-0b5e-4343-84e4-8bc195a5485d] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 891.140629] env[63028]: DEBUG nova.network.neutron [-] [instance: 70147f2f-0b5e-4343-84e4-8bc195a5485d] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 891.150770] env[63028]: DEBUG oslo_vmware.api [None req-d9c4e743-0ad3-452d-acd1-8a340ea80d72 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2735774, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.268741] env[63028]: DEBUG nova.network.neutron [None req-f456b00f-8e30-4002-90c9-4a456674e99d tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: b77ba7d6-305e-4b60-a4b7-9353c12c3920] Updating instance_info_cache with network_info: [{"id": "9efd2ef2-d319-4038-ab28-44a46bd597d8", "address": "fa:16:3e:90:60:0c", "network": {"id": "c2f1496c-e3fd-43db-a032-12cdacdb4e46", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1287620143-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "25a6457f62d149629c09589feb1a550c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9efd2ef2-d3", "ovs_interfaceid": "9efd2ef2-d319-4038-ab28-44a46bd597d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 891.325357] env[63028]: DEBUG oslo_concurrency.lockutils [None req-656b4c07-6b90-4bb5-896d-d42945654795 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Acquiring lock "15326f55-2db8-47c3-b1fd-ce8ba1174c79" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 891.325631] env[63028]: DEBUG oslo_concurrency.lockutils [None req-656b4c07-6b90-4bb5-896d-d42945654795 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Lock "15326f55-2db8-47c3-b1fd-ce8ba1174c79" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 891.325833] env[63028]: DEBUG oslo_concurrency.lockutils [None req-656b4c07-6b90-4bb5-896d-d42945654795 
tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Acquiring lock "15326f55-2db8-47c3-b1fd-ce8ba1174c79-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 891.326052] env[63028]: DEBUG oslo_concurrency.lockutils [None req-656b4c07-6b90-4bb5-896d-d42945654795 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Lock "15326f55-2db8-47c3-b1fd-ce8ba1174c79-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 891.326256] env[63028]: DEBUG oslo_concurrency.lockutils [None req-656b4c07-6b90-4bb5-896d-d42945654795 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Lock "15326f55-2db8-47c3-b1fd-ce8ba1174c79-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 891.328280] env[63028]: INFO nova.compute.manager [None req-656b4c07-6b90-4bb5-896d-d42945654795 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] [instance: 15326f55-2db8-47c3-b1fd-ce8ba1174c79] Terminating instance [ 891.415021] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d7cab5c-352a-4bc4-a564-cb4b3ffefd9f tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 1d008794-3c1a-46c6-b4eb-3d5441efdb22] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 891.415391] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-58e57f63-67d3-4fcd-a9a9-02caf92d8776 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.424799] env[63028]: DEBUG oslo_vmware.api [None req-5d7cab5c-352a-4bc4-a564-cb4b3ffefd9f tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Waiting for the task: (returnval){ [ 891.424799] env[63028]: value = "task-2735775" [ 891.424799] env[63028]: _type = "Task" [ 891.424799] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.435417] env[63028]: DEBUG oslo_vmware.api [None req-5d7cab5c-352a-4bc4-a564-cb4b3ffefd9f tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2735775, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.557102] env[63028]: DEBUG nova.scheduler.client.report [None req-3885cc69-0e8c-4570-88b4-1466a9dc892d tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 891.570687] env[63028]: INFO nova.compute.manager [-] [instance: d663c2df-ae54-4c50-a70f-e2180700c700] Took 1.51 seconds to deallocate network for instance. [ 891.655148] env[63028]: DEBUG oslo_vmware.api [None req-d9c4e743-0ad3-452d-acd1-8a340ea80d72 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2735774, 'name': ReconfigVM_Task, 'duration_secs': 0.204305} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.655650] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-d9c4e743-0ad3-452d-acd1-8a340ea80d72 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: 3b90dbb8-66ce-435f-beae-5464720bfb3e] Reconfigured VM instance instance-00000048 to detach disk 2001 {{(pid=63028) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 891.662800] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0ecd36c3-dcdd-4389-afd3-75dd7c89ba7e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.686209] env[63028]: DEBUG oslo_vmware.api [None req-d9c4e743-0ad3-452d-acd1-8a340ea80d72 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Waiting for the task: (returnval){ [ 891.686209] env[63028]: value = "task-2735776" [ 891.686209] env[63028]: _type = "Task" [ 891.686209] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.695667] env[63028]: DEBUG oslo_vmware.api [None req-d9c4e743-0ad3-452d-acd1-8a340ea80d72 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2735776, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.772831] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f456b00f-8e30-4002-90c9-4a456674e99d tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Releasing lock "refresh_cache-b77ba7d6-305e-4b60-a4b7-9353c12c3920" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 891.772831] env[63028]: DEBUG nova.compute.manager [None req-f456b00f-8e30-4002-90c9-4a456674e99d tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: b77ba7d6-305e-4b60-a4b7-9353c12c3920] Instance network_info: |[{"id": "9efd2ef2-d319-4038-ab28-44a46bd597d8", "address": "fa:16:3e:90:60:0c", "network": {"id": "c2f1496c-e3fd-43db-a032-12cdacdb4e46", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1287620143-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "25a6457f62d149629c09589feb1a550c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9efd2ef2-d3", "ovs_interfaceid": "9efd2ef2-d319-4038-ab28-44a46bd597d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 891.772831] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-f456b00f-8e30-4002-90c9-4a456674e99d tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: b77ba7d6-305e-4b60-a4b7-9353c12c3920] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:90:60:0c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cbf3349e-d05e-4d44-a011-c4b6e41af988', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9efd2ef2-d319-4038-ab28-44a46bd597d8', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 891.780303] env[63028]: DEBUG oslo.service.loopingcall [None req-f456b00f-8e30-4002-90c9-4a456674e99d tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 891.781874] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b77ba7d6-305e-4b60-a4b7-9353c12c3920] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 891.783712] env[63028]: DEBUG nova.compute.manager [req-c6d1b563-1d8a-42b6-9bc3-a7ca2a84b1ce req-48463416-37a9-4fd1-90b0-b7d6ffa5a934 service nova] [instance: b77ba7d6-305e-4b60-a4b7-9353c12c3920] Received event network-changed-9efd2ef2-d319-4038-ab28-44a46bd597d8 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 891.783931] env[63028]: DEBUG nova.compute.manager [req-c6d1b563-1d8a-42b6-9bc3-a7ca2a84b1ce req-48463416-37a9-4fd1-90b0-b7d6ffa5a934 service nova] [instance: b77ba7d6-305e-4b60-a4b7-9353c12c3920] Refreshing instance network info cache due to event network-changed-9efd2ef2-d319-4038-ab28-44a46bd597d8. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 891.784108] env[63028]: DEBUG oslo_concurrency.lockutils [req-c6d1b563-1d8a-42b6-9bc3-a7ca2a84b1ce req-48463416-37a9-4fd1-90b0-b7d6ffa5a934 service nova] Acquiring lock "refresh_cache-b77ba7d6-305e-4b60-a4b7-9353c12c3920" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 891.784255] env[63028]: DEBUG oslo_concurrency.lockutils [req-c6d1b563-1d8a-42b6-9bc3-a7ca2a84b1ce req-48463416-37a9-4fd1-90b0-b7d6ffa5a934 service nova] Acquired lock "refresh_cache-b77ba7d6-305e-4b60-a4b7-9353c12c3920" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 891.784411] env[63028]: DEBUG nova.network.neutron [req-c6d1b563-1d8a-42b6-9bc3-a7ca2a84b1ce req-48463416-37a9-4fd1-90b0-b7d6ffa5a934 service nova] [instance: b77ba7d6-305e-4b60-a4b7-9353c12c3920] Refreshing network info cache for port 9efd2ef2-d319-4038-ab28-44a46bd597d8 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 891.785451] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0363edac-19cd-431d-a18d-ddebb68a91f1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.806832] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 891.806832] env[63028]: value = "task-2735777" [ 891.806832] env[63028]: _type = "Task" [ 891.806832] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.816198] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735777, 'name': CreateVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.832373] env[63028]: DEBUG nova.compute.manager [None req-656b4c07-6b90-4bb5-896d-d42945654795 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] [instance: 15326f55-2db8-47c3-b1fd-ce8ba1174c79] Start destroying the instance on the hypervisor. 
{{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 891.832373] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-656b4c07-6b90-4bb5-896d-d42945654795 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] [instance: 15326f55-2db8-47c3-b1fd-ce8ba1174c79] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 891.833194] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72647949-2c9f-4f23-a181-cddfa2f12a9e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.841361] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-656b4c07-6b90-4bb5-896d-d42945654795 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] [instance: 15326f55-2db8-47c3-b1fd-ce8ba1174c79] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 891.841804] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4dff9e65-b377-4e5d-b2e7-bf583062010f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.848042] env[63028]: DEBUG oslo_vmware.api [None req-656b4c07-6b90-4bb5-896d-d42945654795 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Waiting for the task: (returnval){ [ 891.848042] env[63028]: value = "task-2735778" [ 891.848042] env[63028]: _type = "Task" [ 891.848042] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.855751] env[63028]: DEBUG oslo_vmware.api [None req-656b4c07-6b90-4bb5-896d-d42945654795 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Task: {'id': task-2735778, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.937075] env[63028]: DEBUG oslo_vmware.api [None req-5d7cab5c-352a-4bc4-a564-cb4b3ffefd9f tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2735775, 'name': PowerOffVM_Task, 'duration_secs': 0.26477} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.937423] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d7cab5c-352a-4bc4-a564-cb4b3ffefd9f tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 1d008794-3c1a-46c6-b4eb-3d5441efdb22] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 891.938279] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-306742ce-5155-4ec6-a2a9-726071dcb919 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.957319] env[63028]: DEBUG nova.network.neutron [-] [instance: 70147f2f-0b5e-4343-84e4-8bc195a5485d] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 891.959199] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5040794c-2fac-4cd3-a817-ff57ba3b142e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.992957] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d7cab5c-352a-4bc4-a564-cb4b3ffefd9f tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 1d008794-3c1a-46c6-b4eb-3d5441efdb22] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 891.993279] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-738ed0ba-7f1e-4335-bd59-fd36595bb0e9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.999845] env[63028]: DEBUG oslo_vmware.api [None req-5d7cab5c-352a-4bc4-a564-cb4b3ffefd9f tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Waiting for the task: (returnval){ [ 891.999845] env[63028]: value = "task-2735779" [ 891.999845] env[63028]: _type = "Task" [ 891.999845] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.010772] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d7cab5c-352a-4bc4-a564-cb4b3ffefd9f tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 1d008794-3c1a-46c6-b4eb-3d5441efdb22] VM already powered off {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 892.010994] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-5d7cab5c-352a-4bc4-a564-cb4b3ffefd9f tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 1d008794-3c1a-46c6-b4eb-3d5441efdb22] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 892.011252] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5d7cab5c-352a-4bc4-a564-cb4b3ffefd9f tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 892.011398] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5d7cab5c-352a-4bc4-a564-cb4b3ffefd9f tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 892.012033] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-5d7cab5c-352a-4bc4-a564-cb4b3ffefd9f tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 892.012033] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7a86faad-223e-48ad-9fb0-f6b5166cb122 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.021755] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-5d7cab5c-352a-4bc4-a564-cb4b3ffefd9f tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 892.021973] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-5d7cab5c-352a-4bc4-a564-cb4b3ffefd9f tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 892.022729] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5694c3bc-765d-4d94-b772-eeda7581e426 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.028426] env[63028]: DEBUG oslo_vmware.api [None req-5d7cab5c-352a-4bc4-a564-cb4b3ffefd9f tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Waiting for the task: (returnval){ [ 892.028426] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]521d190f-0bbe-864c-cdfb-1b77fde33ee2" [ 892.028426] env[63028]: _type = "Task" [ 892.028426] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.038532] env[63028]: DEBUG oslo_vmware.api [None req-5d7cab5c-352a-4bc4-a564-cb4b3ffefd9f tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]521d190f-0bbe-864c-cdfb-1b77fde33ee2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.064852] env[63028]: DEBUG oslo_concurrency.lockutils [None req-3885cc69-0e8c-4570-88b4-1466a9dc892d tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.108s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 892.068972] env[63028]: DEBUG oslo_concurrency.lockutils [None req-93dc5eed-51ca-4a79-934e-c11281bb8abc tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 35.901s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 892.068972] env[63028]: DEBUG nova.objects.instance [None req-93dc5eed-51ca-4a79-934e-c11281bb8abc tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: ba57ed92-aaef-460c-bd45-d0cbe09e4615] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63028) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 892.079839] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8542636e-9564-4f6b-a973-f1b3ae0f71e1 tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 892.095802] env[63028]: INFO nova.scheduler.client.report [None req-3885cc69-0e8c-4570-88b4-1466a9dc892d tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Deleted allocations for instance 1316318e-8dcf-4ac2-b40a-6a3ab6964997 [ 892.199023] env[63028]: DEBUG oslo_vmware.api [None req-d9c4e743-0ad3-452d-acd1-8a340ea80d72 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2735776, 'name': 
ReconfigVM_Task, 'duration_secs': 0.227227} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.199023] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-d9c4e743-0ad3-452d-acd1-8a340ea80d72 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: 3b90dbb8-66ce-435f-beae-5464720bfb3e] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-550793', 'volume_id': '0bceb8a6-ea11-4bf7-9f44-f543c4a23bb0', 'name': 'volume-0bceb8a6-ea11-4bf7-9f44-f543c4a23bb0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '3b90dbb8-66ce-435f-beae-5464720bfb3e', 'attached_at': '', 'detached_at': '', 'volume_id': '0bceb8a6-ea11-4bf7-9f44-f543c4a23bb0', 'serial': '0bceb8a6-ea11-4bf7-9f44-f543c4a23bb0'} {{(pid=63028) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 892.199023] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-d9c4e743-0ad3-452d-acd1-8a340ea80d72 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: 3b90dbb8-66ce-435f-beae-5464720bfb3e] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 892.199023] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2847e98c-ce0f-4182-87da-a98f474cd3e3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.204952] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-d9c4e743-0ad3-452d-acd1-8a340ea80d72 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: 3b90dbb8-66ce-435f-beae-5464720bfb3e] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 892.205564] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e0c8234b-bf3a-4b7b-9bb7-29a8d0f40254 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.275460] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-d9c4e743-0ad3-452d-acd1-8a340ea80d72 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: 3b90dbb8-66ce-435f-beae-5464720bfb3e] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 892.275817] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-d9c4e743-0ad3-452d-acd1-8a340ea80d72 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: 3b90dbb8-66ce-435f-beae-5464720bfb3e] Deleting contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 892.276054] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-d9c4e743-0ad3-452d-acd1-8a340ea80d72 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Deleting the datastore file [datastore2] 3b90dbb8-66ce-435f-beae-5464720bfb3e {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 892.276363] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-641154ad-1e8b-46fd-8d7b-dda3814b22ad 
{{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.282825] env[63028]: DEBUG oslo_vmware.api [None req-d9c4e743-0ad3-452d-acd1-8a340ea80d72 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Waiting for the task: (returnval){ [ 892.282825] env[63028]: value = "task-2735781" [ 892.282825] env[63028]: _type = "Task" [ 892.282825] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.290547] env[63028]: DEBUG oslo_vmware.api [None req-d9c4e743-0ad3-452d-acd1-8a340ea80d72 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2735781, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.317886] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735777, 'name': CreateVM_Task, 'duration_secs': 0.491226} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.318080] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b77ba7d6-305e-4b60-a4b7-9353c12c3920] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 892.318756] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f456b00f-8e30-4002-90c9-4a456674e99d tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 892.318922] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f456b00f-8e30-4002-90c9-4a456674e99d tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 892.319261] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f456b00f-8e30-4002-90c9-4a456674e99d tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 892.319605] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5fb93399-9d2b-4f17-8eea-06e809018fb6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.325536] env[63028]: DEBUG oslo_vmware.api [None req-f456b00f-8e30-4002-90c9-4a456674e99d tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Waiting for the task: (returnval){ [ 892.325536] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]525a8edd-346f-728c-8611-98a15bff6a36" [ 892.325536] env[63028]: _type = "Task" [ 892.325536] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.332504] env[63028]: DEBUG oslo_vmware.api [None req-f456b00f-8e30-4002-90c9-4a456674e99d tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]525a8edd-346f-728c-8611-98a15bff6a36, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.358060] env[63028]: DEBUG oslo_vmware.api [None req-656b4c07-6b90-4bb5-896d-d42945654795 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Task: {'id': task-2735778, 'name': PowerOffVM_Task, 'duration_secs': 0.275363} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.358364] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-656b4c07-6b90-4bb5-896d-d42945654795 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] [instance: 15326f55-2db8-47c3-b1fd-ce8ba1174c79] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 892.358598] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-656b4c07-6b90-4bb5-896d-d42945654795 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] [instance: 15326f55-2db8-47c3-b1fd-ce8ba1174c79] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 892.359338] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e8156cf7-b5c3-470d-bcdf-d06800b0149c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.416780] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-656b4c07-6b90-4bb5-896d-d42945654795 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] [instance: 15326f55-2db8-47c3-b1fd-ce8ba1174c79] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 892.417117] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-656b4c07-6b90-4bb5-896d-d42945654795 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] [instance: 15326f55-2db8-47c3-b1fd-ce8ba1174c79] Deleting contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 892.417185] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-656b4c07-6b90-4bb5-896d-d42945654795 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Deleting the datastore file [datastore2] 15326f55-2db8-47c3-b1fd-ce8ba1174c79 {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 892.417448] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f0494797-72fb-4d7f-ac5d-baab5e57ec3e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.423241] env[63028]: DEBUG oslo_vmware.api [None req-656b4c07-6b90-4bb5-896d-d42945654795 tempest-ServersAdminNegativeTestJSON-1478593092 
tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Waiting for the task: (returnval){ [ 892.423241] env[63028]: value = "task-2735783" [ 892.423241] env[63028]: _type = "Task" [ 892.423241] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.431709] env[63028]: DEBUG oslo_vmware.api [None req-656b4c07-6b90-4bb5-896d-d42945654795 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Task: {'id': task-2735783, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.462774] env[63028]: INFO nova.compute.manager [-] [instance: 70147f2f-0b5e-4343-84e4-8bc195a5485d] Took 1.32 seconds to deallocate network for instance. [ 892.506570] env[63028]: DEBUG nova.network.neutron [req-c6d1b563-1d8a-42b6-9bc3-a7ca2a84b1ce req-48463416-37a9-4fd1-90b0-b7d6ffa5a934 service nova] [instance: b77ba7d6-305e-4b60-a4b7-9353c12c3920] Updated VIF entry in instance network info cache for port 9efd2ef2-d319-4038-ab28-44a46bd597d8. {{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 892.507038] env[63028]: DEBUG nova.network.neutron [req-c6d1b563-1d8a-42b6-9bc3-a7ca2a84b1ce req-48463416-37a9-4fd1-90b0-b7d6ffa5a934 service nova] [instance: b77ba7d6-305e-4b60-a4b7-9353c12c3920] Updating instance_info_cache with network_info: [{"id": "9efd2ef2-d319-4038-ab28-44a46bd597d8", "address": "fa:16:3e:90:60:0c", "network": {"id": "c2f1496c-e3fd-43db-a032-12cdacdb4e46", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1287620143-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "25a6457f62d149629c09589feb1a550c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9efd2ef2-d3", "ovs_interfaceid": "9efd2ef2-d319-4038-ab28-44a46bd597d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 892.538625] env[63028]: DEBUG oslo_vmware.api [None req-5d7cab5c-352a-4bc4-a564-cb4b3ffefd9f tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]521d190f-0bbe-864c-cdfb-1b77fde33ee2, 'name': SearchDatastore_Task, 'duration_secs': 0.010331} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.539328] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ceb32070-4386-46ff-a513-ffd03da6c065 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.544383] env[63028]: DEBUG oslo_vmware.api [None req-5d7cab5c-352a-4bc4-a564-cb4b3ffefd9f tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Waiting for the task: (returnval){ [ 892.544383] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52513236-e109-ed82-c695-21cb7a505e1e" [ 892.544383] env[63028]: _type = "Task" [ 892.544383] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.552244] env[63028]: DEBUG oslo_vmware.api [None req-5d7cab5c-352a-4bc4-a564-cb4b3ffefd9f tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52513236-e109-ed82-c695-21cb7a505e1e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.605978] env[63028]: DEBUG oslo_concurrency.lockutils [None req-3885cc69-0e8c-4570-88b4-1466a9dc892d tempest-ListImageFiltersTestJSON-852699088 tempest-ListImageFiltersTestJSON-852699088-project-member] Lock "1316318e-8dcf-4ac2-b40a-6a3ab6964997" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 40.879s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 892.794671] env[63028]: DEBUG oslo_vmware.api [None req-d9c4e743-0ad3-452d-acd1-8a340ea80d72 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2735781, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.260824} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.794888] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-d9c4e743-0ad3-452d-acd1-8a340ea80d72 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 892.795129] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-d9c4e743-0ad3-452d-acd1-8a340ea80d72 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: 3b90dbb8-66ce-435f-beae-5464720bfb3e] Deleted contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 892.795318] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-d9c4e743-0ad3-452d-acd1-8a340ea80d72 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: 3b90dbb8-66ce-435f-beae-5464720bfb3e] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 892.795495] env[63028]: INFO nova.compute.manager [None req-d9c4e743-0ad3-452d-acd1-8a340ea80d72 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: 3b90dbb8-66ce-435f-beae-5464720bfb3e] Took 2.32 seconds to destroy the instance on the hypervisor. [ 892.795701] env[63028]: DEBUG oslo.service.loopingcall [None req-d9c4e743-0ad3-452d-acd1-8a340ea80d72 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 892.795855] env[63028]: DEBUG nova.compute.manager [-] [instance: 3b90dbb8-66ce-435f-beae-5464720bfb3e] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 892.795963] env[63028]: DEBUG nova.network.neutron [-] [instance: 3b90dbb8-66ce-435f-beae-5464720bfb3e] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 892.835715] env[63028]: DEBUG oslo_vmware.api [None req-f456b00f-8e30-4002-90c9-4a456674e99d tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]525a8edd-346f-728c-8611-98a15bff6a36, 'name': SearchDatastore_Task, 'duration_secs': 0.009732} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.838058] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f456b00f-8e30-4002-90c9-4a456674e99d tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 892.838298] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-f456b00f-8e30-4002-90c9-4a456674e99d tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: b77ba7d6-305e-4b60-a4b7-9353c12c3920] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 892.838506] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f456b00f-8e30-4002-90c9-4a456674e99d tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 892.936491] env[63028]: DEBUG oslo_vmware.api [None req-656b4c07-6b90-4bb5-896d-d42945654795 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Task: {'id': task-2735783, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.284442} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.936491] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-656b4c07-6b90-4bb5-896d-d42945654795 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 892.936491] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-656b4c07-6b90-4bb5-896d-d42945654795 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] [instance: 15326f55-2db8-47c3-b1fd-ce8ba1174c79] Deleted contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 892.936491] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-656b4c07-6b90-4bb5-896d-d42945654795 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] [instance: 15326f55-2db8-47c3-b1fd-ce8ba1174c79] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 892.936491] env[63028]: INFO nova.compute.manager [None req-656b4c07-6b90-4bb5-896d-d42945654795 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] [instance: 15326f55-2db8-47c3-b1fd-ce8ba1174c79] Took 1.10 seconds to destroy the instance on the hypervisor. [ 892.936491] env[63028]: DEBUG oslo.service.loopingcall [None req-656b4c07-6b90-4bb5-896d-d42945654795 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 892.936491] env[63028]: DEBUG nova.compute.manager [-] [instance: 15326f55-2db8-47c3-b1fd-ce8ba1174c79] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 892.936491] env[63028]: DEBUG nova.network.neutron [-] [instance: 15326f55-2db8-47c3-b1fd-ce8ba1174c79] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 892.969989] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f4e9a3cc-4341-4479-be04-a674123e04ce tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 893.010259] env[63028]: DEBUG oslo_concurrency.lockutils [req-c6d1b563-1d8a-42b6-9bc3-a7ca2a84b1ce req-48463416-37a9-4fd1-90b0-b7d6ffa5a934 service nova] Releasing lock "refresh_cache-b77ba7d6-305e-4b60-a4b7-9353c12c3920" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 893.010439] env[63028]: DEBUG nova.compute.manager [req-c6d1b563-1d8a-42b6-9bc3-a7ca2a84b1ce req-48463416-37a9-4fd1-90b0-b7d6ffa5a934 service nova] [instance: d663c2df-ae54-4c50-a70f-e2180700c700] Received event network-vif-deleted-fea60f3c-e539-418e-abfc-a7a41c223938 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 893.010619] env[63028]: DEBUG nova.compute.manager [req-c6d1b563-1d8a-42b6-9bc3-a7ca2a84b1ce req-48463416-37a9-4fd1-90b0-b7d6ffa5a934 service nova] [instance: 70147f2f-0b5e-4343-84e4-8bc195a5485d] Received event network-vif-deleted-88cffe20-d5e9-44e6-8180-39722a305d2e {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 893.010779] env[63028]: INFO nova.compute.manager [req-c6d1b563-1d8a-42b6-9bc3-a7ca2a84b1ce req-48463416-37a9-4fd1-90b0-b7d6ffa5a934 service nova] [instance: 70147f2f-0b5e-4343-84e4-8bc195a5485d] Neutron deleted interface 88cffe20-d5e9-44e6-8180-39722a305d2e; detaching it from the instance and deleting it from the info cache [ 893.010941] env[63028]: DEBUG nova.network.neutron [req-c6d1b563-1d8a-42b6-9bc3-a7ca2a84b1ce req-48463416-37a9-4fd1-90b0-b7d6ffa5a934 service nova] [instance: 70147f2f-0b5e-4343-84e4-8bc195a5485d] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 893.055589] env[63028]: DEBUG oslo_vmware.api [None req-5d7cab5c-352a-4bc4-a564-cb4b3ffefd9f tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52513236-e109-ed82-c695-21cb7a505e1e, 'name': SearchDatastore_Task, 'duration_secs': 0.023158} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.055896] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5d7cab5c-352a-4bc4-a564-cb4b3ffefd9f tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 893.056203] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-5d7cab5c-352a-4bc4-a564-cb4b3ffefd9f tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] 1d008794-3c1a-46c6-b4eb-3d5441efdb22/f2ba2026-3f4b-431c-97c1-c4ba582a9907-rescue.vmdk. {{(pid=63028) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 893.056507] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f456b00f-8e30-4002-90c9-4a456674e99d tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 893.056698] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-f456b00f-8e30-4002-90c9-4a456674e99d tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 893.056912] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fa264036-cdfa-4476-9cfe-a121f94ba8b9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.059156] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-14f0aa89-d71f-4e9a-b518-a00a019bd7af {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.067160] env[63028]: DEBUG oslo_vmware.api [None req-5d7cab5c-352a-4bc4-a564-cb4b3ffefd9f tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Waiting for the task: (returnval){ [ 893.067160] env[63028]: value = "task-2735784" [ 893.067160] env[63028]: _type = "Task" [ 893.067160] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.071503] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-f456b00f-8e30-4002-90c9-4a456674e99d tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 893.071773] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-f456b00f-8e30-4002-90c9-4a456674e99d tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 893.072858] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d088227b-682c-4748-ae76-2f46c14a6576 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.079498] env[63028]: DEBUG oslo_concurrency.lockutils [None req-93dc5eed-51ca-4a79-934e-c11281bb8abc tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.011s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 893.080659] env[63028]: DEBUG oslo_vmware.api [None req-5d7cab5c-352a-4bc4-a564-cb4b3ffefd9f tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2735784, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.081323] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c74f3c02-cb2d-48e5-b35b-047d36f54ebf tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 36.831s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 893.084077] env[63028]: DEBUG oslo_vmware.api [None req-f456b00f-8e30-4002-90c9-4a456674e99d tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Waiting for the task: (returnval){ [ 893.084077] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52c77f11-6057-ce13-6bdf-b72c62eef5ae" [ 893.084077] env[63028]: _type = "Task" [ 893.084077] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.100018] env[63028]: DEBUG oslo_vmware.api [None req-f456b00f-8e30-4002-90c9-4a456674e99d tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52c77f11-6057-ce13-6bdf-b72c62eef5ae, 'name': SearchDatastore_Task, 'duration_secs': 0.008828} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.102339] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3877e765-26cb-4b8d-9fa7-15749cd94c31 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.109919] env[63028]: DEBUG oslo_vmware.api [None req-f456b00f-8e30-4002-90c9-4a456674e99d tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Waiting for the task: (returnval){ [ 893.109919] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52659b5c-6609-f5e6-382b-75ce59ddb737" [ 893.109919] env[63028]: _type = "Task" [ 893.109919] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.125023] env[63028]: DEBUG oslo_vmware.api [None req-f456b00f-8e30-4002-90c9-4a456674e99d tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52659b5c-6609-f5e6-382b-75ce59ddb737, 'name': SearchDatastore_Task, 'duration_secs': 0.009699} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.125023] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f456b00f-8e30-4002-90c9-4a456674e99d tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 893.125023] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-f456b00f-8e30-4002-90c9-4a456674e99d tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] b77ba7d6-305e-4b60-a4b7-9353c12c3920/b77ba7d6-305e-4b60-a4b7-9353c12c3920.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 893.126303] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5eaa810b-5e02-456e-a0eb-10b96d9362db {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.134458] env[63028]: DEBUG oslo_vmware.api [None req-f456b00f-8e30-4002-90c9-4a456674e99d tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Waiting for the task: (returnval){ [ 893.134458] env[63028]: value = "task-2735785" [ 893.134458] env[63028]: _type = "Task" [ 893.134458] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.145274] env[63028]: DEBUG oslo_vmware.api [None req-f456b00f-8e30-4002-90c9-4a456674e99d tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2735785, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.513859] env[63028]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3f915b7c-5ec9-464c-9756-c7c888dc71a8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.525979] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-812d6784-b441-4d91-8a3a-792445ce79f8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.566428] env[63028]: DEBUG nova.compute.manager [req-c6d1b563-1d8a-42b6-9bc3-a7ca2a84b1ce req-48463416-37a9-4fd1-90b0-b7d6ffa5a934 service nova] [instance: 70147f2f-0b5e-4343-84e4-8bc195a5485d] Detach interface failed, port_id=88cffe20-d5e9-44e6-8180-39722a305d2e, reason: Instance 70147f2f-0b5e-4343-84e4-8bc195a5485d could not be found. 
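The SearchDatastore_Task, MakeDirectory and CopyVirtualDisk_Task calls interleaved above all follow the same oslo.vmware request/poll cycle: invoking a *_Task SOAP method returns a task reference (the "(returnval){ value = task-... }" blocks), which wait_for_task() then polls until the "completed successfully" entry appears. A minimal sketch of that idiom follows; the vCenter address, credentials and datastore paths are placeholders, not values from this deployment.

    # Illustrative sketch of the oslo.vmware task-wait idiom reflected in the log above.
    from oslo_vmware import api as vmware_api
    from oslo_vmware import exceptions as vexc

    session = vmware_api.VMwareAPISession(
        'vc.example.test',               # placeholder vCenter endpoint
        'administrator@vsphere.local',   # placeholder user
        'secret',                        # placeholder password
        api_retry_count=10,
        task_poll_interval=0.5)          # source of the periodic _poll_task DEBUG lines

    content = session.vim.service_content

    try:
        # invoke_api() issues the SOAP call and returns the Task reference;
        # wait_for_task() polls it, logging "progress is N%" along the way.
        copy_task = session.invoke_api(
            session.vim, 'CopyVirtualDisk_Task',
            content.virtualDiskManager,
            sourceName='[datastore1] devstack-image-cache_base/example.vmdk',   # placeholder source
            destName='[datastore1] example-instance/example-instance.vmdk')     # placeholder destination
        session.wait_for_task(copy_task)
    except vexc.VimException:
        # Task failures surface here once polling observes an error state.
        raise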
{{(pid=63028) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 893.576337] env[63028]: DEBUG oslo_vmware.api [None req-5d7cab5c-352a-4bc4-a564-cb4b3ffefd9f tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2735784, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.501278} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.576621] env[63028]: INFO nova.virt.vmwareapi.ds_util [None req-5d7cab5c-352a-4bc4-a564-cb4b3ffefd9f tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] 1d008794-3c1a-46c6-b4eb-3d5441efdb22/f2ba2026-3f4b-431c-97c1-c4ba582a9907-rescue.vmdk. [ 893.577425] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4614502-6f9b-4a01-ae84-90ada6ba512d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.596022] env[63028]: DEBUG nova.objects.instance [None req-c74f3c02-cb2d-48e5-b35b-047d36f54ebf tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Lazy-loading 'migration_context' on Instance uuid 52b19182-a7e2-4461-b4eb-e6cd8a30024e {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 893.605572] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-5d7cab5c-352a-4bc4-a564-cb4b3ffefd9f tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 1d008794-3c1a-46c6-b4eb-3d5441efdb22] Reconfiguring VM instance instance-00000046 to attach disk [datastore1] 1d008794-3c1a-46c6-b4eb-3d5441efdb22/f2ba2026-3f4b-431c-97c1-c4ba582a9907-rescue.vmdk or device None with type thin {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 893.607562] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-32ceeb34-2718-4231-80c7-61da1d47f0d5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.628989] env[63028]: DEBUG oslo_vmware.api [None req-5d7cab5c-352a-4bc4-a564-cb4b3ffefd9f tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Waiting for the task: (returnval){ [ 893.628989] env[63028]: value = "task-2735786" [ 893.628989] env[63028]: _type = "Task" [ 893.628989] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.644937] env[63028]: DEBUG oslo_vmware.api [None req-5d7cab5c-352a-4bc4-a564-cb4b3ffefd9f tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2735786, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.646890] env[63028]: DEBUG nova.network.neutron [-] [instance: 3b90dbb8-66ce-435f-beae-5464720bfb3e] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 893.652878] env[63028]: DEBUG oslo_vmware.api [None req-f456b00f-8e30-4002-90c9-4a456674e99d tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2735785, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.816810] env[63028]: DEBUG nova.compute.manager [req-9d02bc92-14e3-4d8f-9dea-8bbb2a063f02 req-bc43a882-8000-45a5-b4ea-3232ff195637 service nova] [instance: 3b90dbb8-66ce-435f-beae-5464720bfb3e] Received event network-vif-deleted-ae90a55f-7d07-4ee6-b266-85db7d8ebdad {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 893.817709] env[63028]: DEBUG nova.compute.manager [req-9d02bc92-14e3-4d8f-9dea-8bbb2a063f02 req-bc43a882-8000-45a5-b4ea-3232ff195637 service nova] [instance: 15326f55-2db8-47c3-b1fd-ce8ba1174c79] Received event network-vif-deleted-f00fcabf-7289-4128-84ab-c81a45858e92 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 893.818060] env[63028]: INFO nova.compute.manager [req-9d02bc92-14e3-4d8f-9dea-8bbb2a063f02 req-bc43a882-8000-45a5-b4ea-3232ff195637 service nova] [instance: 15326f55-2db8-47c3-b1fd-ce8ba1174c79] Neutron deleted interface f00fcabf-7289-4128-84ab-c81a45858e92; detaching it from the instance and deleting it from the info cache [ 893.818420] env[63028]: DEBUG nova.network.neutron [req-9d02bc92-14e3-4d8f-9dea-8bbb2a063f02 req-bc43a882-8000-45a5-b4ea-3232ff195637 service nova] [instance: 15326f55-2db8-47c3-b1fd-ce8ba1174c79] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 893.826078] env[63028]: DEBUG nova.network.neutron [-] [instance: 15326f55-2db8-47c3-b1fd-ce8ba1174c79] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 894.139165] env[63028]: DEBUG oslo_vmware.api [None req-5d7cab5c-352a-4bc4-a564-cb4b3ffefd9f tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2735786, 'name': ReconfigVM_Task, 'duration_secs': 0.473708} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.142148] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-5d7cab5c-352a-4bc4-a564-cb4b3ffefd9f tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 1d008794-3c1a-46c6-b4eb-3d5441efdb22] Reconfigured VM instance instance-00000046 to attach disk [datastore1] 1d008794-3c1a-46c6-b4eb-3d5441efdb22/f2ba2026-3f4b-431c-97c1-c4ba582a9907-rescue.vmdk or device None with type thin {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 894.142964] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be63cebc-df2f-4cc1-b80f-72b31951f205 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.152158] env[63028]: DEBUG oslo_vmware.api [None req-f456b00f-8e30-4002-90c9-4a456674e99d tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2735785, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.788544} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.169582] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-f456b00f-8e30-4002-90c9-4a456674e99d tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] b77ba7d6-305e-4b60-a4b7-9353c12c3920/b77ba7d6-305e-4b60-a4b7-9353c12c3920.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 894.169823] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-f456b00f-8e30-4002-90c9-4a456674e99d tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: b77ba7d6-305e-4b60-a4b7-9353c12c3920] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 894.170265] env[63028]: INFO nova.compute.manager [-] [instance: 3b90dbb8-66ce-435f-beae-5464720bfb3e] Took 1.37 seconds to deallocate network for instance. [ 894.178187] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5af903dc-84f3-43c5-b3d1-267630bc2a9d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.180806] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-47f721b2-99cc-4e27-8e0d-ecce17f04d25 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.197265] env[63028]: DEBUG oslo_vmware.api [None req-f456b00f-8e30-4002-90c9-4a456674e99d tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Waiting for the task: (returnval){ [ 894.197265] env[63028]: value = "task-2735787" [ 894.197265] env[63028]: _type = "Task" [ 894.197265] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.198775] env[63028]: DEBUG oslo_vmware.api [None req-5d7cab5c-352a-4bc4-a564-cb4b3ffefd9f tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Waiting for the task: (returnval){ [ 894.198775] env[63028]: value = "task-2735788" [ 894.198775] env[63028]: _type = "Task" [ 894.198775] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.215951] env[63028]: DEBUG oslo_vmware.api [None req-f456b00f-8e30-4002-90c9-4a456674e99d tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2735787, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.218649] env[63028]: DEBUG oslo_vmware.api [None req-5d7cab5c-352a-4bc4-a564-cb4b3ffefd9f tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2735788, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.321725] env[63028]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2ddc9782-7d27-4ea0-86ae-6f5461ad64d9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.330751] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54fa842b-893c-4b3a-97e4-908a06f130cc {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.348537] env[63028]: INFO nova.compute.manager [-] [instance: 15326f55-2db8-47c3-b1fd-ce8ba1174c79] Took 1.41 seconds to deallocate network for instance. [ 894.383470] env[63028]: DEBUG nova.compute.manager [req-9d02bc92-14e3-4d8f-9dea-8bbb2a063f02 req-bc43a882-8000-45a5-b4ea-3232ff195637 service nova] [instance: 15326f55-2db8-47c3-b1fd-ce8ba1174c79] Detach interface failed, port_id=f00fcabf-7289-4128-84ab-c81a45858e92, reason: Instance 15326f55-2db8-47c3-b1fd-ce8ba1174c79 could not be found. 
{{(pid=63028) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 894.648644] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eaba9318-6403-4e12-96fb-3d42fd3624e0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.656672] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fa7fb72-a2db-4c2e-8923-06a547b02f85 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.688245] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9686726b-d295-4e31-b74d-d397a05475bf {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.695799] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b888876-b863-44e1-847f-db458edec93f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.716619] env[63028]: DEBUG oslo_vmware.api [None req-f456b00f-8e30-4002-90c9-4a456674e99d tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2735787, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076838} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.717091] env[63028]: DEBUG nova.compute.provider_tree [None req-c74f3c02-cb2d-48e5-b35b-047d36f54ebf tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 894.719193] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-f456b00f-8e30-4002-90c9-4a456674e99d tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: b77ba7d6-305e-4b60-a4b7-9353c12c3920] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 894.723595] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc126455-bec3-4fdc-a0cf-832c6c1e687f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.729017] env[63028]: DEBUG oslo_vmware.api [None req-5d7cab5c-352a-4bc4-a564-cb4b3ffefd9f tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2735788, 'name': ReconfigVM_Task, 'duration_secs': 0.168933} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.733824] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d7cab5c-352a-4bc4-a564-cb4b3ffefd9f tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 1d008794-3c1a-46c6-b4eb-3d5441efdb22] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 894.734099] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-45fb1655-e33c-4d81-afc9-ec7a7f43dca1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.754816] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-f456b00f-8e30-4002-90c9-4a456674e99d tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: b77ba7d6-305e-4b60-a4b7-9353c12c3920] Reconfiguring VM instance instance-0000004b to attach disk [datastore1] b77ba7d6-305e-4b60-a4b7-9353c12c3920/b77ba7d6-305e-4b60-a4b7-9353c12c3920.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 894.755848] env[63028]: INFO nova.compute.manager [None req-d9c4e743-0ad3-452d-acd1-8a340ea80d72 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: 3b90dbb8-66ce-435f-beae-5464720bfb3e] Took 0.58 seconds to detach 1 volumes for instance. [ 894.757911] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1bcc534d-d542-4727-85df-2bd30d75b155 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.776743] env[63028]: DEBUG oslo_vmware.api [None req-5d7cab5c-352a-4bc4-a564-cb4b3ffefd9f tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Waiting for the task: (returnval){ [ 894.776743] env[63028]: value = "task-2735789" [ 894.776743] env[63028]: _type = "Task" [ 894.776743] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.782568] env[63028]: DEBUG oslo_vmware.api [None req-f456b00f-8e30-4002-90c9-4a456674e99d tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Waiting for the task: (returnval){ [ 894.782568] env[63028]: value = "task-2735790" [ 894.782568] env[63028]: _type = "Task" [ 894.782568] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.788611] env[63028]: DEBUG oslo_vmware.api [None req-5d7cab5c-352a-4bc4-a564-cb4b3ffefd9f tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2735789, 'name': PowerOnVM_Task} progress is 33%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.795555] env[63028]: DEBUG oslo_vmware.api [None req-f456b00f-8e30-4002-90c9-4a456674e99d tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2735790, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.860960] env[63028]: DEBUG oslo_concurrency.lockutils [None req-656b4c07-6b90-4bb5-896d-d42945654795 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 895.227920] env[63028]: DEBUG nova.scheduler.client.report [None req-c74f3c02-cb2d-48e5-b35b-047d36f54ebf tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 895.281022] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d9c4e743-0ad3-452d-acd1-8a340ea80d72 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 895.289649] env[63028]: DEBUG oslo_vmware.api [None req-5d7cab5c-352a-4bc4-a564-cb4b3ffefd9f tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2735789, 'name': PowerOnVM_Task, 'duration_secs': 0.407465} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.290846] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d7cab5c-352a-4bc4-a564-cb4b3ffefd9f tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 1d008794-3c1a-46c6-b4eb-3d5441efdb22] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 895.296119] env[63028]: DEBUG oslo_vmware.api [None req-f456b00f-8e30-4002-90c9-4a456674e99d tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2735790, 'name': ReconfigVM_Task, 'duration_secs': 0.268743} completed successfully. 
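The inventory reported above for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 encodes capacity per resource class, with the allocation ratio applied to (total - reserved). Worked out for the logged values as a small illustrative calculation (not output from this run):

    # Effective capacity per resource class from the inventory dict logged above:
    # capacity = (total - reserved) * allocation_ratio
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, int(capacity))
    # VCPU 192, MEMORY_MB 196078, DISK_GB 400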
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.297283] env[63028]: DEBUG nova.compute.manager [None req-5d7cab5c-352a-4bc4-a564-cb4b3ffefd9f tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 1d008794-3c1a-46c6-b4eb-3d5441efdb22] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 895.298660] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-f456b00f-8e30-4002-90c9-4a456674e99d tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: b77ba7d6-305e-4b60-a4b7-9353c12c3920] Reconfigured VM instance instance-0000004b to attach disk [datastore1] b77ba7d6-305e-4b60-a4b7-9353c12c3920/b77ba7d6-305e-4b60-a4b7-9353c12c3920.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 895.299904] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbca4b4f-6048-418d-9ed8-4b63cb6e5128 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.302627] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b1707506-6666-45de-b727-3e9d332561dc {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.310582] env[63028]: DEBUG oslo_vmware.api [None req-f456b00f-8e30-4002-90c9-4a456674e99d tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Waiting for the task: (returnval){ [ 895.310582] env[63028]: value = "task-2735791" [ 895.310582] env[63028]: _type = "Task" [ 895.310582] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.320976] env[63028]: DEBUG oslo_vmware.api [None req-f456b00f-8e30-4002-90c9-4a456674e99d tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2735791, 'name': Rename_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.827181] env[63028]: DEBUG oslo_vmware.api [None req-f456b00f-8e30-4002-90c9-4a456674e99d tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2735791, 'name': Rename_Task, 'duration_secs': 0.14253} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.827526] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-f456b00f-8e30-4002-90c9-4a456674e99d tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: b77ba7d6-305e-4b60-a4b7-9353c12c3920] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 895.827736] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f9c28adc-27f5-4172-ab22-a04b552d5b7e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.834743] env[63028]: DEBUG oslo_vmware.api [None req-f456b00f-8e30-4002-90c9-4a456674e99d tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Waiting for the task: (returnval){ [ 895.834743] env[63028]: value = "task-2735792" [ 895.834743] env[63028]: _type = "Task" [ 895.834743] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.844476] env[63028]: DEBUG oslo_vmware.api [None req-f456b00f-8e30-4002-90c9-4a456674e99d tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2735792, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.241867] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c74f3c02-cb2d-48e5-b35b-047d36f54ebf tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 3.160s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 896.248083] env[63028]: DEBUG oslo_concurrency.lockutils [None req-35f8f241-e040-40b3-90fb-8ff6ead9ad3e tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 39.107s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 896.248341] env[63028]: DEBUG nova.objects.instance [None req-35f8f241-e040-40b3-90fb-8ff6ead9ad3e tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Lazy-loading 'resources' on Instance uuid 13e0ca05-3ab3-43e2-8b0d-8045e26d6723 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 896.349890] env[63028]: DEBUG oslo_vmware.api [None req-f456b00f-8e30-4002-90c9-4a456674e99d tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2735792, 'name': PowerOnVM_Task, 'duration_secs': 0.473582} completed successfully. 
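The "compute_resources" bookkeeping in the entries above (acquired :: waited 39.107s, released :: held 3.160s) is emitted by oslo.concurrency's named in-process locks. A rough sketch of how such a lock is taken; the 'nova-' prefix and the function signature are assumptions for illustration, not taken from this log:

    # Sketch of the named-lock pattern behind the "compute_resources" messages.
    from oslo_concurrency import lockutils

    synchronized = lockutils.synchronized_with_prefix('nova-')  # prefix is an assumption

    @synchronized('compute_resources')
    def update_usage(context, instance, nodename):
        # Runs only while the 'compute_resources' lock is held; lockutils emits
        # the DEBUG "Acquiring lock ... / acquired :: waited Ns / released ::
        # held Ns" lines seen throughout this section.
        pass

    # The same lock can also be taken explicitly as a context manager:
    with lockutils.lock('compute_resources'):
        pass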
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.349890] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-f456b00f-8e30-4002-90c9-4a456674e99d tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: b77ba7d6-305e-4b60-a4b7-9353c12c3920] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 896.349890] env[63028]: INFO nova.compute.manager [None req-f456b00f-8e30-4002-90c9-4a456674e99d tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: b77ba7d6-305e-4b60-a4b7-9353c12c3920] Took 8.01 seconds to spawn the instance on the hypervisor. [ 896.349890] env[63028]: DEBUG nova.compute.manager [None req-f456b00f-8e30-4002-90c9-4a456674e99d tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: b77ba7d6-305e-4b60-a4b7-9353c12c3920] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 896.349890] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-784b9ed5-d88f-4a3c-9e6d-6011c6c0a986 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.871707] env[63028]: INFO nova.compute.manager [None req-f456b00f-8e30-4002-90c9-4a456674e99d tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: b77ba7d6-305e-4b60-a4b7-9353c12c3920] Took 49.07 seconds to build instance. [ 896.893623] env[63028]: INFO nova.compute.manager [None req-9dbb446a-bc3f-4f4c-a3ab-ed4565caac38 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 1d008794-3c1a-46c6-b4eb-3d5441efdb22] Unrescuing [ 896.894061] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9dbb446a-bc3f-4f4c-a3ab-ed4565caac38 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Acquiring lock "refresh_cache-1d008794-3c1a-46c6-b4eb-3d5441efdb22" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 896.894061] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9dbb446a-bc3f-4f4c-a3ab-ed4565caac38 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Acquired lock "refresh_cache-1d008794-3c1a-46c6-b4eb-3d5441efdb22" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 896.894311] env[63028]: DEBUG nova.network.neutron [None req-9dbb446a-bc3f-4f4c-a3ab-ed4565caac38 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 1d008794-3c1a-46c6-b4eb-3d5441efdb22] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 897.231905] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cc91537-419d-4339-9b54-de8ad851d8fe {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.240122] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-042df0a6-fc94-46a3-aa6d-fe999ef68330 
{{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.283024] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2381704-e4ef-4cac-bef2-bc8807e995fb {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.288551] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c233017-37c8-48c1-b96f-45ac50bff6da {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.302726] env[63028]: DEBUG nova.compute.provider_tree [None req-35f8f241-e040-40b3-90fb-8ff6ead9ad3e tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 897.376592] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f456b00f-8e30-4002-90c9-4a456674e99d tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Lock "b77ba7d6-305e-4b60-a4b7-9353c12c3920" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 68.228s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 897.663064] env[63028]: DEBUG oslo_concurrency.lockutils [None req-418ddbfe-4db6-49c4-80df-5e4e8fabaf08 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] Acquiring lock "8f621e7b-0c76-4f70-830d-09d28a2e0736" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 897.663064] env[63028]: DEBUG oslo_concurrency.lockutils [None req-418ddbfe-4db6-49c4-80df-5e4e8fabaf08 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] Lock "8f621e7b-0c76-4f70-830d-09d28a2e0736" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 897.806311] env[63028]: DEBUG nova.scheduler.client.report [None req-35f8f241-e040-40b3-90fb-8ff6ead9ad3e tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 897.812049] env[63028]: INFO nova.compute.manager [None req-c74f3c02-cb2d-48e5-b35b-047d36f54ebf tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: 52b19182-a7e2-4461-b4eb-e6cd8a30024e] Swapping old allocation on dict_keys(['399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2']) held by migration 
d7f275c5-0c12-4e9a-baa7-f99e7b616c4f for instance [ 897.854267] env[63028]: DEBUG nova.scheduler.client.report [None req-c74f3c02-cb2d-48e5-b35b-047d36f54ebf tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Overwriting current allocation {'allocations': {'399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 100}}, 'project_id': 'e14d427c980c486cbbe8ff0982a30428', 'user_id': '1db2756e554d4fa2a66cff81ab6d4105', 'consumer_generation': 1} on consumer 52b19182-a7e2-4461-b4eb-e6cd8a30024e {{(pid=63028) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2036}} [ 898.001340] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c74f3c02-cb2d-48e5-b35b-047d36f54ebf tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Acquiring lock "refresh_cache-52b19182-a7e2-4461-b4eb-e6cd8a30024e" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 898.001716] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c74f3c02-cb2d-48e5-b35b-047d36f54ebf tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Acquired lock "refresh_cache-52b19182-a7e2-4461-b4eb-e6cd8a30024e" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 898.001788] env[63028]: DEBUG nova.network.neutron [None req-c74f3c02-cb2d-48e5-b35b-047d36f54ebf tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: 52b19182-a7e2-4461-b4eb-e6cd8a30024e] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 898.085815] env[63028]: DEBUG nova.network.neutron [None req-9dbb446a-bc3f-4f4c-a3ab-ed4565caac38 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 1d008794-3c1a-46c6-b4eb-3d5441efdb22] Updating instance_info_cache with network_info: [{"id": "4da14eb6-411a-4cdd-afe0-bd34e474882f", "address": "fa:16:3e:43:b4:9d", "network": {"id": "53257b54-f197-4a4e-94bb-23ad6c6b7353", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-575661432-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e2304ce21bf141cab94fb6c342653812", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0954fad3-d24d-496c-83e6-a09d3cb556fc", "external-id": "nsx-vlan-transportzone-216", "segmentation_id": 216, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4da14eb6-41", "ovs_interfaceid": "4da14eb6-411a-4cdd-afe0-bd34e474882f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 898.165695] env[63028]: DEBUG nova.compute.manager [None req-418ddbfe-4db6-49c4-80df-5e4e8fabaf08 tempest-ServerGroupTestJSON-753068178 
tempest-ServerGroupTestJSON-753068178-project-member] [instance: 8f621e7b-0c76-4f70-830d-09d28a2e0736] Starting instance... {{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 898.250722] env[63028]: DEBUG nova.compute.manager [req-b3d2bbe9-9483-4a85-a633-ee0db81584d4 req-81ed31e0-4af3-42f5-9390-f0e2ddcd6aa4 service nova] [instance: b77ba7d6-305e-4b60-a4b7-9353c12c3920] Received event network-changed-9efd2ef2-d319-4038-ab28-44a46bd597d8 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 898.250722] env[63028]: DEBUG nova.compute.manager [req-b3d2bbe9-9483-4a85-a633-ee0db81584d4 req-81ed31e0-4af3-42f5-9390-f0e2ddcd6aa4 service nova] [instance: b77ba7d6-305e-4b60-a4b7-9353c12c3920] Refreshing instance network info cache due to event network-changed-9efd2ef2-d319-4038-ab28-44a46bd597d8. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 898.250722] env[63028]: DEBUG oslo_concurrency.lockutils [req-b3d2bbe9-9483-4a85-a633-ee0db81584d4 req-81ed31e0-4af3-42f5-9390-f0e2ddcd6aa4 service nova] Acquiring lock "refresh_cache-b77ba7d6-305e-4b60-a4b7-9353c12c3920" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 898.250722] env[63028]: DEBUG oslo_concurrency.lockutils [req-b3d2bbe9-9483-4a85-a633-ee0db81584d4 req-81ed31e0-4af3-42f5-9390-f0e2ddcd6aa4 service nova] Acquired lock "refresh_cache-b77ba7d6-305e-4b60-a4b7-9353c12c3920" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 898.250722] env[63028]: DEBUG nova.network.neutron [req-b3d2bbe9-9483-4a85-a633-ee0db81584d4 req-81ed31e0-4af3-42f5-9390-f0e2ddcd6aa4 service nova] [instance: b77ba7d6-305e-4b60-a4b7-9353c12c3920] Refreshing network info cache for port 9efd2ef2-d319-4038-ab28-44a46bd597d8 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 898.316462] env[63028]: DEBUG oslo_concurrency.lockutils [None req-35f8f241-e040-40b3-90fb-8ff6ead9ad3e tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.068s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 898.320079] env[63028]: DEBUG oslo_concurrency.lockutils [None req-37a0163f-d4ad-4f5b-b7a7-2c6f260b44ef tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 36.760s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 898.320334] env[63028]: DEBUG nova.objects.instance [None req-37a0163f-d4ad-4f5b-b7a7-2c6f260b44ef tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Lazy-loading 'resources' on Instance uuid f4718363-73b2-4016-8849-f75e98259023 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 898.346179] env[63028]: INFO nova.scheduler.client.report [None req-35f8f241-e040-40b3-90fb-8ff6ead9ad3e tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Deleted allocations for instance 13e0ca05-3ab3-43e2-8b0d-8045e26d6723 [ 898.587879] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9dbb446a-bc3f-4f4c-a3ab-ed4565caac38 
tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Releasing lock "refresh_cache-1d008794-3c1a-46c6-b4eb-3d5441efdb22" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 898.588492] env[63028]: DEBUG nova.objects.instance [None req-9dbb446a-bc3f-4f4c-a3ab-ed4565caac38 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Lazy-loading 'flavor' on Instance uuid 1d008794-3c1a-46c6-b4eb-3d5441efdb22 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 898.687727] env[63028]: DEBUG oslo_concurrency.lockutils [None req-418ddbfe-4db6-49c4-80df-5e4e8fabaf08 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 898.706351] env[63028]: DEBUG nova.network.neutron [None req-c74f3c02-cb2d-48e5-b35b-047d36f54ebf tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: 52b19182-a7e2-4461-b4eb-e6cd8a30024e] Updating instance_info_cache with network_info: [{"id": "7dd5be5d-a88d-4dcd-a42d-7842895207f7", "address": "fa:16:3e:45:d9:fe", "network": {"id": "47c482bc-2ff1-431d-8910-0bf36def79a2", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.36", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "96661ac8d4f04d6e97eea4809b444133", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56136ef6-99d7-4562-9a9f-d66fec951c5c", "external-id": "nsx-vlan-transportzone-32", "segmentation_id": 32, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7dd5be5d-a8", "ovs_interfaceid": "7dd5be5d-a88d-4dcd-a42d-7842895207f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 898.855372] env[63028]: DEBUG oslo_concurrency.lockutils [None req-35f8f241-e040-40b3-90fb-8ff6ead9ad3e tempest-ServerShowV254Test-799272594 tempest-ServerShowV254Test-799272594-project-member] Lock "13e0ca05-3ab3-43e2-8b0d-8045e26d6723" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 46.009s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 899.025603] env[63028]: DEBUG nova.network.neutron [req-b3d2bbe9-9483-4a85-a633-ee0db81584d4 req-81ed31e0-4af3-42f5-9390-f0e2ddcd6aa4 service nova] [instance: b77ba7d6-305e-4b60-a4b7-9353c12c3920] Updated VIF entry in instance network info cache for port 9efd2ef2-d319-4038-ab28-44a46bd597d8. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 899.025964] env[63028]: DEBUG nova.network.neutron [req-b3d2bbe9-9483-4a85-a633-ee0db81584d4 req-81ed31e0-4af3-42f5-9390-f0e2ddcd6aa4 service nova] [instance: b77ba7d6-305e-4b60-a4b7-9353c12c3920] Updating instance_info_cache with network_info: [{"id": "9efd2ef2-d319-4038-ab28-44a46bd597d8", "address": "fa:16:3e:90:60:0c", "network": {"id": "c2f1496c-e3fd-43db-a032-12cdacdb4e46", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1287620143-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.220", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "25a6457f62d149629c09589feb1a550c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9efd2ef2-d3", "ovs_interfaceid": "9efd2ef2-d319-4038-ab28-44a46bd597d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 899.094015] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30b29552-b3fe-41d8-844d-4119ab69a843 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.118974] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-9dbb446a-bc3f-4f4c-a3ab-ed4565caac38 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 1d008794-3c1a-46c6-b4eb-3d5441efdb22] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 899.121836] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2fc32bd0-d94e-42e2-b895-91c5a272d786 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.128895] env[63028]: DEBUG oslo_vmware.api [None req-9dbb446a-bc3f-4f4c-a3ab-ed4565caac38 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Waiting for the task: (returnval){ [ 899.128895] env[63028]: value = "task-2735793" [ 899.128895] env[63028]: _type = "Task" [ 899.128895] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.138222] env[63028]: DEBUG oslo_vmware.api [None req-9dbb446a-bc3f-4f4c-a3ab-ed4565caac38 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2735793, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.209553] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c74f3c02-cb2d-48e5-b35b-047d36f54ebf tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Releasing lock "refresh_cache-52b19182-a7e2-4461-b4eb-e6cd8a30024e" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 899.210161] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-c74f3c02-cb2d-48e5-b35b-047d36f54ebf tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: 52b19182-a7e2-4461-b4eb-e6cd8a30024e] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 899.210462] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-24a9886b-5ac9-40cf-8fa7-969c0d968820 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.219317] env[63028]: DEBUG oslo_vmware.api [None req-c74f3c02-cb2d-48e5-b35b-047d36f54ebf tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Waiting for the task: (returnval){ [ 899.219317] env[63028]: value = "task-2735794" [ 899.219317] env[63028]: _type = "Task" [ 899.219317] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.228075] env[63028]: DEBUG oslo_vmware.api [None req-c74f3c02-cb2d-48e5-b35b-047d36f54ebf tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Task: {'id': task-2735794, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.301093] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1a7bae1-8160-4856-b1fe-3a66f9f159d0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.309719] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b9d0fd4-bdce-4a0f-8169-b55986f4f363 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.343150] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-022e7ec3-13bb-4c06-b9e1-4acbd02db397 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.351537] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5ec6828-c95a-4289-97af-a4690b9518a3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.365845] env[63028]: DEBUG nova.compute.provider_tree [None req-37a0163f-d4ad-4f5b-b7a7-2c6f260b44ef tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 899.529470] env[63028]: DEBUG oslo_concurrency.lockutils [req-b3d2bbe9-9483-4a85-a633-ee0db81584d4 req-81ed31e0-4af3-42f5-9390-f0e2ddcd6aa4 service nova] Releasing lock "refresh_cache-b77ba7d6-305e-4b60-a4b7-9353c12c3920" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 899.639808] env[63028]: DEBUG oslo_vmware.api [None req-9dbb446a-bc3f-4f4c-a3ab-ed4565caac38 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2735793, 'name': PowerOffVM_Task, 'duration_secs': 0.225263} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.640556] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-9dbb446a-bc3f-4f4c-a3ab-ed4565caac38 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 1d008794-3c1a-46c6-b4eb-3d5441efdb22] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 899.645671] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-9dbb446a-bc3f-4f4c-a3ab-ed4565caac38 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 1d008794-3c1a-46c6-b4eb-3d5441efdb22] Reconfiguring VM instance instance-00000046 to detach disk 2001 {{(pid=63028) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 899.645768] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f618cc5e-1d8b-45cd-a1fb-f278154a6990 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.664228] env[63028]: DEBUG oslo_vmware.api [None req-9dbb446a-bc3f-4f4c-a3ab-ed4565caac38 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Waiting for the task: (returnval){ [ 899.664228] env[63028]: value = "task-2735795" [ 899.664228] env[63028]: _type = "Task" [ 899.664228] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.672126] env[63028]: DEBUG oslo_vmware.api [None req-9dbb446a-bc3f-4f4c-a3ab-ed4565caac38 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2735795, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.729481] env[63028]: DEBUG oslo_vmware.api [None req-c74f3c02-cb2d-48e5-b35b-047d36f54ebf tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Task: {'id': task-2735794, 'name': PowerOffVM_Task, 'duration_secs': 0.26453} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.729755] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-c74f3c02-cb2d-48e5-b35b-047d36f54ebf tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: 52b19182-a7e2-4461-b4eb-e6cd8a30024e] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 899.730439] env[63028]: DEBUG nova.virt.hardware [None req-c74f3c02-cb2d-48e5-b35b-047d36f54ebf tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:57:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='bd41cb6c-4f59-47c8-83b3-1b15a3032d86',id=38,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-149043815',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 899.730651] env[63028]: DEBUG nova.virt.hardware [None req-c74f3c02-cb2d-48e5-b35b-047d36f54ebf tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 899.730807] env[63028]: DEBUG nova.virt.hardware [None req-c74f3c02-cb2d-48e5-b35b-047d36f54ebf tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 899.730986] env[63028]: DEBUG nova.virt.hardware [None req-c74f3c02-cb2d-48e5-b35b-047d36f54ebf tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 899.731142] env[63028]: DEBUG nova.virt.hardware [None req-c74f3c02-cb2d-48e5-b35b-047d36f54ebf tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 899.731301] env[63028]: DEBUG nova.virt.hardware [None req-c74f3c02-cb2d-48e5-b35b-047d36f54ebf tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 899.731493] env[63028]: DEBUG nova.virt.hardware [None req-c74f3c02-cb2d-48e5-b35b-047d36f54ebf tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 899.731681] env[63028]: DEBUG nova.virt.hardware [None req-c74f3c02-cb2d-48e5-b35b-047d36f54ebf tempest-MigrationsAdminTest-1750269171 
tempest-MigrationsAdminTest-1750269171-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 899.731862] env[63028]: DEBUG nova.virt.hardware [None req-c74f3c02-cb2d-48e5-b35b-047d36f54ebf tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 899.732042] env[63028]: DEBUG nova.virt.hardware [None req-c74f3c02-cb2d-48e5-b35b-047d36f54ebf tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 899.732222] env[63028]: DEBUG nova.virt.hardware [None req-c74f3c02-cb2d-48e5-b35b-047d36f54ebf tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 899.737045] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9add85d3-e4fb-4a05-ab13-8cb2c48d0f42 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.752213] env[63028]: DEBUG oslo_vmware.api [None req-c74f3c02-cb2d-48e5-b35b-047d36f54ebf tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Waiting for the task: (returnval){ [ 899.752213] env[63028]: value = "task-2735796" [ 899.752213] env[63028]: _type = "Task" [ 899.752213] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.760327] env[63028]: DEBUG oslo_vmware.api [None req-c74f3c02-cb2d-48e5-b35b-047d36f54ebf tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Task: {'id': task-2735796, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.870415] env[63028]: DEBUG nova.scheduler.client.report [None req-37a0163f-d4ad-4f5b-b7a7-2c6f260b44ef tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 900.177448] env[63028]: DEBUG oslo_vmware.api [None req-9dbb446a-bc3f-4f4c-a3ab-ed4565caac38 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2735795, 'name': ReconfigVM_Task, 'duration_secs': 0.354058} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.177731] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-9dbb446a-bc3f-4f4c-a3ab-ed4565caac38 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 1d008794-3c1a-46c6-b4eb-3d5441efdb22] Reconfigured VM instance instance-00000046 to detach disk 2001 {{(pid=63028) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 900.177903] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-9dbb446a-bc3f-4f4c-a3ab-ed4565caac38 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 1d008794-3c1a-46c6-b4eb-3d5441efdb22] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 900.178165] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-89f8f16c-b9d4-40fc-82e4-b836b90d61f7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.184310] env[63028]: DEBUG oslo_vmware.api [None req-9dbb446a-bc3f-4f4c-a3ab-ed4565caac38 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Waiting for the task: (returnval){ [ 900.184310] env[63028]: value = "task-2735797" [ 900.184310] env[63028]: _type = "Task" [ 900.184310] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.191811] env[63028]: DEBUG oslo_vmware.api [None req-9dbb446a-bc3f-4f4c-a3ab-ed4565caac38 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2735797, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.261718] env[63028]: DEBUG oslo_vmware.api [None req-c74f3c02-cb2d-48e5-b35b-047d36f54ebf tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Task: {'id': task-2735796, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.375956] env[63028]: DEBUG oslo_concurrency.lockutils [None req-37a0163f-d4ad-4f5b-b7a7-2c6f260b44ef tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.056s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 900.378625] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9c87809b-970a-44ae-a0c9-ae8da38668e0 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 34.190s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 900.380922] env[63028]: INFO nova.compute.claims [None req-9c87809b-970a-44ae-a0c9-ae8da38668e0 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: ed872f21-c2c4-4597-8c9e-9f8d2202b707] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 900.401587] env[63028]: INFO nova.scheduler.client.report [None req-37a0163f-d4ad-4f5b-b7a7-2c6f260b44ef tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Deleted allocations for instance f4718363-73b2-4016-8849-f75e98259023 [ 900.694417] env[63028]: DEBUG oslo_vmware.api [None req-9dbb446a-bc3f-4f4c-a3ab-ed4565caac38 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2735797, 'name': PowerOnVM_Task, 'duration_secs': 0.404279} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.694643] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-9dbb446a-bc3f-4f4c-a3ab-ed4565caac38 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 1d008794-3c1a-46c6-b4eb-3d5441efdb22] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 900.694949] env[63028]: DEBUG nova.compute.manager [None req-9dbb446a-bc3f-4f4c-a3ab-ed4565caac38 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 1d008794-3c1a-46c6-b4eb-3d5441efdb22] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 900.695756] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4457f568-ab67-498f-a268-2cd5a1d5f6fd {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.763156] env[63028]: DEBUG oslo_vmware.api [None req-c74f3c02-cb2d-48e5-b35b-047d36f54ebf tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Task: {'id': task-2735796, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.910501] env[63028]: DEBUG oslo_concurrency.lockutils [None req-37a0163f-d4ad-4f5b-b7a7-2c6f260b44ef tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Lock "f4718363-73b2-4016-8849-f75e98259023" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 43.038s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 901.126468] env[63028]: DEBUG oslo_concurrency.lockutils [None req-1208d605-afb6-4f7d-bed8-7b4272afde28 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Acquiring lock "3e45e7f3-a34f-4eab-9fff-1c874c832e2a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 901.126733] env[63028]: DEBUG oslo_concurrency.lockutils [None req-1208d605-afb6-4f7d-bed8-7b4272afde28 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Lock "3e45e7f3-a34f-4eab-9fff-1c874c832e2a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 901.126948] env[63028]: DEBUG oslo_concurrency.lockutils [None req-1208d605-afb6-4f7d-bed8-7b4272afde28 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Acquiring lock "3e45e7f3-a34f-4eab-9fff-1c874c832e2a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 901.127150] env[63028]: DEBUG oslo_concurrency.lockutils [None req-1208d605-afb6-4f7d-bed8-7b4272afde28 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Lock "3e45e7f3-a34f-4eab-9fff-1c874c832e2a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 901.127325] env[63028]: DEBUG oslo_concurrency.lockutils [None req-1208d605-afb6-4f7d-bed8-7b4272afde28 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Lock "3e45e7f3-a34f-4eab-9fff-1c874c832e2a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 901.130059] env[63028]: INFO nova.compute.manager [None req-1208d605-afb6-4f7d-bed8-7b4272afde28 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: 3e45e7f3-a34f-4eab-9fff-1c874c832e2a] Terminating instance [ 901.263960] env[63028]: DEBUG oslo_vmware.api [None req-c74f3c02-cb2d-48e5-b35b-047d36f54ebf tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Task: {'id': task-2735796, 'name': ReconfigVM_Task, 'duration_secs': 1.181741} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.264348] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18241461-ec6c-4b98-9ee3-48d278ea3d93 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.286193] env[63028]: DEBUG nova.virt.hardware [None req-c74f3c02-cb2d-48e5-b35b-047d36f54ebf tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:57:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='bd41cb6c-4f59-47c8-83b3-1b15a3032d86',id=38,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-149043815',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 901.286446] env[63028]: DEBUG nova.virt.hardware [None req-c74f3c02-cb2d-48e5-b35b-047d36f54ebf tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 901.286603] env[63028]: DEBUG nova.virt.hardware [None req-c74f3c02-cb2d-48e5-b35b-047d36f54ebf tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 901.286804] env[63028]: DEBUG nova.virt.hardware [None req-c74f3c02-cb2d-48e5-b35b-047d36f54ebf tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 901.287047] env[63028]: DEBUG nova.virt.hardware [None req-c74f3c02-cb2d-48e5-b35b-047d36f54ebf tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 901.287220] env[63028]: DEBUG nova.virt.hardware [None req-c74f3c02-cb2d-48e5-b35b-047d36f54ebf tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 901.287441] env[63028]: DEBUG nova.virt.hardware [None req-c74f3c02-cb2d-48e5-b35b-047d36f54ebf tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 901.287607] env[63028]: DEBUG nova.virt.hardware [None req-c74f3c02-cb2d-48e5-b35b-047d36f54ebf tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 901.287773] env[63028]: DEBUG nova.virt.hardware [None req-c74f3c02-cb2d-48e5-b35b-047d36f54ebf tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 901.287936] env[63028]: DEBUG nova.virt.hardware [None req-c74f3c02-cb2d-48e5-b35b-047d36f54ebf tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 901.288122] env[63028]: DEBUG nova.virt.hardware [None req-c74f3c02-cb2d-48e5-b35b-047d36f54ebf tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 901.288900] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0cfafe39-7a37-4cd7-b555-a955e356d374 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.294069] env[63028]: DEBUG oslo_vmware.api [None req-c74f3c02-cb2d-48e5-b35b-047d36f54ebf tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Waiting for the task: (returnval){ [ 901.294069] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5243f606-047a-bdde-e18c-7265da753d15" [ 901.294069] env[63028]: _type = "Task" [ 901.294069] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.301354] env[63028]: DEBUG oslo_vmware.api [None req-c74f3c02-cb2d-48e5-b35b-047d36f54ebf tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5243f606-047a-bdde-e18c-7265da753d15, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.633524] env[63028]: DEBUG nova.compute.manager [None req-1208d605-afb6-4f7d-bed8-7b4272afde28 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: 3e45e7f3-a34f-4eab-9fff-1c874c832e2a] Start destroying the instance on the hypervisor. 
{{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 901.633736] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-1208d605-afb6-4f7d-bed8-7b4272afde28 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: 3e45e7f3-a34f-4eab-9fff-1c874c832e2a] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 901.634614] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-470910ab-7215-41d7-bed3-b05c61d55ae2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.642911] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-1208d605-afb6-4f7d-bed8-7b4272afde28 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: 3e45e7f3-a34f-4eab-9fff-1c874c832e2a] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 901.643450] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-896df0d1-a629-4db5-b644-2ae63aad6fc0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.648564] env[63028]: DEBUG oslo_vmware.api [None req-1208d605-afb6-4f7d-bed8-7b4272afde28 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Waiting for the task: (returnval){ [ 901.648564] env[63028]: value = "task-2735798" [ 901.648564] env[63028]: _type = "Task" [ 901.648564] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.658220] env[63028]: DEBUG oslo_vmware.api [None req-1208d605-afb6-4f7d-bed8-7b4272afde28 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': task-2735798, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.775375] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48774fbd-1616-41be-8f83-1dbd91a26f55 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.782743] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48682c15-6598-4c3f-ad9d-4c353b26600c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.818343] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0eb19e66-e38e-48a6-afdd-83779e60375f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.826506] env[63028]: DEBUG oslo_vmware.api [None req-c74f3c02-cb2d-48e5-b35b-047d36f54ebf tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5243f606-047a-bdde-e18c-7265da753d15, 'name': SearchDatastore_Task, 'duration_secs': 0.007018} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.833683] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-c74f3c02-cb2d-48e5-b35b-047d36f54ebf tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: 52b19182-a7e2-4461-b4eb-e6cd8a30024e] Reconfiguring VM instance instance-00000037 to detach disk 2000 {{(pid=63028) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 901.834058] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6cce6cd4-8f24-4024-b5d1-0e40e51af7f8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.847993] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3335cee4-d622-48f0-bcbe-20787b8c40dc {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.862869] env[63028]: DEBUG nova.compute.provider_tree [None req-9c87809b-970a-44ae-a0c9-ae8da38668e0 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 901.865416] env[63028]: DEBUG oslo_vmware.api [None req-c74f3c02-cb2d-48e5-b35b-047d36f54ebf tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Waiting for the task: (returnval){ [ 901.865416] env[63028]: value = "task-2735799" [ 901.865416] env[63028]: _type = "Task" [ 901.865416] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.873447] env[63028]: DEBUG oslo_vmware.api [None req-c74f3c02-cb2d-48e5-b35b-047d36f54ebf tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Task: {'id': task-2735799, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.167945] env[63028]: DEBUG oslo_vmware.api [None req-1208d605-afb6-4f7d-bed8-7b4272afde28 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': task-2735798, 'name': PowerOffVM_Task, 'duration_secs': 0.2059} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.168178] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-1208d605-afb6-4f7d-bed8-7b4272afde28 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: 3e45e7f3-a34f-4eab-9fff-1c874c832e2a] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 902.168220] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-1208d605-afb6-4f7d-bed8-7b4272afde28 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: 3e45e7f3-a34f-4eab-9fff-1c874c832e2a] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 902.168588] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-372b197b-5a55-493b-99d9-c61f9762c53f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.234855] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-1208d605-afb6-4f7d-bed8-7b4272afde28 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: 3e45e7f3-a34f-4eab-9fff-1c874c832e2a] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 902.234855] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-1208d605-afb6-4f7d-bed8-7b4272afde28 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: 3e45e7f3-a34f-4eab-9fff-1c874c832e2a] Deleting contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 902.234965] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-1208d605-afb6-4f7d-bed8-7b4272afde28 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Deleting the datastore file [datastore1] 3e45e7f3-a34f-4eab-9fff-1c874c832e2a {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 902.235246] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0266e397-0aa9-4ce9-8c0a-2d13e10502bc {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.241883] env[63028]: DEBUG oslo_vmware.api [None req-1208d605-afb6-4f7d-bed8-7b4272afde28 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Waiting for the task: (returnval){ [ 902.241883] env[63028]: value = "task-2735801" [ 902.241883] env[63028]: _type = "Task" [ 902.241883] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.249398] env[63028]: DEBUG oslo_vmware.api [None req-1208d605-afb6-4f7d-bed8-7b4272afde28 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': task-2735801, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.367971] env[63028]: DEBUG nova.scheduler.client.report [None req-9c87809b-970a-44ae-a0c9-ae8da38668e0 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 902.380253] env[63028]: DEBUG oslo_vmware.api [None req-c74f3c02-cb2d-48e5-b35b-047d36f54ebf tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Task: {'id': task-2735799, 'name': ReconfigVM_Task, 'duration_secs': 0.174757} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.380595] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-c74f3c02-cb2d-48e5-b35b-047d36f54ebf tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: 52b19182-a7e2-4461-b4eb-e6cd8a30024e] Reconfigured VM instance instance-00000037 to detach disk 2000 {{(pid=63028) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 902.381427] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e689faa-1321-4590-99fd-b45a9e06b6f4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.404667] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-c74f3c02-cb2d-48e5-b35b-047d36f54ebf tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: 52b19182-a7e2-4461-b4eb-e6cd8a30024e] Reconfiguring VM instance instance-00000037 to attach disk [datastore2] 52b19182-a7e2-4461-b4eb-e6cd8a30024e/52b19182-a7e2-4461-b4eb-e6cd8a30024e.vmdk or device None with type thin {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 902.405269] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7b51fea0-871d-4046-84d0-5f666d16e983 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.424858] env[63028]: DEBUG oslo_vmware.api [None req-c74f3c02-cb2d-48e5-b35b-047d36f54ebf tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Waiting for the task: (returnval){ [ 902.424858] env[63028]: value = "task-2735802" [ 902.424858] env[63028]: _type = "Task" [ 902.424858] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.435228] env[63028]: DEBUG oslo_vmware.api [None req-c74f3c02-cb2d-48e5-b35b-047d36f54ebf tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Task: {'id': task-2735802, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.752107] env[63028]: DEBUG oslo_vmware.api [None req-1208d605-afb6-4f7d-bed8-7b4272afde28 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': task-2735801, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.146147} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.752333] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-1208d605-afb6-4f7d-bed8-7b4272afde28 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 902.752542] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-1208d605-afb6-4f7d-bed8-7b4272afde28 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: 3e45e7f3-a34f-4eab-9fff-1c874c832e2a] Deleted contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 902.752723] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-1208d605-afb6-4f7d-bed8-7b4272afde28 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: 3e45e7f3-a34f-4eab-9fff-1c874c832e2a] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 902.752900] env[63028]: INFO nova.compute.manager [None req-1208d605-afb6-4f7d-bed8-7b4272afde28 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: 3e45e7f3-a34f-4eab-9fff-1c874c832e2a] Took 1.12 seconds to destroy the instance on the hypervisor. [ 902.753157] env[63028]: DEBUG oslo.service.loopingcall [None req-1208d605-afb6-4f7d-bed8-7b4272afde28 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 902.753348] env[63028]: DEBUG nova.compute.manager [-] [instance: 3e45e7f3-a34f-4eab-9fff-1c874c832e2a] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 902.753441] env[63028]: DEBUG nova.network.neutron [-] [instance: 3e45e7f3-a34f-4eab-9fff-1c874c832e2a] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 902.878917] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9c87809b-970a-44ae-a0c9-ae8da38668e0 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.498s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 902.878917] env[63028]: DEBUG nova.compute.manager [None req-9c87809b-970a-44ae-a0c9-ae8da38668e0 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: ed872f21-c2c4-4597-8c9e-9f8d2202b707] Start building networks asynchronously for instance. 
{{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 902.880564] env[63028]: DEBUG oslo_concurrency.lockutils [None req-b38f1040-866d-4003-80a9-2af0d3f0d4c9 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.849s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 902.880564] env[63028]: DEBUG nova.objects.instance [None req-b38f1040-866d-4003-80a9-2af0d3f0d4c9 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Lazy-loading 'resources' on Instance uuid 4e859327-ccd3-440e-b884-67f6cdadf97f {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 902.937305] env[63028]: DEBUG oslo_vmware.api [None req-c74f3c02-cb2d-48e5-b35b-047d36f54ebf tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Task: {'id': task-2735802, 'name': ReconfigVM_Task, 'duration_secs': 0.271058} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.937598] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-c74f3c02-cb2d-48e5-b35b-047d36f54ebf tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: 52b19182-a7e2-4461-b4eb-e6cd8a30024e] Reconfigured VM instance instance-00000037 to attach disk [datastore2] 52b19182-a7e2-4461-b4eb-e6cd8a30024e/52b19182-a7e2-4461-b4eb-e6cd8a30024e.vmdk or device None with type thin {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 902.938454] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e63df7c-1064-4df0-bd08-d13a4820bba8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.968805] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97bb96c1-5378-499d-8dad-89da08a7316d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.989787] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ded4e65a-f7ed-40da-b840-7e64ed3962bb {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.016354] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a4767b0-c1a8-4de9-a612-a7d4e6e37d7e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.024909] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-c74f3c02-cb2d-48e5-b35b-047d36f54ebf tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: 52b19182-a7e2-4461-b4eb-e6cd8a30024e] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 903.025088] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1a9568b9-b8ae-4980-a554-7094a242626e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.032039] env[63028]: DEBUG nova.compute.manager 
[req-7fffcced-2ffb-4965-9120-bd3ea904542d req-5e4a702a-04eb-4850-846e-fb939e75ac01 service nova] [instance: 3e45e7f3-a34f-4eab-9fff-1c874c832e2a] Received event network-vif-deleted-dfcc9f28-fdc3-4d22-a5a0-b2704f142312 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 903.032039] env[63028]: INFO nova.compute.manager [req-7fffcced-2ffb-4965-9120-bd3ea904542d req-5e4a702a-04eb-4850-846e-fb939e75ac01 service nova] [instance: 3e45e7f3-a34f-4eab-9fff-1c874c832e2a] Neutron deleted interface dfcc9f28-fdc3-4d22-a5a0-b2704f142312; detaching it from the instance and deleting it from the info cache [ 903.032039] env[63028]: DEBUG nova.network.neutron [req-7fffcced-2ffb-4965-9120-bd3ea904542d req-5e4a702a-04eb-4850-846e-fb939e75ac01 service nova] [instance: 3e45e7f3-a34f-4eab-9fff-1c874c832e2a] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 903.033948] env[63028]: DEBUG oslo_vmware.api [None req-c74f3c02-cb2d-48e5-b35b-047d36f54ebf tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Waiting for the task: (returnval){ [ 903.033948] env[63028]: value = "task-2735803" [ 903.033948] env[63028]: _type = "Task" [ 903.033948] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.046970] env[63028]: DEBUG oslo_vmware.api [None req-c74f3c02-cb2d-48e5-b35b-047d36f54ebf tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Task: {'id': task-2735803, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.383334] env[63028]: DEBUG nova.compute.utils [None req-9c87809b-970a-44ae-a0c9-ae8da38668e0 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 903.388206] env[63028]: DEBUG nova.compute.manager [None req-9c87809b-970a-44ae-a0c9-ae8da38668e0 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: ed872f21-c2c4-4597-8c9e-9f8d2202b707] Allocating IP information in the background. 
{{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 903.388386] env[63028]: DEBUG nova.network.neutron [None req-9c87809b-970a-44ae-a0c9-ae8da38668e0 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: ed872f21-c2c4-4597-8c9e-9f8d2202b707] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 903.443279] env[63028]: DEBUG nova.policy [None req-9c87809b-970a-44ae-a0c9-ae8da38668e0 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8478e45562394a0d8fafc5e3e1218fd2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '05118b378b5e4d838962db2378b381bc', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 903.518240] env[63028]: DEBUG nova.network.neutron [-] [instance: 3e45e7f3-a34f-4eab-9fff-1c874c832e2a] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 903.537367] env[63028]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3ed27012-2cf6-4cff-9259-c570c26056b7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.551193] env[63028]: DEBUG oslo_vmware.api [None req-c74f3c02-cb2d-48e5-b35b-047d36f54ebf tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Task: {'id': task-2735803, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.559329] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0220525c-4f81-4786-b8d0-faf538962fd7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.602131] env[63028]: DEBUG nova.compute.manager [req-7fffcced-2ffb-4965-9120-bd3ea904542d req-5e4a702a-04eb-4850-846e-fb939e75ac01 service nova] [instance: 3e45e7f3-a34f-4eab-9fff-1c874c832e2a] Detach interface failed, port_id=dfcc9f28-fdc3-4d22-a5a0-b2704f142312, reason: Instance 3e45e7f3-a34f-4eab-9fff-1c874c832e2a could not be found. {{(pid=63028) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 903.820746] env[63028]: DEBUG nova.network.neutron [None req-9c87809b-970a-44ae-a0c9-ae8da38668e0 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: ed872f21-c2c4-4597-8c9e-9f8d2202b707] Successfully created port: cd8436f9-6412-468e-bd24-f9d845d3ca21 {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 903.889608] env[63028]: DEBUG nova.compute.manager [None req-9c87809b-970a-44ae-a0c9-ae8da38668e0 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: ed872f21-c2c4-4597-8c9e-9f8d2202b707] Start building block device mappings for instance. 
{{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 903.914431] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2c73cdd-1bde-4dc1-9869-9a554e181053 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.922892] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8466625-8213-4b7d-921d-c51aef2dd9f3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.953982] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3660b12-1097-47bd-8b0f-58f38b1c2a03 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.961073] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-195b92dd-e411-495c-8c31-255949255deb {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.973869] env[63028]: DEBUG nova.compute.provider_tree [None req-b38f1040-866d-4003-80a9-2af0d3f0d4c9 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 904.024669] env[63028]: INFO nova.compute.manager [-] [instance: 3e45e7f3-a34f-4eab-9fff-1c874c832e2a] Took 1.27 seconds to deallocate network for instance. [ 904.044989] env[63028]: DEBUG oslo_vmware.api [None req-c74f3c02-cb2d-48e5-b35b-047d36f54ebf tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Task: {'id': task-2735803, 'name': PowerOnVM_Task, 'duration_secs': 0.544167} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.045271] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-c74f3c02-cb2d-48e5-b35b-047d36f54ebf tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: 52b19182-a7e2-4461-b4eb-e6cd8a30024e] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 904.247594] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2496a4e0-b6be-4970-8206-2bccdd164e10 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Acquiring lock "8bb61bfa-d44e-4e06-867a-445d9b3db660" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 904.247829] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2496a4e0-b6be-4970-8206-2bccdd164e10 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Lock "8bb61bfa-d44e-4e06-867a-445d9b3db660" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 904.477333] env[63028]: DEBUG nova.scheduler.client.report [None req-b38f1040-866d-4003-80a9-2af0d3f0d4c9 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 904.533524] env[63028]: DEBUG oslo_concurrency.lockutils [None req-1208d605-afb6-4f7d-bed8-7b4272afde28 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 904.750120] env[63028]: DEBUG nova.compute.manager [None req-2496a4e0-b6be-4970-8206-2bccdd164e10 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] Starting instance... {{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 904.899192] env[63028]: DEBUG nova.compute.manager [None req-9c87809b-970a-44ae-a0c9-ae8da38668e0 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: ed872f21-c2c4-4597-8c9e-9f8d2202b707] Start spawning the instance on the hypervisor. 
{{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 904.932106] env[63028]: DEBUG nova.virt.hardware [None req-9c87809b-970a-44ae-a0c9-ae8da38668e0 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 904.932106] env[63028]: DEBUG nova.virt.hardware [None req-9c87809b-970a-44ae-a0c9-ae8da38668e0 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 904.932106] env[63028]: DEBUG nova.virt.hardware [None req-9c87809b-970a-44ae-a0c9-ae8da38668e0 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 904.932106] env[63028]: DEBUG nova.virt.hardware [None req-9c87809b-970a-44ae-a0c9-ae8da38668e0 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 904.932106] env[63028]: DEBUG nova.virt.hardware [None req-9c87809b-970a-44ae-a0c9-ae8da38668e0 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 904.932106] env[63028]: DEBUG nova.virt.hardware [None req-9c87809b-970a-44ae-a0c9-ae8da38668e0 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 904.932106] env[63028]: DEBUG nova.virt.hardware [None req-9c87809b-970a-44ae-a0c9-ae8da38668e0 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 904.932106] env[63028]: DEBUG nova.virt.hardware [None req-9c87809b-970a-44ae-a0c9-ae8da38668e0 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 904.933102] 
env[63028]: DEBUG nova.virt.hardware [None req-9c87809b-970a-44ae-a0c9-ae8da38668e0 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 904.933487] env[63028]: DEBUG nova.virt.hardware [None req-9c87809b-970a-44ae-a0c9-ae8da38668e0 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 904.933860] env[63028]: DEBUG nova.virt.hardware [None req-9c87809b-970a-44ae-a0c9-ae8da38668e0 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 904.934976] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29994863-4644-4f96-b531-d941a099cce8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.945071] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-912814fc-3407-46e0-a42d-8514e76acca2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.985529] env[63028]: DEBUG oslo_concurrency.lockutils [None req-b38f1040-866d-4003-80a9-2af0d3f0d4c9 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.106s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 904.990047] env[63028]: DEBUG oslo_concurrency.lockutils [None req-50b48420-9bf6-4b47-bba3-9fd80cfa788c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.383s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 904.991431] env[63028]: INFO nova.compute.claims [None req-50b48420-9bf6-4b47-bba3-9fd80cfa788c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: addcf94a-1a56-49ff-8adb-3ce7f2d1e09e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 905.007479] env[63028]: INFO nova.scheduler.client.report [None req-b38f1040-866d-4003-80a9-2af0d3f0d4c9 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Deleted allocations for instance 4e859327-ccd3-440e-b884-67f6cdadf97f [ 905.057219] env[63028]: INFO nova.compute.manager [None req-c74f3c02-cb2d-48e5-b35b-047d36f54ebf tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: 52b19182-a7e2-4461-b4eb-e6cd8a30024e] Updating instance to original state: 'active' [ 905.261797] env[63028]: DEBUG nova.compute.manager [req-a5489f05-1931-4ecd-985c-faea92e0efd2 req-ecf199f7-ed18-4615-8cd8-9035667ba60b service nova] [instance: ed872f21-c2c4-4597-8c9e-9f8d2202b707] Received event network-vif-plugged-cd8436f9-6412-468e-bd24-f9d845d3ca21 {{(pid=63028) 
external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 905.262042] env[63028]: DEBUG oslo_concurrency.lockutils [req-a5489f05-1931-4ecd-985c-faea92e0efd2 req-ecf199f7-ed18-4615-8cd8-9035667ba60b service nova] Acquiring lock "ed872f21-c2c4-4597-8c9e-9f8d2202b707-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 905.262258] env[63028]: DEBUG oslo_concurrency.lockutils [req-a5489f05-1931-4ecd-985c-faea92e0efd2 req-ecf199f7-ed18-4615-8cd8-9035667ba60b service nova] Lock "ed872f21-c2c4-4597-8c9e-9f8d2202b707-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 905.262431] env[63028]: DEBUG oslo_concurrency.lockutils [req-a5489f05-1931-4ecd-985c-faea92e0efd2 req-ecf199f7-ed18-4615-8cd8-9035667ba60b service nova] Lock "ed872f21-c2c4-4597-8c9e-9f8d2202b707-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 905.262621] env[63028]: DEBUG nova.compute.manager [req-a5489f05-1931-4ecd-985c-faea92e0efd2 req-ecf199f7-ed18-4615-8cd8-9035667ba60b service nova] [instance: ed872f21-c2c4-4597-8c9e-9f8d2202b707] No waiting events found dispatching network-vif-plugged-cd8436f9-6412-468e-bd24-f9d845d3ca21 {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 905.262755] env[63028]: WARNING nova.compute.manager [req-a5489f05-1931-4ecd-985c-faea92e0efd2 req-ecf199f7-ed18-4615-8cd8-9035667ba60b service nova] [instance: ed872f21-c2c4-4597-8c9e-9f8d2202b707] Received unexpected event network-vif-plugged-cd8436f9-6412-468e-bd24-f9d845d3ca21 for instance with vm_state building and task_state spawning. 
[ 905.268287] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2496a4e0-b6be-4970-8206-2bccdd164e10 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 905.347570] env[63028]: DEBUG nova.network.neutron [None req-9c87809b-970a-44ae-a0c9-ae8da38668e0 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: ed872f21-c2c4-4597-8c9e-9f8d2202b707] Successfully updated port: cd8436f9-6412-468e-bd24-f9d845d3ca21 {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 905.515531] env[63028]: DEBUG oslo_concurrency.lockutils [None req-b38f1040-866d-4003-80a9-2af0d3f0d4c9 tempest-ServersTestMultiNic-275042728 tempest-ServersTestMultiNic-275042728-project-member] Lock "4e859327-ccd3-440e-b884-67f6cdadf97f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 39.834s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 905.851906] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9c87809b-970a-44ae-a0c9-ae8da38668e0 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Acquiring lock "refresh_cache-ed872f21-c2c4-4597-8c9e-9f8d2202b707" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 905.853141] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9c87809b-970a-44ae-a0c9-ae8da38668e0 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Acquired lock "refresh_cache-ed872f21-c2c4-4597-8c9e-9f8d2202b707" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 905.853141] env[63028]: DEBUG nova.network.neutron [None req-9c87809b-970a-44ae-a0c9-ae8da38668e0 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: ed872f21-c2c4-4597-8c9e-9f8d2202b707] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 906.386193] env[63028]: DEBUG nova.network.neutron [None req-9c87809b-970a-44ae-a0c9-ae8da38668e0 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: ed872f21-c2c4-4597-8c9e-9f8d2202b707] Instance cache missing network info. 
{{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 906.422453] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16dc59a1-05cd-4568-8eea-dfe31327b394 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.434387] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61ef76fb-502d-44df-9249-1c21612ebda8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.465021] env[63028]: DEBUG oslo_concurrency.lockutils [None req-938cc73b-b50c-4cda-8f80-8d0a7c526476 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Acquiring lock "52b19182-a7e2-4461-b4eb-e6cd8a30024e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 906.465021] env[63028]: DEBUG oslo_concurrency.lockutils [None req-938cc73b-b50c-4cda-8f80-8d0a7c526476 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Lock "52b19182-a7e2-4461-b4eb-e6cd8a30024e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 906.465207] env[63028]: DEBUG oslo_concurrency.lockutils [None req-938cc73b-b50c-4cda-8f80-8d0a7c526476 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Acquiring lock "52b19182-a7e2-4461-b4eb-e6cd8a30024e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 906.465625] env[63028]: DEBUG oslo_concurrency.lockutils [None req-938cc73b-b50c-4cda-8f80-8d0a7c526476 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Lock "52b19182-a7e2-4461-b4eb-e6cd8a30024e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 906.466200] env[63028]: DEBUG oslo_concurrency.lockutils [None req-938cc73b-b50c-4cda-8f80-8d0a7c526476 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Lock "52b19182-a7e2-4461-b4eb-e6cd8a30024e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 906.471008] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c4e60cd-6a1e-435a-b466-ce4ca0f9ad64 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.473763] env[63028]: INFO nova.compute.manager [None req-938cc73b-b50c-4cda-8f80-8d0a7c526476 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: 52b19182-a7e2-4461-b4eb-e6cd8a30024e] Terminating instance [ 906.481481] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx 
with opID=oslo.vmware-8e31a2d6-e457-4ae0-846d-d0ff9f0f93ef {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.498605] env[63028]: DEBUG nova.compute.provider_tree [None req-50b48420-9bf6-4b47-bba3-9fd80cfa788c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 906.537064] env[63028]: DEBUG nova.network.neutron [None req-9c87809b-970a-44ae-a0c9-ae8da38668e0 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: ed872f21-c2c4-4597-8c9e-9f8d2202b707] Updating instance_info_cache with network_info: [{"id": "cd8436f9-6412-468e-bd24-f9d845d3ca21", "address": "fa:16:3e:ff:70:93", "network": {"id": "e8109779-7eb7-4751-b6e5-c1e3b007cb9a", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1892394824-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "05118b378b5e4d838962db2378b381bc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5446413d-c3b0-4cd2-a962-62240db178ac", "external-id": "nsx-vlan-transportzone-528", "segmentation_id": 528, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcd8436f9-64", "ovs_interfaceid": "cd8436f9-6412-468e-bd24-f9d845d3ca21", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 906.977990] env[63028]: DEBUG nova.compute.manager [None req-938cc73b-b50c-4cda-8f80-8d0a7c526476 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: 52b19182-a7e2-4461-b4eb-e6cd8a30024e] Start destroying the instance on the hypervisor. 
{{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 906.978277] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-938cc73b-b50c-4cda-8f80-8d0a7c526476 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: 52b19182-a7e2-4461-b4eb-e6cd8a30024e] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 906.979214] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad12976d-7841-4e6f-877d-6f60b0888153 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.986487] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-938cc73b-b50c-4cda-8f80-8d0a7c526476 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: 52b19182-a7e2-4461-b4eb-e6cd8a30024e] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 906.986905] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1d51d1da-3df6-4cb4-9d89-63bfb6ce23d0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.992649] env[63028]: DEBUG oslo_vmware.api [None req-938cc73b-b50c-4cda-8f80-8d0a7c526476 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Waiting for the task: (returnval){ [ 906.992649] env[63028]: value = "task-2735804" [ 906.992649] env[63028]: _type = "Task" [ 906.992649] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.001535] env[63028]: DEBUG nova.scheduler.client.report [None req-50b48420-9bf6-4b47-bba3-9fd80cfa788c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 907.006525] env[63028]: DEBUG oslo_vmware.api [None req-938cc73b-b50c-4cda-8f80-8d0a7c526476 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Task: {'id': task-2735804, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.039196] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9c87809b-970a-44ae-a0c9-ae8da38668e0 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Releasing lock "refresh_cache-ed872f21-c2c4-4597-8c9e-9f8d2202b707" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 907.039526] env[63028]: DEBUG nova.compute.manager [None req-9c87809b-970a-44ae-a0c9-ae8da38668e0 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: ed872f21-c2c4-4597-8c9e-9f8d2202b707] Instance network_info: |[{"id": "cd8436f9-6412-468e-bd24-f9d845d3ca21", "address": "fa:16:3e:ff:70:93", "network": {"id": "e8109779-7eb7-4751-b6e5-c1e3b007cb9a", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1892394824-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "05118b378b5e4d838962db2378b381bc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5446413d-c3b0-4cd2-a962-62240db178ac", "external-id": "nsx-vlan-transportzone-528", "segmentation_id": 528, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcd8436f9-64", "ovs_interfaceid": "cd8436f9-6412-468e-bd24-f9d845d3ca21", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 907.039942] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-9c87809b-970a-44ae-a0c9-ae8da38668e0 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: ed872f21-c2c4-4597-8c9e-9f8d2202b707] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ff:70:93', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5446413d-c3b0-4cd2-a962-62240db178ac', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cd8436f9-6412-468e-bd24-f9d845d3ca21', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 907.047449] env[63028]: DEBUG oslo.service.loopingcall [None req-9c87809b-970a-44ae-a0c9-ae8da38668e0 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 907.047663] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ed872f21-c2c4-4597-8c9e-9f8d2202b707] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 907.047884] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ec921b42-f83d-4e77-87b0-a443394f0571 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.069209] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 907.069209] env[63028]: value = "task-2735805" [ 907.069209] env[63028]: _type = "Task" [ 907.069209] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.076891] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735805, 'name': CreateVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.325472] env[63028]: DEBUG nova.compute.manager [req-ce13adb9-32b8-4e15-a978-d2de5f117eee req-e8cf72a8-7f89-4c72-9d71-d688b2421638 service nova] [instance: ed872f21-c2c4-4597-8c9e-9f8d2202b707] Received event network-changed-cd8436f9-6412-468e-bd24-f9d845d3ca21 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 907.325616] env[63028]: DEBUG nova.compute.manager [req-ce13adb9-32b8-4e15-a978-d2de5f117eee req-e8cf72a8-7f89-4c72-9d71-d688b2421638 service nova] [instance: ed872f21-c2c4-4597-8c9e-9f8d2202b707] Refreshing instance network info cache due to event network-changed-cd8436f9-6412-468e-bd24-f9d845d3ca21. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 907.325854] env[63028]: DEBUG oslo_concurrency.lockutils [req-ce13adb9-32b8-4e15-a978-d2de5f117eee req-e8cf72a8-7f89-4c72-9d71-d688b2421638 service nova] Acquiring lock "refresh_cache-ed872f21-c2c4-4597-8c9e-9f8d2202b707" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 907.325968] env[63028]: DEBUG oslo_concurrency.lockutils [req-ce13adb9-32b8-4e15-a978-d2de5f117eee req-e8cf72a8-7f89-4c72-9d71-d688b2421638 service nova] Acquired lock "refresh_cache-ed872f21-c2c4-4597-8c9e-9f8d2202b707" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 907.326147] env[63028]: DEBUG nova.network.neutron [req-ce13adb9-32b8-4e15-a978-d2de5f117eee req-e8cf72a8-7f89-4c72-9d71-d688b2421638 service nova] [instance: ed872f21-c2c4-4597-8c9e-9f8d2202b707] Refreshing network info cache for port cd8436f9-6412-468e-bd24-f9d845d3ca21 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 907.502107] env[63028]: DEBUG oslo_vmware.api [None req-938cc73b-b50c-4cda-8f80-8d0a7c526476 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Task: {'id': task-2735804, 'name': PowerOffVM_Task, 'duration_secs': 0.18709} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.502575] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-938cc73b-b50c-4cda-8f80-8d0a7c526476 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: 52b19182-a7e2-4461-b4eb-e6cd8a30024e] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 907.502942] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-938cc73b-b50c-4cda-8f80-8d0a7c526476 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: 52b19182-a7e2-4461-b4eb-e6cd8a30024e] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 907.503270] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-43cdbd16-0b66-4f34-adfa-1d17521230c3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.507909] env[63028]: DEBUG oslo_concurrency.lockutils [None req-50b48420-9bf6-4b47-bba3-9fd80cfa788c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.518s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 907.508593] env[63028]: DEBUG nova.compute.manager [None req-50b48420-9bf6-4b47-bba3-9fd80cfa788c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: addcf94a-1a56-49ff-8adb-3ce7f2d1e09e] Start building networks asynchronously for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 907.511500] env[63028]: DEBUG oslo_concurrency.lockutils [None req-35aa04db-5b6b-491e-aca8-e98c4728591d tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.794s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 907.511746] env[63028]: DEBUG nova.objects.instance [None req-35aa04db-5b6b-491e-aca8-e98c4728591d tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Lazy-loading 'resources' on Instance uuid 9773ad95-1894-471d-8020-c7952eac4be4 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 907.580269] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735805, 'name': CreateVM_Task, 'duration_secs': 0.50554} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.580573] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ed872f21-c2c4-4597-8c9e-9f8d2202b707] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 907.581393] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9c87809b-970a-44ae-a0c9-ae8da38668e0 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 907.581567] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9c87809b-970a-44ae-a0c9-ae8da38668e0 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 907.582125] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9c87809b-970a-44ae-a0c9-ae8da38668e0 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 907.582254] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eeda347b-c672-4790-905a-236083f2c118 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.588056] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-938cc73b-b50c-4cda-8f80-8d0a7c526476 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: 52b19182-a7e2-4461-b4eb-e6cd8a30024e] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 907.588262] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-938cc73b-b50c-4cda-8f80-8d0a7c526476 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: 52b19182-a7e2-4461-b4eb-e6cd8a30024e] Deleting contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 907.588435] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-938cc73b-b50c-4cda-8f80-8d0a7c526476 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Deleting the datastore file [datastore2] 52b19182-a7e2-4461-b4eb-e6cd8a30024e {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 907.589039] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bd07ff7f-eca5-4910-b522-1443e201500e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.595677] env[63028]: DEBUG oslo_vmware.api [None req-9c87809b-970a-44ae-a0c9-ae8da38668e0 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Waiting for the task: (returnval){ [ 907.595677] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]528b8fee-430e-afce-b13a-9037c364f9c3" [ 
907.595677] env[63028]: _type = "Task" [ 907.595677] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.602616] env[63028]: DEBUG oslo_vmware.api [None req-938cc73b-b50c-4cda-8f80-8d0a7c526476 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Waiting for the task: (returnval){ [ 907.602616] env[63028]: value = "task-2735807" [ 907.602616] env[63028]: _type = "Task" [ 907.602616] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.612274] env[63028]: DEBUG oslo_vmware.api [None req-9c87809b-970a-44ae-a0c9-ae8da38668e0 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]528b8fee-430e-afce-b13a-9037c364f9c3, 'name': SearchDatastore_Task, 'duration_secs': 0.009603} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.613071] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9c87809b-970a-44ae-a0c9-ae8da38668e0 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 907.613408] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-9c87809b-970a-44ae-a0c9-ae8da38668e0 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: ed872f21-c2c4-4597-8c9e-9f8d2202b707] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 907.613754] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9c87809b-970a-44ae-a0c9-ae8da38668e0 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 907.614028] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9c87809b-970a-44ae-a0c9-ae8da38668e0 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 907.614334] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c87809b-970a-44ae-a0c9-ae8da38668e0 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 907.617508] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-471c9d17-e7da-482e-b8d4-fcae67fe4d38 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.620042] env[63028]: DEBUG oslo_vmware.api [None req-938cc73b-b50c-4cda-8f80-8d0a7c526476 
tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Task: {'id': task-2735807, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.625472] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c87809b-970a-44ae-a0c9-ae8da38668e0 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 907.625863] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-9c87809b-970a-44ae-a0c9-ae8da38668e0 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 907.626800] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1195ff2f-200a-4f49-9716-d849a808db81 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.632949] env[63028]: DEBUG oslo_vmware.api [None req-9c87809b-970a-44ae-a0c9-ae8da38668e0 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Waiting for the task: (returnval){ [ 907.632949] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52802fc3-0b30-cb8f-0920-fc33d332358d" [ 907.632949] env[63028]: _type = "Task" [ 907.632949] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.641677] env[63028]: DEBUG oslo_vmware.api [None req-9c87809b-970a-44ae-a0c9-ae8da38668e0 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52802fc3-0b30-cb8f-0920-fc33d332358d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.018302] env[63028]: DEBUG nova.compute.utils [None req-50b48420-9bf6-4b47-bba3-9fd80cfa788c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 908.023248] env[63028]: DEBUG nova.compute.manager [None req-50b48420-9bf6-4b47-bba3-9fd80cfa788c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: addcf94a-1a56-49ff-8adb-3ce7f2d1e09e] Allocating IP information in the background. 
{{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 908.023248] env[63028]: DEBUG nova.network.neutron [None req-50b48420-9bf6-4b47-bba3-9fd80cfa788c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: addcf94a-1a56-49ff-8adb-3ce7f2d1e09e] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 908.097427] env[63028]: DEBUG nova.policy [None req-50b48420-9bf6-4b47-bba3-9fd80cfa788c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3c022ca18b0a41ce9d790fa25f6ebf8c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ea26842446ec4691a6456a6659188704', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 908.117414] env[63028]: DEBUG oslo_vmware.api [None req-938cc73b-b50c-4cda-8f80-8d0a7c526476 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Task: {'id': task-2735807, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.154471} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.117674] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-938cc73b-b50c-4cda-8f80-8d0a7c526476 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 908.117860] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-938cc73b-b50c-4cda-8f80-8d0a7c526476 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: 52b19182-a7e2-4461-b4eb-e6cd8a30024e] Deleted contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 908.118079] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-938cc73b-b50c-4cda-8f80-8d0a7c526476 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: 52b19182-a7e2-4461-b4eb-e6cd8a30024e] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 908.118225] env[63028]: INFO nova.compute.manager [None req-938cc73b-b50c-4cda-8f80-8d0a7c526476 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: 52b19182-a7e2-4461-b4eb-e6cd8a30024e] Took 1.14 seconds to destroy the instance on the hypervisor. [ 908.118489] env[63028]: DEBUG oslo.service.loopingcall [None req-938cc73b-b50c-4cda-8f80-8d0a7c526476 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 908.118635] env[63028]: DEBUG nova.compute.manager [-] [instance: 52b19182-a7e2-4461-b4eb-e6cd8a30024e] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 908.118729] env[63028]: DEBUG nova.network.neutron [-] [instance: 52b19182-a7e2-4461-b4eb-e6cd8a30024e] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 908.125788] env[63028]: DEBUG nova.network.neutron [req-ce13adb9-32b8-4e15-a978-d2de5f117eee req-e8cf72a8-7f89-4c72-9d71-d688b2421638 service nova] [instance: ed872f21-c2c4-4597-8c9e-9f8d2202b707] Updated VIF entry in instance network info cache for port cd8436f9-6412-468e-bd24-f9d845d3ca21. {{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 908.125961] env[63028]: DEBUG nova.network.neutron [req-ce13adb9-32b8-4e15-a978-d2de5f117eee req-e8cf72a8-7f89-4c72-9d71-d688b2421638 service nova] [instance: ed872f21-c2c4-4597-8c9e-9f8d2202b707] Updating instance_info_cache with network_info: [{"id": "cd8436f9-6412-468e-bd24-f9d845d3ca21", "address": "fa:16:3e:ff:70:93", "network": {"id": "e8109779-7eb7-4751-b6e5-c1e3b007cb9a", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1892394824-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "05118b378b5e4d838962db2378b381bc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5446413d-c3b0-4cd2-a962-62240db178ac", "external-id": "nsx-vlan-transportzone-528", "segmentation_id": 528, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcd8436f9-64", "ovs_interfaceid": "cd8436f9-6412-468e-bd24-f9d845d3ca21", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 908.143572] env[63028]: DEBUG oslo_vmware.api [None req-9c87809b-970a-44ae-a0c9-ae8da38668e0 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52802fc3-0b30-cb8f-0920-fc33d332358d, 'name': SearchDatastore_Task, 'duration_secs': 0.008234} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.147230] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-73557c2f-ba13-467e-bc48-c08253fd4f7e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.153364] env[63028]: DEBUG oslo_vmware.api [None req-9c87809b-970a-44ae-a0c9-ae8da38668e0 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Waiting for the task: (returnval){ [ 908.153364] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]523380bf-5f88-4ee1-5ecf-7bde794bfecd" [ 908.153364] env[63028]: _type = "Task" [ 908.153364] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.161241] env[63028]: DEBUG oslo_vmware.api [None req-9c87809b-970a-44ae-a0c9-ae8da38668e0 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]523380bf-5f88-4ee1-5ecf-7bde794bfecd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.526396] env[63028]: DEBUG nova.compute.manager [None req-50b48420-9bf6-4b47-bba3-9fd80cfa788c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: addcf94a-1a56-49ff-8adb-3ce7f2d1e09e] Start building block device mappings for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 908.589107] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c1482ad-ac8a-4560-a3fd-9335e18c965b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.597857] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dafb8d76-634e-4249-ab72-0b1eb342853b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.633457] env[63028]: DEBUG oslo_concurrency.lockutils [req-ce13adb9-32b8-4e15-a978-d2de5f117eee req-e8cf72a8-7f89-4c72-9d71-d688b2421638 service nova] Releasing lock "refresh_cache-ed872f21-c2c4-4597-8c9e-9f8d2202b707" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 908.634197] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-453df077-647f-40c8-8612-f323f37e6c1b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.641977] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85f33cc9-3249-4e8c-b84d-8831597af1c9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.657485] env[63028]: DEBUG nova.compute.provider_tree [None req-35aa04db-5b6b-491e-aca8-e98c4728591d tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 908.666524] env[63028]: DEBUG oslo_vmware.api [None 
req-9c87809b-970a-44ae-a0c9-ae8da38668e0 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]523380bf-5f88-4ee1-5ecf-7bde794bfecd, 'name': SearchDatastore_Task, 'duration_secs': 0.010225} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.667294] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9c87809b-970a-44ae-a0c9-ae8da38668e0 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 908.668694] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c87809b-970a-44ae-a0c9-ae8da38668e0 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] ed872f21-c2c4-4597-8c9e-9f8d2202b707/ed872f21-c2c4-4597-8c9e-9f8d2202b707.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 908.668694] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b269ef98-984c-4746-a8c9-b731362dd171 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.677402] env[63028]: DEBUG oslo_vmware.api [None req-9c87809b-970a-44ae-a0c9-ae8da38668e0 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Waiting for the task: (returnval){ [ 908.677402] env[63028]: value = "task-2735808" [ 908.677402] env[63028]: _type = "Task" [ 908.677402] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.687290] env[63028]: DEBUG oslo_vmware.api [None req-9c87809b-970a-44ae-a0c9-ae8da38668e0 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2735808, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.687897] env[63028]: DEBUG nova.network.neutron [None req-50b48420-9bf6-4b47-bba3-9fd80cfa788c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: addcf94a-1a56-49ff-8adb-3ce7f2d1e09e] Successfully created port: 8ed03cbb-e718-4686-a803-4082f3e01d60 {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 909.041472] env[63028]: DEBUG nova.network.neutron [-] [instance: 52b19182-a7e2-4461-b4eb-e6cd8a30024e] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 909.163719] env[63028]: DEBUG nova.scheduler.client.report [None req-35aa04db-5b6b-491e-aca8-e98c4728591d tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 909.191860] env[63028]: DEBUG oslo_vmware.api [None req-9c87809b-970a-44ae-a0c9-ae8da38668e0 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2735808, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.361156] env[63028]: DEBUG nova.compute.manager [req-88441391-c900-4d99-9441-6d92dc900cb9 req-b11f392e-bbbc-4570-ba9b-e7a552d32688 service nova] [instance: 52b19182-a7e2-4461-b4eb-e6cd8a30024e] Received event network-vif-deleted-7dd5be5d-a88d-4dcd-a42d-7842895207f7 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 909.537190] env[63028]: DEBUG nova.compute.manager [None req-50b48420-9bf6-4b47-bba3-9fd80cfa788c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: addcf94a-1a56-49ff-8adb-3ce7f2d1e09e] Start spawning the instance on the hypervisor. {{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 909.544955] env[63028]: INFO nova.compute.manager [-] [instance: 52b19182-a7e2-4461-b4eb-e6cd8a30024e] Took 1.43 seconds to deallocate network for instance. 
[ 909.572100] env[63028]: DEBUG nova.virt.hardware [None req-50b48420-9bf6-4b47-bba3-9fd80cfa788c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 909.572398] env[63028]: DEBUG nova.virt.hardware [None req-50b48420-9bf6-4b47-bba3-9fd80cfa788c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 909.572561] env[63028]: DEBUG nova.virt.hardware [None req-50b48420-9bf6-4b47-bba3-9fd80cfa788c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 909.572743] env[63028]: DEBUG nova.virt.hardware [None req-50b48420-9bf6-4b47-bba3-9fd80cfa788c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 909.572886] env[63028]: DEBUG nova.virt.hardware [None req-50b48420-9bf6-4b47-bba3-9fd80cfa788c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 909.573043] env[63028]: DEBUG nova.virt.hardware [None req-50b48420-9bf6-4b47-bba3-9fd80cfa788c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 909.573259] env[63028]: DEBUG nova.virt.hardware [None req-50b48420-9bf6-4b47-bba3-9fd80cfa788c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 909.573416] env[63028]: DEBUG nova.virt.hardware [None req-50b48420-9bf6-4b47-bba3-9fd80cfa788c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 909.573585] env[63028]: DEBUG nova.virt.hardware [None req-50b48420-9bf6-4b47-bba3-9fd80cfa788c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:505}} [ 909.574117] env[63028]: DEBUG nova.virt.hardware [None req-50b48420-9bf6-4b47-bba3-9fd80cfa788c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 909.574117] env[63028]: DEBUG nova.virt.hardware [None req-50b48420-9bf6-4b47-bba3-9fd80cfa788c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 909.574937] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cde136fa-513c-4e41-ad58-ddeb392d40ed {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.583479] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d567124-8586-425c-a972-2604b2859bf5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.668929] env[63028]: DEBUG oslo_concurrency.lockutils [None req-35aa04db-5b6b-491e-aca8-e98c4728591d tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.157s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 909.671513] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d0adac76-4bdd-4403-8347-182796e98476 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.860s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 909.673195] env[63028]: INFO nova.compute.claims [None req-d0adac76-4bdd-4403-8347-182796e98476 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 909.688957] env[63028]: DEBUG oslo_vmware.api [None req-9c87809b-970a-44ae-a0c9-ae8da38668e0 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2735808, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.71064} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.689195] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c87809b-970a-44ae-a0c9-ae8da38668e0 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] ed872f21-c2c4-4597-8c9e-9f8d2202b707/ed872f21-c2c4-4597-8c9e-9f8d2202b707.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 909.689424] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-9c87809b-970a-44ae-a0c9-ae8da38668e0 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: ed872f21-c2c4-4597-8c9e-9f8d2202b707] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 909.689719] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fab1a11b-785c-4cfa-a634-fd9ebc4dd400 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.698217] env[63028]: DEBUG oslo_vmware.api [None req-9c87809b-970a-44ae-a0c9-ae8da38668e0 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Waiting for the task: (returnval){ [ 909.698217] env[63028]: value = "task-2735809" [ 909.698217] env[63028]: _type = "Task" [ 909.698217] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.702181] env[63028]: INFO nova.scheduler.client.report [None req-35aa04db-5b6b-491e-aca8-e98c4728591d tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Deleted allocations for instance 9773ad95-1894-471d-8020-c7952eac4be4 [ 909.709924] env[63028]: DEBUG oslo_vmware.api [None req-9c87809b-970a-44ae-a0c9-ae8da38668e0 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2735809, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.052575] env[63028]: DEBUG oslo_concurrency.lockutils [None req-938cc73b-b50c-4cda-8f80-8d0a7c526476 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 910.210457] env[63028]: DEBUG oslo_vmware.api [None req-9c87809b-970a-44ae-a0c9-ae8da38668e0 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2735809, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.134653} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.210457] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-9c87809b-970a-44ae-a0c9-ae8da38668e0 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: ed872f21-c2c4-4597-8c9e-9f8d2202b707] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 910.210544] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73233385-49c4-404b-9c4c-d925f7330a9e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.216835] env[63028]: DEBUG oslo_concurrency.lockutils [None req-35aa04db-5b6b-491e-aca8-e98c4728591d tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Lock "9773ad95-1894-471d-8020-c7952eac4be4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.289s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 910.238854] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-9c87809b-970a-44ae-a0c9-ae8da38668e0 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: ed872f21-c2c4-4597-8c9e-9f8d2202b707] Reconfiguring VM instance instance-0000004c to attach disk [datastore2] ed872f21-c2c4-4597-8c9e-9f8d2202b707/ed872f21-c2c4-4597-8c9e-9f8d2202b707.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 910.241120] env[63028]: DEBUG nova.network.neutron [None req-50b48420-9bf6-4b47-bba3-9fd80cfa788c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: addcf94a-1a56-49ff-8adb-3ce7f2d1e09e] Successfully updated port: 8ed03cbb-e718-4686-a803-4082f3e01d60 {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 910.242359] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7485cf5e-7440-4023-9e76-2d193987b3c1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.261470] env[63028]: DEBUG oslo_concurrency.lockutils [None req-50b48420-9bf6-4b47-bba3-9fd80cfa788c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Acquiring lock "refresh_cache-addcf94a-1a56-49ff-8adb-3ce7f2d1e09e" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 910.261701] env[63028]: DEBUG oslo_concurrency.lockutils [None req-50b48420-9bf6-4b47-bba3-9fd80cfa788c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Acquired lock "refresh_cache-addcf94a-1a56-49ff-8adb-3ce7f2d1e09e" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 910.262137] env[63028]: DEBUG nova.network.neutron [None req-50b48420-9bf6-4b47-bba3-9fd80cfa788c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: addcf94a-1a56-49ff-8adb-3ce7f2d1e09e] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 910.269194] env[63028]: DEBUG oslo_vmware.api [None req-9c87809b-970a-44ae-a0c9-ae8da38668e0 
tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Waiting for the task: (returnval){ [ 910.269194] env[63028]: value = "task-2735810" [ 910.269194] env[63028]: _type = "Task" [ 910.269194] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.278766] env[63028]: DEBUG oslo_vmware.api [None req-9c87809b-970a-44ae-a0c9-ae8da38668e0 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2735810, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.632527] env[63028]: DEBUG oslo_concurrency.lockutils [None req-fe1a3795-d92a-4f03-9066-974eec192970 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Acquiring lock "cd11b318-9158-4f1d-8aa8-1c9d565bb5d5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 910.632527] env[63028]: DEBUG oslo_concurrency.lockutils [None req-fe1a3795-d92a-4f03-9066-974eec192970 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Lock "cd11b318-9158-4f1d-8aa8-1c9d565bb5d5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 910.632767] env[63028]: DEBUG oslo_concurrency.lockutils [None req-fe1a3795-d92a-4f03-9066-974eec192970 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Acquiring lock "cd11b318-9158-4f1d-8aa8-1c9d565bb5d5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 910.633236] env[63028]: DEBUG oslo_concurrency.lockutils [None req-fe1a3795-d92a-4f03-9066-974eec192970 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Lock "cd11b318-9158-4f1d-8aa8-1c9d565bb5d5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 910.633387] env[63028]: DEBUG oslo_concurrency.lockutils [None req-fe1a3795-d92a-4f03-9066-974eec192970 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Lock "cd11b318-9158-4f1d-8aa8-1c9d565bb5d5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 910.635707] env[63028]: INFO nova.compute.manager [None req-fe1a3795-d92a-4f03-9066-974eec192970 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: cd11b318-9158-4f1d-8aa8-1c9d565bb5d5] Terminating instance [ 910.779345] env[63028]: DEBUG oslo_vmware.api [None req-9c87809b-970a-44ae-a0c9-ae8da38668e0 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2735810, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.815159] env[63028]: DEBUG nova.network.neutron [None req-50b48420-9bf6-4b47-bba3-9fd80cfa788c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: addcf94a-1a56-49ff-8adb-3ce7f2d1e09e] Instance cache missing network info. {{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 911.029316] env[63028]: DEBUG nova.network.neutron [None req-50b48420-9bf6-4b47-bba3-9fd80cfa788c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: addcf94a-1a56-49ff-8adb-3ce7f2d1e09e] Updating instance_info_cache with network_info: [{"id": "8ed03cbb-e718-4686-a803-4082f3e01d60", "address": "fa:16:3e:66:85:8a", "network": {"id": "1fdce9cd-bf47-402a-8e16-4c01a25ad481", "bridge": "br-int", "label": "tempest-ServersTestJSON-1363423183-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ea26842446ec4691a6456a6659188704", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "706c9762-1cf8-4770-897d-377d0d927773", "external-id": "nsx-vlan-transportzone-402", "segmentation_id": 402, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8ed03cbb-e7", "ovs_interfaceid": "8ed03cbb-e718-4686-a803-4082f3e01d60", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 911.143636] env[63028]: DEBUG nova.compute.manager [None req-fe1a3795-d92a-4f03-9066-974eec192970 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: cd11b318-9158-4f1d-8aa8-1c9d565bb5d5] Start destroying the instance on the hypervisor. 
{{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 911.143905] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-fe1a3795-d92a-4f03-9066-974eec192970 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: cd11b318-9158-4f1d-8aa8-1c9d565bb5d5] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 911.145254] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de8898a6-1756-4653-8179-35afe0384cb4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.152697] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe1a3795-d92a-4f03-9066-974eec192970 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: cd11b318-9158-4f1d-8aa8-1c9d565bb5d5] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 911.152993] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b5d6d1e8-189a-4495-a367-ed006ecf759c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.161768] env[63028]: DEBUG oslo_vmware.api [None req-fe1a3795-d92a-4f03-9066-974eec192970 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Waiting for the task: (returnval){ [ 911.161768] env[63028]: value = "task-2735811" [ 911.161768] env[63028]: _type = "Task" [ 911.161768] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.169159] env[63028]: DEBUG oslo_vmware.api [None req-fe1a3795-d92a-4f03-9066-974eec192970 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735811, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.170853] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ddc33b6-6da9-4766-8603-26c1834229c9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.177702] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f349a4a-f604-43d7-924a-4a39980a9bdf {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.187254] env[63028]: DEBUG oslo_concurrency.lockutils [None req-94c0dc8d-b473-4783-b916-c38d54c14d8a tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Acquiring lock "672695c2-06f3-4790-a459-4b575baf29d3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 911.187490] env[63028]: DEBUG oslo_concurrency.lockutils [None req-94c0dc8d-b473-4783-b916-c38d54c14d8a tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Lock "672695c2-06f3-4790-a459-4b575baf29d3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 911.213927] env[63028]: DEBUG nova.compute.manager [None req-94c0dc8d-b473-4783-b916-c38d54c14d8a tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: 672695c2-06f3-4790-a459-4b575baf29d3] Starting instance... {{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 911.216956] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc805061-71b0-440c-a83b-bb4c75b6fa0d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.225949] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f27f767-0a59-4484-9ed9-341de2325c98 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.241187] env[63028]: DEBUG nova.compute.provider_tree [None req-d0adac76-4bdd-4403-8347-182796e98476 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 911.280655] env[63028]: DEBUG oslo_vmware.api [None req-9c87809b-970a-44ae-a0c9-ae8da38668e0 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2735810, 'name': ReconfigVM_Task, 'duration_secs': 0.650025} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.280931] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-9c87809b-970a-44ae-a0c9-ae8da38668e0 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: ed872f21-c2c4-4597-8c9e-9f8d2202b707] Reconfigured VM instance instance-0000004c to attach disk [datastore2] ed872f21-c2c4-4597-8c9e-9f8d2202b707/ed872f21-c2c4-4597-8c9e-9f8d2202b707.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 911.281586] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7b82f878-95fc-4631-a40a-b8b1ab6f309a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.287192] env[63028]: DEBUG oslo_vmware.api [None req-9c87809b-970a-44ae-a0c9-ae8da38668e0 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Waiting for the task: (returnval){ [ 911.287192] env[63028]: value = "task-2735812" [ 911.287192] env[63028]: _type = "Task" [ 911.287192] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.295555] env[63028]: DEBUG oslo_vmware.api [None req-9c87809b-970a-44ae-a0c9-ae8da38668e0 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2735812, 'name': Rename_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.394163] env[63028]: DEBUG nova.compute.manager [req-22f2df5d-1928-4d79-8724-54235b077523 req-17dceb55-6b39-4ed5-9a8c-eb1ebc5a6b84 service nova] [instance: addcf94a-1a56-49ff-8adb-3ce7f2d1e09e] Received event network-vif-plugged-8ed03cbb-e718-4686-a803-4082f3e01d60 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 911.394817] env[63028]: DEBUG oslo_concurrency.lockutils [req-22f2df5d-1928-4d79-8724-54235b077523 req-17dceb55-6b39-4ed5-9a8c-eb1ebc5a6b84 service nova] Acquiring lock "addcf94a-1a56-49ff-8adb-3ce7f2d1e09e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 911.394817] env[63028]: DEBUG oslo_concurrency.lockutils [req-22f2df5d-1928-4d79-8724-54235b077523 req-17dceb55-6b39-4ed5-9a8c-eb1ebc5a6b84 service nova] Lock "addcf94a-1a56-49ff-8adb-3ce7f2d1e09e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 911.394817] env[63028]: DEBUG oslo_concurrency.lockutils [req-22f2df5d-1928-4d79-8724-54235b077523 req-17dceb55-6b39-4ed5-9a8c-eb1ebc5a6b84 service nova] Lock "addcf94a-1a56-49ff-8adb-3ce7f2d1e09e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 911.395143] env[63028]: DEBUG nova.compute.manager [req-22f2df5d-1928-4d79-8724-54235b077523 req-17dceb55-6b39-4ed5-9a8c-eb1ebc5a6b84 service nova] [instance: addcf94a-1a56-49ff-8adb-3ce7f2d1e09e] No waiting events found dispatching 
network-vif-plugged-8ed03cbb-e718-4686-a803-4082f3e01d60 {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 911.395499] env[63028]: WARNING nova.compute.manager [req-22f2df5d-1928-4d79-8724-54235b077523 req-17dceb55-6b39-4ed5-9a8c-eb1ebc5a6b84 service nova] [instance: addcf94a-1a56-49ff-8adb-3ce7f2d1e09e] Received unexpected event network-vif-plugged-8ed03cbb-e718-4686-a803-4082f3e01d60 for instance with vm_state building and task_state spawning. [ 911.395499] env[63028]: DEBUG nova.compute.manager [req-22f2df5d-1928-4d79-8724-54235b077523 req-17dceb55-6b39-4ed5-9a8c-eb1ebc5a6b84 service nova] [instance: addcf94a-1a56-49ff-8adb-3ce7f2d1e09e] Received event network-changed-8ed03cbb-e718-4686-a803-4082f3e01d60 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 911.395707] env[63028]: DEBUG nova.compute.manager [req-22f2df5d-1928-4d79-8724-54235b077523 req-17dceb55-6b39-4ed5-9a8c-eb1ebc5a6b84 service nova] [instance: addcf94a-1a56-49ff-8adb-3ce7f2d1e09e] Refreshing instance network info cache due to event network-changed-8ed03cbb-e718-4686-a803-4082f3e01d60. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 911.395895] env[63028]: DEBUG oslo_concurrency.lockutils [req-22f2df5d-1928-4d79-8724-54235b077523 req-17dceb55-6b39-4ed5-9a8c-eb1ebc5a6b84 service nova] Acquiring lock "refresh_cache-addcf94a-1a56-49ff-8adb-3ce7f2d1e09e" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 911.531575] env[63028]: DEBUG oslo_concurrency.lockutils [None req-50b48420-9bf6-4b47-bba3-9fd80cfa788c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Releasing lock "refresh_cache-addcf94a-1a56-49ff-8adb-3ce7f2d1e09e" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 911.531954] env[63028]: DEBUG nova.compute.manager [None req-50b48420-9bf6-4b47-bba3-9fd80cfa788c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: addcf94a-1a56-49ff-8adb-3ce7f2d1e09e] Instance network_info: |[{"id": "8ed03cbb-e718-4686-a803-4082f3e01d60", "address": "fa:16:3e:66:85:8a", "network": {"id": "1fdce9cd-bf47-402a-8e16-4c01a25ad481", "bridge": "br-int", "label": "tempest-ServersTestJSON-1363423183-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ea26842446ec4691a6456a6659188704", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "706c9762-1cf8-4770-897d-377d0d927773", "external-id": "nsx-vlan-transportzone-402", "segmentation_id": 402, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8ed03cbb-e7", "ovs_interfaceid": "8ed03cbb-e718-4686-a803-4082f3e01d60", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 911.532276] env[63028]: DEBUG oslo_concurrency.lockutils [req-22f2df5d-1928-4d79-8724-54235b077523 
req-17dceb55-6b39-4ed5-9a8c-eb1ebc5a6b84 service nova] Acquired lock "refresh_cache-addcf94a-1a56-49ff-8adb-3ce7f2d1e09e" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 911.532453] env[63028]: DEBUG nova.network.neutron [req-22f2df5d-1928-4d79-8724-54235b077523 req-17dceb55-6b39-4ed5-9a8c-eb1ebc5a6b84 service nova] [instance: addcf94a-1a56-49ff-8adb-3ce7f2d1e09e] Refreshing network info cache for port 8ed03cbb-e718-4686-a803-4082f3e01d60 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 911.533719] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-50b48420-9bf6-4b47-bba3-9fd80cfa788c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: addcf94a-1a56-49ff-8adb-3ce7f2d1e09e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:66:85:8a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '706c9762-1cf8-4770-897d-377d0d927773', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8ed03cbb-e718-4686-a803-4082f3e01d60', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 911.540919] env[63028]: DEBUG oslo.service.loopingcall [None req-50b48420-9bf6-4b47-bba3-9fd80cfa788c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 911.541792] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: addcf94a-1a56-49ff-8adb-3ce7f2d1e09e] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 911.542046] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9078868f-bb40-4108-b8d4-3fc25ad8b47b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.565938] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 911.565938] env[63028]: value = "task-2735813" [ 911.565938] env[63028]: _type = "Task" [ 911.565938] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.575841] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735813, 'name': CreateVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.672983] env[63028]: DEBUG oslo_vmware.api [None req-fe1a3795-d92a-4f03-9066-974eec192970 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735811, 'name': PowerOffVM_Task, 'duration_secs': 0.326373} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.673373] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe1a3795-d92a-4f03-9066-974eec192970 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: cd11b318-9158-4f1d-8aa8-1c9d565bb5d5] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 911.673557] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-fe1a3795-d92a-4f03-9066-974eec192970 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: cd11b318-9158-4f1d-8aa8-1c9d565bb5d5] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 911.673823] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0d290193-a230-41f8-8620-f66925f7e492 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.732414] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-fe1a3795-d92a-4f03-9066-974eec192970 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: cd11b318-9158-4f1d-8aa8-1c9d565bb5d5] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 911.732682] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-fe1a3795-d92a-4f03-9066-974eec192970 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: cd11b318-9158-4f1d-8aa8-1c9d565bb5d5] Deleting contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 911.732843] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-fe1a3795-d92a-4f03-9066-974eec192970 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Deleting the datastore file [datastore1] cd11b318-9158-4f1d-8aa8-1c9d565bb5d5 {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 911.733114] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0cfe1b38-b7eb-41fa-b4c8-cb62006eb702 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.737766] env[63028]: DEBUG oslo_concurrency.lockutils [None req-94c0dc8d-b473-4783-b916-c38d54c14d8a tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 911.741635] env[63028]: DEBUG oslo_vmware.api [None req-fe1a3795-d92a-4f03-9066-974eec192970 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Waiting for the task: (returnval){ [ 911.741635] env[63028]: value = "task-2735815" [ 911.741635] env[63028]: _type = "Task" [ 911.741635] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.746876] env[63028]: DEBUG nova.scheduler.client.report [None req-d0adac76-4bdd-4403-8347-182796e98476 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 911.753518] env[63028]: DEBUG oslo_vmware.api [None req-fe1a3795-d92a-4f03-9066-974eec192970 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735815, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.798121] env[63028]: DEBUG oslo_vmware.api [None req-9c87809b-970a-44ae-a0c9-ae8da38668e0 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2735812, 'name': Rename_Task} progress is 99%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.077631] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735813, 'name': CreateVM_Task, 'duration_secs': 0.320353} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.077794] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: addcf94a-1a56-49ff-8adb-3ce7f2d1e09e] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 912.078510] env[63028]: DEBUG oslo_concurrency.lockutils [None req-50b48420-9bf6-4b47-bba3-9fd80cfa788c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 912.078688] env[63028]: DEBUG oslo_concurrency.lockutils [None req-50b48420-9bf6-4b47-bba3-9fd80cfa788c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 912.078998] env[63028]: DEBUG oslo_concurrency.lockutils [None req-50b48420-9bf6-4b47-bba3-9fd80cfa788c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 912.079266] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-26c5dbef-acb6-4c92-81ff-fd8b8e81b91c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.083706] env[63028]: DEBUG oslo_vmware.api [None 
req-50b48420-9bf6-4b47-bba3-9fd80cfa788c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Waiting for the task: (returnval){ [ 912.083706] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52711adc-3a71-b66d-8724-f1e20d55cec9" [ 912.083706] env[63028]: _type = "Task" [ 912.083706] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.091534] env[63028]: DEBUG oslo_vmware.api [None req-50b48420-9bf6-4b47-bba3-9fd80cfa788c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52711adc-3a71-b66d-8724-f1e20d55cec9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.226010] env[63028]: DEBUG nova.network.neutron [req-22f2df5d-1928-4d79-8724-54235b077523 req-17dceb55-6b39-4ed5-9a8c-eb1ebc5a6b84 service nova] [instance: addcf94a-1a56-49ff-8adb-3ce7f2d1e09e] Updated VIF entry in instance network info cache for port 8ed03cbb-e718-4686-a803-4082f3e01d60. {{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 912.226392] env[63028]: DEBUG nova.network.neutron [req-22f2df5d-1928-4d79-8724-54235b077523 req-17dceb55-6b39-4ed5-9a8c-eb1ebc5a6b84 service nova] [instance: addcf94a-1a56-49ff-8adb-3ce7f2d1e09e] Updating instance_info_cache with network_info: [{"id": "8ed03cbb-e718-4686-a803-4082f3e01d60", "address": "fa:16:3e:66:85:8a", "network": {"id": "1fdce9cd-bf47-402a-8e16-4c01a25ad481", "bridge": "br-int", "label": "tempest-ServersTestJSON-1363423183-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ea26842446ec4691a6456a6659188704", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "706c9762-1cf8-4770-897d-377d0d927773", "external-id": "nsx-vlan-transportzone-402", "segmentation_id": 402, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8ed03cbb-e7", "ovs_interfaceid": "8ed03cbb-e718-4686-a803-4082f3e01d60", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 912.252243] env[63028]: DEBUG oslo_vmware.api [None req-fe1a3795-d92a-4f03-9066-974eec192970 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735815, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.278324} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.252528] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-fe1a3795-d92a-4f03-9066-974eec192970 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 912.252719] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-fe1a3795-d92a-4f03-9066-974eec192970 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: cd11b318-9158-4f1d-8aa8-1c9d565bb5d5] Deleted contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 912.252930] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-fe1a3795-d92a-4f03-9066-974eec192970 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: cd11b318-9158-4f1d-8aa8-1c9d565bb5d5] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 912.253119] env[63028]: INFO nova.compute.manager [None req-fe1a3795-d92a-4f03-9066-974eec192970 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: cd11b318-9158-4f1d-8aa8-1c9d565bb5d5] Took 1.11 seconds to destroy the instance on the hypervisor. [ 912.253365] env[63028]: DEBUG oslo.service.loopingcall [None req-fe1a3795-d92a-4f03-9066-974eec192970 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 912.253549] env[63028]: DEBUG nova.compute.manager [-] [instance: cd11b318-9158-4f1d-8aa8-1c9d565bb5d5] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 912.253643] env[63028]: DEBUG nova.network.neutron [-] [instance: cd11b318-9158-4f1d-8aa8-1c9d565bb5d5] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 912.255699] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d0adac76-4bdd-4403-8347-182796e98476 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.584s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 912.256181] env[63028]: DEBUG nova.compute.manager [None req-d0adac76-4bdd-4403-8347-182796e98476 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Start building networks asynchronously for instance. 
{{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 912.259562] env[63028]: DEBUG oslo_concurrency.lockutils [None req-43a912bd-53cd-4738-bc4d-db229f7635f9 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.583s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 912.259806] env[63028]: DEBUG nova.objects.instance [None req-43a912bd-53cd-4738-bc4d-db229f7635f9 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Lazy-loading 'resources' on Instance uuid a2f7d7c6-7931-4b21-a29c-bb9965577210 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 912.298391] env[63028]: DEBUG oslo_vmware.api [None req-9c87809b-970a-44ae-a0c9-ae8da38668e0 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2735812, 'name': Rename_Task} progress is 99%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.594600] env[63028]: DEBUG oslo_vmware.api [None req-50b48420-9bf6-4b47-bba3-9fd80cfa788c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52711adc-3a71-b66d-8724-f1e20d55cec9, 'name': SearchDatastore_Task, 'duration_secs': 0.020157} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.594816] env[63028]: DEBUG oslo_concurrency.lockutils [None req-50b48420-9bf6-4b47-bba3-9fd80cfa788c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 912.595043] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-50b48420-9bf6-4b47-bba3-9fd80cfa788c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: addcf94a-1a56-49ff-8adb-3ce7f2d1e09e] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 912.595303] env[63028]: DEBUG oslo_concurrency.lockutils [None req-50b48420-9bf6-4b47-bba3-9fd80cfa788c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 912.595454] env[63028]: DEBUG oslo_concurrency.lockutils [None req-50b48420-9bf6-4b47-bba3-9fd80cfa788c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 912.595651] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-50b48420-9bf6-4b47-bba3-9fd80cfa788c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Creating 
directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 912.595932] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e74eedc5-d7ff-48ed-925c-81392abeb068 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.604642] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-50b48420-9bf6-4b47-bba3-9fd80cfa788c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 912.604799] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-50b48420-9bf6-4b47-bba3-9fd80cfa788c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 912.605752] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b1b2a464-73a8-4c60-a7aa-5ebe687c4fc9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.611160] env[63028]: DEBUG oslo_vmware.api [None req-50b48420-9bf6-4b47-bba3-9fd80cfa788c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Waiting for the task: (returnval){ [ 912.611160] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52887f98-f18c-0d0d-3eca-11a37c5ff777" [ 912.611160] env[63028]: _type = "Task" [ 912.611160] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.618221] env[63028]: DEBUG oslo_vmware.api [None req-50b48420-9bf6-4b47-bba3-9fd80cfa788c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52887f98-f18c-0d0d-3eca-11a37c5ff777, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.728811] env[63028]: DEBUG oslo_concurrency.lockutils [req-22f2df5d-1928-4d79-8724-54235b077523 req-17dceb55-6b39-4ed5-9a8c-eb1ebc5a6b84 service nova] Releasing lock "refresh_cache-addcf94a-1a56-49ff-8adb-3ce7f2d1e09e" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 912.767402] env[63028]: DEBUG nova.compute.utils [None req-d0adac76-4bdd-4403-8347-182796e98476 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 912.768927] env[63028]: DEBUG nova.compute.manager [None req-d0adac76-4bdd-4403-8347-182796e98476 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Allocating IP information in the background. 
{{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 912.769115] env[63028]: DEBUG nova.network.neutron [None req-d0adac76-4bdd-4403-8347-182796e98476 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 912.804114] env[63028]: DEBUG oslo_vmware.api [None req-9c87809b-970a-44ae-a0c9-ae8da38668e0 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2735812, 'name': Rename_Task, 'duration_secs': 1.222819} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.804387] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c87809b-970a-44ae-a0c9-ae8da38668e0 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: ed872f21-c2c4-4597-8c9e-9f8d2202b707] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 912.804426] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9ed50371-cbef-489c-bbd5-475609fd029c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.812224] env[63028]: DEBUG oslo_vmware.api [None req-9c87809b-970a-44ae-a0c9-ae8da38668e0 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Waiting for the task: (returnval){ [ 912.812224] env[63028]: value = "task-2735816" [ 912.812224] env[63028]: _type = "Task" [ 912.812224] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.818069] env[63028]: DEBUG nova.policy [None req-d0adac76-4bdd-4403-8347-182796e98476 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a3ed8f5b3d7b4be99d3b4649e156af58', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '847e89af959a4266ab55c1d2106ba8fe', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 912.823166] env[63028]: DEBUG oslo_vmware.api [None req-9c87809b-970a-44ae-a0c9-ae8da38668e0 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2735816, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.027129] env[63028]: DEBUG nova.network.neutron [-] [instance: cd11b318-9158-4f1d-8aa8-1c9d565bb5d5] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 913.122867] env[63028]: DEBUG oslo_vmware.api [None req-50b48420-9bf6-4b47-bba3-9fd80cfa788c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52887f98-f18c-0d0d-3eca-11a37c5ff777, 'name': SearchDatastore_Task, 'duration_secs': 0.008594} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.127695] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5384f6d9-d586-4608-9b3f-556716d4fd7d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.133210] env[63028]: DEBUG oslo_vmware.api [None req-50b48420-9bf6-4b47-bba3-9fd80cfa788c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Waiting for the task: (returnval){ [ 913.133210] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52a5227d-4e23-9b90-c958-3d06bd6524a9" [ 913.133210] env[63028]: _type = "Task" [ 913.133210] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.148058] env[63028]: DEBUG oslo_vmware.api [None req-50b48420-9bf6-4b47-bba3-9fd80cfa788c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52a5227d-4e23-9b90-c958-3d06bd6524a9, 'name': SearchDatastore_Task, 'duration_secs': 0.009405} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.148058] env[63028]: DEBUG oslo_concurrency.lockutils [None req-50b48420-9bf6-4b47-bba3-9fd80cfa788c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 913.148058] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-50b48420-9bf6-4b47-bba3-9fd80cfa788c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] addcf94a-1a56-49ff-8adb-3ce7f2d1e09e/addcf94a-1a56-49ff-8adb-3ce7f2d1e09e.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 913.148058] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-91417d41-68b6-4404-89a5-9729a1fe7b93 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.159017] env[63028]: DEBUG oslo_vmware.api [None req-50b48420-9bf6-4b47-bba3-9fd80cfa788c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Waiting for the task: (returnval){ [ 913.159017] env[63028]: value = "task-2735817" [ 913.159017] env[63028]: _type = "Task" [ 913.159017] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.164597] env[63028]: DEBUG oslo_vmware.api [None req-50b48420-9bf6-4b47-bba3-9fd80cfa788c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2735817, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.214610] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34f0b994-6e72-4015-a036-8eaf9800a0c5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.224434] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-250218da-04b9-420d-8ac0-fe253473ae99 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.262348] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09ded9b5-279d-4c1c-9448-85fd47325d1c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.270985] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5003c20e-a700-4258-9bb7-b0ad0cf4ad8d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.276119] env[63028]: DEBUG nova.compute.manager [None req-d0adac76-4bdd-4403-8347-182796e98476 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Start building block device mappings for instance. 
{{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 913.292995] env[63028]: DEBUG nova.compute.provider_tree [None req-43a912bd-53cd-4738-bc4d-db229f7635f9 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 913.324287] env[63028]: DEBUG oslo_vmware.api [None req-9c87809b-970a-44ae-a0c9-ae8da38668e0 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2735816, 'name': PowerOnVM_Task, 'duration_secs': 0.44128} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.324678] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c87809b-970a-44ae-a0c9-ae8da38668e0 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: ed872f21-c2c4-4597-8c9e-9f8d2202b707] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 913.324851] env[63028]: INFO nova.compute.manager [None req-9c87809b-970a-44ae-a0c9-ae8da38668e0 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: ed872f21-c2c4-4597-8c9e-9f8d2202b707] Took 8.43 seconds to spawn the instance on the hypervisor. [ 913.325068] env[63028]: DEBUG nova.compute.manager [None req-9c87809b-970a-44ae-a0c9-ae8da38668e0 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: ed872f21-c2c4-4597-8c9e-9f8d2202b707] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 913.325952] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d58a02f9-8b89-4de6-8683-d60d8039fb8c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.424499] env[63028]: DEBUG nova.compute.manager [req-d7d6f391-d972-43ce-8f80-587a28ec8659 req-52ece25e-467e-40f2-a7a6-19ffbce9a71b service nova] [instance: cd11b318-9158-4f1d-8aa8-1c9d565bb5d5] Received event network-vif-deleted-34861b32-33da-4ca0-b4ae-0031ab0f8619 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 913.469210] env[63028]: DEBUG nova.network.neutron [None req-d0adac76-4bdd-4403-8347-182796e98476 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Successfully created port: 11a8272a-a9ff-4d48-860e-8ee1b781a6ab {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 913.528419] env[63028]: INFO nova.compute.manager [-] [instance: cd11b318-9158-4f1d-8aa8-1c9d565bb5d5] Took 1.27 seconds to deallocate network for instance. [ 913.666254] env[63028]: DEBUG oslo_vmware.api [None req-50b48420-9bf6-4b47-bba3-9fd80cfa788c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2735817, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.467456} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.666542] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-50b48420-9bf6-4b47-bba3-9fd80cfa788c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] addcf94a-1a56-49ff-8adb-3ce7f2d1e09e/addcf94a-1a56-49ff-8adb-3ce7f2d1e09e.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 913.666754] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-50b48420-9bf6-4b47-bba3-9fd80cfa788c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: addcf94a-1a56-49ff-8adb-3ce7f2d1e09e] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 913.667010] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-41309a61-ead5-4454-a6d3-ee2b02df391d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.673424] env[63028]: DEBUG oslo_vmware.api [None req-50b48420-9bf6-4b47-bba3-9fd80cfa788c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Waiting for the task: (returnval){ [ 913.673424] env[63028]: value = "task-2735818" [ 913.673424] env[63028]: _type = "Task" [ 913.673424] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.680993] env[63028]: DEBUG oslo_vmware.api [None req-50b48420-9bf6-4b47-bba3-9fd80cfa788c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2735818, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.801170] env[63028]: DEBUG nova.scheduler.client.report [None req-43a912bd-53cd-4738-bc4d-db229f7635f9 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 913.845405] env[63028]: INFO nova.compute.manager [None req-9c87809b-970a-44ae-a0c9-ae8da38668e0 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: ed872f21-c2c4-4597-8c9e-9f8d2202b707] Took 47.68 seconds to build instance. 
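The SearchDatastore_Task / CopyVirtualDisk_Task / ExtendVirtualDisk_Task records above all follow oslo.vmware's invoke-and-poll pattern: invoke_api() issues the SOAP call and returns a task reference, and wait_for_task() polls it (the "_poll_task ... progress is N%" lines) until the task reports success. A rough sketch of that pattern is below; the hostname, credentials, retry/poll values and datastore paths are placeholders, and constructor argument order may differ between oslo.vmware releases.

    # Sketch only: connection details and paths below are assumptions, not values from this log.
    from oslo_vmware import api as vmware_api

    # host, username, password, API retry count, task poll interval (assumed ordering)
    session = vmware_api.VMwareAPISession('vc.example.test', 'user', 'secret', 10, 0.5)

    # Issue the vCenter task and block until it completes (or raise on error);
    # this is what produces the "Waiting for the task" / "completed successfully" records.
    disk_mgr = session.vim.service_content.virtualDiskManager
    task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task', disk_mgr,
                              sourceName='[datastore2] cache/base.vmdk',
                              destName='[datastore2] example/example.vmdk')
    session.wait_for_task(task)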
[ 914.036140] env[63028]: DEBUG oslo_concurrency.lockutils [None req-fe1a3795-d92a-4f03-9066-974eec192970 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 914.183440] env[63028]: DEBUG oslo_vmware.api [None req-50b48420-9bf6-4b47-bba3-9fd80cfa788c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2735818, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067013} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.183705] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-50b48420-9bf6-4b47-bba3-9fd80cfa788c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: addcf94a-1a56-49ff-8adb-3ce7f2d1e09e] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 914.184476] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0349d857-8f66-45fd-a4a0-41503a9d0c50 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.205473] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-50b48420-9bf6-4b47-bba3-9fd80cfa788c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: addcf94a-1a56-49ff-8adb-3ce7f2d1e09e] Reconfiguring VM instance instance-0000004d to attach disk [datastore2] addcf94a-1a56-49ff-8adb-3ce7f2d1e09e/addcf94a-1a56-49ff-8adb-3ce7f2d1e09e.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 914.205749] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-21141c2c-1064-4700-8ae9-b5a313f92a94 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.225290] env[63028]: DEBUG oslo_vmware.api [None req-50b48420-9bf6-4b47-bba3-9fd80cfa788c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Waiting for the task: (returnval){ [ 914.225290] env[63028]: value = "task-2735819" [ 914.225290] env[63028]: _type = "Task" [ 914.225290] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.233342] env[63028]: DEBUG oslo_vmware.api [None req-50b48420-9bf6-4b47-bba3-9fd80cfa788c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2735819, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.287504] env[63028]: DEBUG nova.compute.manager [None req-d0adac76-4bdd-4403-8347-182796e98476 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Start spawning the instance on the hypervisor. 
{{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 914.309443] env[63028]: DEBUG oslo_concurrency.lockutils [None req-43a912bd-53cd-4738-bc4d-db229f7635f9 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.050s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 914.311574] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7fd88a7b-0c2e-4205-9b5e-08f7b653f3a2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 25.012s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 914.318625] env[63028]: DEBUG nova.virt.hardware [None req-d0adac76-4bdd-4403-8347-182796e98476 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 914.319030] env[63028]: DEBUG nova.virt.hardware [None req-d0adac76-4bdd-4403-8347-182796e98476 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 914.319098] env[63028]: DEBUG nova.virt.hardware [None req-d0adac76-4bdd-4403-8347-182796e98476 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 914.319282] env[63028]: DEBUG nova.virt.hardware [None req-d0adac76-4bdd-4403-8347-182796e98476 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 914.319444] env[63028]: DEBUG nova.virt.hardware [None req-d0adac76-4bdd-4403-8347-182796e98476 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 914.319653] env[63028]: DEBUG nova.virt.hardware [None req-d0adac76-4bdd-4403-8347-182796e98476 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, 
threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 914.319780] env[63028]: DEBUG nova.virt.hardware [None req-d0adac76-4bdd-4403-8347-182796e98476 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 914.319949] env[63028]: DEBUG nova.virt.hardware [None req-d0adac76-4bdd-4403-8347-182796e98476 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 914.320149] env[63028]: DEBUG nova.virt.hardware [None req-d0adac76-4bdd-4403-8347-182796e98476 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 914.320313] env[63028]: DEBUG nova.virt.hardware [None req-d0adac76-4bdd-4403-8347-182796e98476 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 914.320486] env[63028]: DEBUG nova.virt.hardware [None req-d0adac76-4bdd-4403-8347-182796e98476 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 914.321342] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a296e6c-1d61-4909-b603-2069772be90a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.329795] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d395d35-8aec-4d69-8c85-7908db78c8f9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.336411] env[63028]: INFO nova.scheduler.client.report [None req-43a912bd-53cd-4738-bc4d-db229f7635f9 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Deleted allocations for instance a2f7d7c6-7931-4b21-a29c-bb9965577210 [ 914.349603] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9c87809b-970a-44ae-a0c9-ae8da38668e0 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Lock "ed872f21-c2c4-4597-8c9e-9f8d2202b707" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 70.271s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 914.735676] env[63028]: DEBUG oslo_vmware.api [None req-50b48420-9bf6-4b47-bba3-9fd80cfa788c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2735819, 'name': ReconfigVM_Task, 'duration_secs': 0.324423} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.735964] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-50b48420-9bf6-4b47-bba3-9fd80cfa788c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: addcf94a-1a56-49ff-8adb-3ce7f2d1e09e] Reconfigured VM instance instance-0000004d to attach disk [datastore2] addcf94a-1a56-49ff-8adb-3ce7f2d1e09e/addcf94a-1a56-49ff-8adb-3ce7f2d1e09e.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 914.736641] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-10feea9f-cb26-4c13-ae42-4aceb8fb2600 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.743388] env[63028]: DEBUG oslo_vmware.api [None req-50b48420-9bf6-4b47-bba3-9fd80cfa788c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Waiting for the task: (returnval){ [ 914.743388] env[63028]: value = "task-2735820" [ 914.743388] env[63028]: _type = "Task" [ 914.743388] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.753212] env[63028]: DEBUG oslo_vmware.api [None req-50b48420-9bf6-4b47-bba3-9fd80cfa788c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2735820, 'name': Rename_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.847795] env[63028]: DEBUG oslo_concurrency.lockutils [None req-43a912bd-53cd-4738-bc4d-db229f7635f9 tempest-ImagesOneServerNegativeTestJSON-1777388496 tempest-ImagesOneServerNegativeTestJSON-1777388496-project-member] Lock "a2f7d7c6-7931-4b21-a29c-bb9965577210" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.580s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 915.089449] env[63028]: DEBUG nova.network.neutron [None req-d0adac76-4bdd-4403-8347-182796e98476 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Successfully updated port: 11a8272a-a9ff-4d48-860e-8ee1b781a6ab {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 915.258552] env[63028]: DEBUG oslo_vmware.api [None req-50b48420-9bf6-4b47-bba3-9fd80cfa788c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2735820, 'name': Rename_Task, 'duration_secs': 0.138721} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.259238] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-50b48420-9bf6-4b47-bba3-9fd80cfa788c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: addcf94a-1a56-49ff-8adb-3ce7f2d1e09e] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 915.259620] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fe0f9e27-5d00-4132-91ab-edebec18a5fa {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.272018] env[63028]: DEBUG oslo_vmware.api [None req-50b48420-9bf6-4b47-bba3-9fd80cfa788c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Waiting for the task: (returnval){ [ 915.272018] env[63028]: value = "task-2735821" [ 915.272018] env[63028]: _type = "Task" [ 915.272018] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.280302] env[63028]: DEBUG oslo_vmware.api [None req-50b48420-9bf6-4b47-bba3-9fd80cfa788c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2735821, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.327979] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d435d470-88fa-4a26-b01c-187c237756b3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.335280] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be3428d2-2ffd-4a79-9a3f-364240171cd9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.367601] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-403949e6-4e3f-45de-bc28-bf45ad02d7e9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.375842] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4797dd8-16b0-4f7b-b139-f98d7855be28 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.389417] env[63028]: DEBUG nova.compute.provider_tree [None req-7fd88a7b-0c2e-4205-9b5e-08f7b653f3a2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 915.452883] env[63028]: DEBUG nova.compute.manager [req-9d998847-9865-457a-934a-a8d504ca7e97 req-5ca42023-e1ca-425f-8d19-27480931f0a4 service nova] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Received event network-vif-plugged-11a8272a-a9ff-4d48-860e-8ee1b781a6ab {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 915.453685] env[63028]: DEBUG oslo_concurrency.lockutils [req-9d998847-9865-457a-934a-a8d504ca7e97 req-5ca42023-e1ca-425f-8d19-27480931f0a4 service nova] Acquiring lock "46dc76bc-854f-46ad-9db5-21cf6f40fb21-events" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 915.455059] env[63028]: DEBUG oslo_concurrency.lockutils [req-9d998847-9865-457a-934a-a8d504ca7e97 req-5ca42023-e1ca-425f-8d19-27480931f0a4 service nova] Lock "46dc76bc-854f-46ad-9db5-21cf6f40fb21-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 915.455059] env[63028]: DEBUG oslo_concurrency.lockutils [req-9d998847-9865-457a-934a-a8d504ca7e97 req-5ca42023-e1ca-425f-8d19-27480931f0a4 service nova] Lock "46dc76bc-854f-46ad-9db5-21cf6f40fb21-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 915.455059] env[63028]: DEBUG nova.compute.manager [req-9d998847-9865-457a-934a-a8d504ca7e97 req-5ca42023-e1ca-425f-8d19-27480931f0a4 service nova] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] No waiting events found dispatching network-vif-plugged-11a8272a-a9ff-4d48-860e-8ee1b781a6ab {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 915.455059] env[63028]: WARNING nova.compute.manager [req-9d998847-9865-457a-934a-a8d504ca7e97 req-5ca42023-e1ca-425f-8d19-27480931f0a4 service nova] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Received unexpected event network-vif-plugged-11a8272a-a9ff-4d48-860e-8ee1b781a6ab for instance with vm_state building and task_state spawning. [ 915.455059] env[63028]: DEBUG nova.compute.manager [req-9d998847-9865-457a-934a-a8d504ca7e97 req-5ca42023-e1ca-425f-8d19-27480931f0a4 service nova] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Received event network-changed-11a8272a-a9ff-4d48-860e-8ee1b781a6ab {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 915.455059] env[63028]: DEBUG nova.compute.manager [req-9d998847-9865-457a-934a-a8d504ca7e97 req-5ca42023-e1ca-425f-8d19-27480931f0a4 service nova] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Refreshing instance network info cache due to event network-changed-11a8272a-a9ff-4d48-860e-8ee1b781a6ab. 
{{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 915.455059] env[63028]: DEBUG oslo_concurrency.lockutils [req-9d998847-9865-457a-934a-a8d504ca7e97 req-5ca42023-e1ca-425f-8d19-27480931f0a4 service nova] Acquiring lock "refresh_cache-46dc76bc-854f-46ad-9db5-21cf6f40fb21" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 915.455587] env[63028]: DEBUG oslo_concurrency.lockutils [req-9d998847-9865-457a-934a-a8d504ca7e97 req-5ca42023-e1ca-425f-8d19-27480931f0a4 service nova] Acquired lock "refresh_cache-46dc76bc-854f-46ad-9db5-21cf6f40fb21" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 915.455587] env[63028]: DEBUG nova.network.neutron [req-9d998847-9865-457a-934a-a8d504ca7e97 req-5ca42023-e1ca-425f-8d19-27480931f0a4 service nova] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Refreshing network info cache for port 11a8272a-a9ff-4d48-860e-8ee1b781a6ab {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 915.591543] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d0adac76-4bdd-4403-8347-182796e98476 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Acquiring lock "refresh_cache-46dc76bc-854f-46ad-9db5-21cf6f40fb21" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 915.732859] env[63028]: DEBUG nova.compute.manager [None req-f8fd64ce-97ac-48ac-9f55-22966efb1559 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: ed872f21-c2c4-4597-8c9e-9f8d2202b707] Stashing vm_state: active {{(pid=63028) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 915.780906] env[63028]: DEBUG oslo_vmware.api [None req-50b48420-9bf6-4b47-bba3-9fd80cfa788c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2735821, 'name': PowerOnVM_Task, 'duration_secs': 0.467852} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.781289] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-50b48420-9bf6-4b47-bba3-9fd80cfa788c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: addcf94a-1a56-49ff-8adb-3ce7f2d1e09e] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 915.781673] env[63028]: INFO nova.compute.manager [None req-50b48420-9bf6-4b47-bba3-9fd80cfa788c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: addcf94a-1a56-49ff-8adb-3ce7f2d1e09e] Took 6.24 seconds to spawn the instance on the hypervisor. 
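The 915.45x records above show the external-event handshake with Neutron: the compute manager tracks which events it expects for an instance (for example network-vif-plugged-&lt;port&gt;), an incoming notification pops the matching waiter, and an event with no registered waiter is logged as unexpected (here because the instance is still building). A deliberately simplified, hypothetical sketch of that correlation pattern follows; none of these names are Nova's real API.

    # Hypothetical sketch of the expect/pop event pattern described by the log records above.
    import threading

    _waiters = {}                 # (instance_uuid, event_name) -> threading.Event
    _waiters_lock = threading.Lock()

    def expect_event(instance_uuid, event_name):
        # Register interest before triggering the action that will emit the event,
        # then call .wait(timeout) on the returned Event.
        ev = threading.Event()
        with _waiters_lock:
            _waiters[(instance_uuid, event_name)] = ev
        return ev

    def pop_instance_event(instance_uuid, event_name):
        # Called when a notification arrives; wakes the waiter or reports it as unexpected.
        with _waiters_lock:
            ev = _waiters.pop((instance_uuid, event_name), None)
        if ev is None:
            print('unexpected event %s for instance %s' % (event_name, instance_uuid))
        else:
            ev.set()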
[ 915.781791] env[63028]: DEBUG nova.compute.manager [None req-50b48420-9bf6-4b47-bba3-9fd80cfa788c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: addcf94a-1a56-49ff-8adb-3ce7f2d1e09e] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 915.784029] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9de7f143-7e8f-44b0-96cf-4928b9a2dc49 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.892753] env[63028]: DEBUG nova.scheduler.client.report [None req-7fd88a7b-0c2e-4205-9b5e-08f7b653f3a2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 916.017934] env[63028]: DEBUG nova.network.neutron [req-9d998847-9865-457a-934a-a8d504ca7e97 req-5ca42023-e1ca-425f-8d19-27480931f0a4 service nova] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Instance cache missing network info. {{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 916.116451] env[63028]: DEBUG nova.network.neutron [req-9d998847-9865-457a-934a-a8d504ca7e97 req-5ca42023-e1ca-425f-8d19-27480931f0a4 service nova] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 916.259641] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f8fd64ce-97ac-48ac-9f55-22966efb1559 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 916.300213] env[63028]: INFO nova.compute.manager [None req-50b48420-9bf6-4b47-bba3-9fd80cfa788c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: addcf94a-1a56-49ff-8adb-3ce7f2d1e09e] Took 43.71 seconds to build instance. 
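Most of the "Acquiring/Acquired/Releasing lock" records in this trace ("compute_resources", "refresh_cache-&lt;uuid&gt;", "&lt;uuid&gt;-events", the image-cache vmdk path) come from oslo.concurrency's named locks, which serialize work per key and account for the waited/held durations printed by lockutils. A small sketch of the two usual forms, with placeholder function bodies:

    # Sketch only: function names and bodies are placeholders, not code from this deployment.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def update_usage(instance):
        # Decorator form: only one thread at a time may mutate the tracked resource totals.
        ...

    def refresh_network_cache(instance_uuid):
        # Context-manager form, keyed per instance, mirroring the "refresh_cache-<uuid>" messages.
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            ...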
[ 916.619063] env[63028]: DEBUG oslo_concurrency.lockutils [req-9d998847-9865-457a-934a-a8d504ca7e97 req-5ca42023-e1ca-425f-8d19-27480931f0a4 service nova] Releasing lock "refresh_cache-46dc76bc-854f-46ad-9db5-21cf6f40fb21" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 916.619803] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d0adac76-4bdd-4403-8347-182796e98476 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Acquired lock "refresh_cache-46dc76bc-854f-46ad-9db5-21cf6f40fb21" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 916.619803] env[63028]: DEBUG nova.network.neutron [None req-d0adac76-4bdd-4403-8347-182796e98476 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 916.802890] env[63028]: DEBUG oslo_concurrency.lockutils [None req-50b48420-9bf6-4b47-bba3-9fd80cfa788c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Lock "addcf94a-1a56-49ff-8adb-3ce7f2d1e09e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 45.222s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 916.902607] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7fd88a7b-0c2e-4205-9b5e-08f7b653f3a2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.591s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 916.902897] env[63028]: DEBUG nova.compute.manager [None req-7fd88a7b-0c2e-4205-9b5e-08f7b653f3a2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Resized/migrated instance is powered off. Setting vm_state to 'stopped'. 
{{(pid=63028) _confirm_resize /opt/stack/nova/nova/compute/manager.py:5376}} [ 916.907021] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8542636e-9564-4f6b-a973-f1b3ae0f71e1 tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.826s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 916.907021] env[63028]: DEBUG nova.objects.instance [None req-8542636e-9564-4f6b-a973-f1b3ae0f71e1 tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Lazy-loading 'resources' on Instance uuid d663c2df-ae54-4c50-a70f-e2180700c700 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 917.088181] env[63028]: INFO nova.compute.manager [None req-e330a647-c60e-4be4-a03b-b64be279ce3b tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] Rebuilding instance [ 917.151637] env[63028]: DEBUG nova.compute.manager [None req-e330a647-c60e-4be4-a03b-b64be279ce3b tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 917.153934] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-413dace2-6da8-4aa8-b8f5-c6071b1d1018 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.173628] env[63028]: DEBUG nova.network.neutron [None req-d0adac76-4bdd-4403-8347-182796e98476 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Instance cache missing network info. 
{{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 917.367989] env[63028]: DEBUG nova.network.neutron [None req-d0adac76-4bdd-4403-8347-182796e98476 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Updating instance_info_cache with network_info: [{"id": "11a8272a-a9ff-4d48-860e-8ee1b781a6ab", "address": "fa:16:3e:df:5b:21", "network": {"id": "a3d9dede-4b44-4172-9541-24cc42ad633f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-595575190-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "847e89af959a4266ab55c1d2106ba8fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8459aaf-d6a8-46fb-ad14-464ac3104695", "external-id": "nsx-vlan-transportzone-46", "segmentation_id": 46, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap11a8272a-a9", "ovs_interfaceid": "11a8272a-a9ff-4d48-860e-8ee1b781a6ab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 917.473547] env[63028]: INFO nova.scheduler.client.report [None req-7fd88a7b-0c2e-4205-9b5e-08f7b653f3a2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Deleted allocation for migration b2428275-4704-4b98-81dd-222c963eb311 [ 917.541907] env[63028]: DEBUG oslo_concurrency.lockutils [None req-987622b0-68b7-462a-9053-87ac800a435e tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Acquiring lock "addcf94a-1a56-49ff-8adb-3ce7f2d1e09e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 917.542221] env[63028]: DEBUG oslo_concurrency.lockutils [None req-987622b0-68b7-462a-9053-87ac800a435e tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Lock "addcf94a-1a56-49ff-8adb-3ce7f2d1e09e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 917.542441] env[63028]: DEBUG oslo_concurrency.lockutils [None req-987622b0-68b7-462a-9053-87ac800a435e tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Acquiring lock "addcf94a-1a56-49ff-8adb-3ce7f2d1e09e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 917.542637] env[63028]: DEBUG oslo_concurrency.lockutils [None req-987622b0-68b7-462a-9053-87ac800a435e tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Lock "addcf94a-1a56-49ff-8adb-3ce7f2d1e09e-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 917.542805] env[63028]: DEBUG oslo_concurrency.lockutils [None req-987622b0-68b7-462a-9053-87ac800a435e tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Lock "addcf94a-1a56-49ff-8adb-3ce7f2d1e09e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 917.545157] env[63028]: INFO nova.compute.manager [None req-987622b0-68b7-462a-9053-87ac800a435e tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: addcf94a-1a56-49ff-8adb-3ce7f2d1e09e] Terminating instance [ 917.826223] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a286fa0a-0296-4fb5-b645-26278e841362 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.833128] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34ee95ad-1ceb-40ac-b930-b97a971e4289 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.868674] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29944276-807b-4748-ab28-26d4cf11e52e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.871864] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d0adac76-4bdd-4403-8347-182796e98476 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Releasing lock "refresh_cache-46dc76bc-854f-46ad-9db5-21cf6f40fb21" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 917.872307] env[63028]: DEBUG nova.compute.manager [None req-d0adac76-4bdd-4403-8347-182796e98476 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Instance network_info: |[{"id": "11a8272a-a9ff-4d48-860e-8ee1b781a6ab", "address": "fa:16:3e:df:5b:21", "network": {"id": "a3d9dede-4b44-4172-9541-24cc42ad633f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-595575190-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "847e89af959a4266ab55c1d2106ba8fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8459aaf-d6a8-46fb-ad14-464ac3104695", "external-id": "nsx-vlan-transportzone-46", "segmentation_id": 46, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap11a8272a-a9", "ovs_interfaceid": "11a8272a-a9ff-4d48-860e-8ee1b781a6ab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": 
{}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 917.872841] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-d0adac76-4bdd-4403-8347-182796e98476 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:df:5b:21', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c8459aaf-d6a8-46fb-ad14-464ac3104695', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '11a8272a-a9ff-4d48-860e-8ee1b781a6ab', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 917.880292] env[63028]: DEBUG oslo.service.loopingcall [None req-d0adac76-4bdd-4403-8347-182796e98476 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 917.880928] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 917.881169] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-329c0a09-ed9c-485f-8233-62522a1a3414 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.900682] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aeeb6214-16cb-4686-8f35-0967e6fa1818 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.905882] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 917.905882] env[63028]: value = "task-2735822" [ 917.905882] env[63028]: _type = "Task" [ 917.905882] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.917197] env[63028]: DEBUG nova.compute.provider_tree [None req-8542636e-9564-4f6b-a973-f1b3ae0f71e1 tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 917.923846] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735822, 'name': CreateVM_Task} progress is 6%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.985845] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7fd88a7b-0c2e-4205-9b5e-08f7b653f3a2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Lock "c06813c4-472d-4bf9-84ec-0d01306bcd48" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 32.300s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 918.036025] env[63028]: DEBUG nova.objects.instance [None req-67b62f98-35f4-41af-a048-367cf53dcac8 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Lazy-loading 'flavor' on Instance uuid c06813c4-472d-4bf9-84ec-0d01306bcd48 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 918.049649] env[63028]: DEBUG nova.compute.manager [None req-987622b0-68b7-462a-9053-87ac800a435e tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: addcf94a-1a56-49ff-8adb-3ce7f2d1e09e] Start destroying the instance on the hypervisor. {{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 918.049960] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-987622b0-68b7-462a-9053-87ac800a435e tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: addcf94a-1a56-49ff-8adb-3ce7f2d1e09e] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 918.050951] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35d1a019-1f5b-4ff0-994a-d1b9dcbf9977 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.060199] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-987622b0-68b7-462a-9053-87ac800a435e tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: addcf94a-1a56-49ff-8adb-3ce7f2d1e09e] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 918.060479] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c3f0c62a-ef5d-4b7e-b5f1-40cd7711762c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.066539] env[63028]: DEBUG oslo_vmware.api [None req-987622b0-68b7-462a-9053-87ac800a435e tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Waiting for the task: (returnval){ [ 918.066539] env[63028]: value = "task-2735823" [ 918.066539] env[63028]: _type = "Task" [ 918.066539] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.074907] env[63028]: DEBUG oslo_vmware.api [None req-987622b0-68b7-462a-9053-87ac800a435e tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2735823, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.170891] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-e330a647-c60e-4be4-a03b-b64be279ce3b tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 918.171366] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c3ae5934-c7dd-43cc-b1a7-d79d10eddd96 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.179730] env[63028]: DEBUG oslo_vmware.api [None req-e330a647-c60e-4be4-a03b-b64be279ce3b tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] Waiting for the task: (returnval){ [ 918.179730] env[63028]: value = "task-2735824" [ 918.179730] env[63028]: _type = "Task" [ 918.179730] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.191504] env[63028]: DEBUG oslo_vmware.api [None req-e330a647-c60e-4be4-a03b-b64be279ce3b tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] Task: {'id': task-2735824, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.415980] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735822, 'name': CreateVM_Task, 'duration_secs': 0.313438} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.416222] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 918.416920] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d0adac76-4bdd-4403-8347-182796e98476 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 918.417103] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d0adac76-4bdd-4403-8347-182796e98476 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 918.417416] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d0adac76-4bdd-4403-8347-182796e98476 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 918.417663] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b1576502-095b-4efd-8ae6-bf72aed519b9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.420144] 
env[63028]: DEBUG nova.scheduler.client.report [None req-8542636e-9564-4f6b-a973-f1b3ae0f71e1 tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 918.424661] env[63028]: DEBUG oslo_vmware.api [None req-d0adac76-4bdd-4403-8347-182796e98476 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Waiting for the task: (returnval){ [ 918.424661] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5228059d-8ff4-422c-aca3-4f9a731d422d" [ 918.424661] env[63028]: _type = "Task" [ 918.424661] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.435571] env[63028]: DEBUG oslo_vmware.api [None req-d0adac76-4bdd-4403-8347-182796e98476 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5228059d-8ff4-422c-aca3-4f9a731d422d, 'name': SearchDatastore_Task, 'duration_secs': 0.009016} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.436445] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d0adac76-4bdd-4403-8347-182796e98476 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 918.436675] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-d0adac76-4bdd-4403-8347-182796e98476 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 918.436898] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d0adac76-4bdd-4403-8347-182796e98476 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 918.437053] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d0adac76-4bdd-4403-8347-182796e98476 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 918.437239] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-d0adac76-4bdd-4403-8347-182796e98476 
tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 918.437707] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fb01f19e-c17b-4a26-bc71-3956b4f0b6c6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.444882] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-d0adac76-4bdd-4403-8347-182796e98476 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 918.445073] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-d0adac76-4bdd-4403-8347-182796e98476 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 918.445948] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9b389423-1ba3-46e0-b78c-fa240266e6b2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.450680] env[63028]: DEBUG oslo_vmware.api [None req-d0adac76-4bdd-4403-8347-182796e98476 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Waiting for the task: (returnval){ [ 918.450680] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5241466f-8096-f0d7-d7a1-53421d901388" [ 918.450680] env[63028]: _type = "Task" [ 918.450680] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.458159] env[63028]: DEBUG oslo_vmware.api [None req-d0adac76-4bdd-4403-8347-182796e98476 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5241466f-8096-f0d7-d7a1-53421d901388, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.541041] env[63028]: DEBUG oslo_concurrency.lockutils [None req-67b62f98-35f4-41af-a048-367cf53dcac8 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Acquiring lock "refresh_cache-c06813c4-472d-4bf9-84ec-0d01306bcd48" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 918.541259] env[63028]: DEBUG oslo_concurrency.lockutils [None req-67b62f98-35f4-41af-a048-367cf53dcac8 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Acquired lock "refresh_cache-c06813c4-472d-4bf9-84ec-0d01306bcd48" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 918.541501] env[63028]: DEBUG nova.network.neutron [None req-67b62f98-35f4-41af-a048-367cf53dcac8 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 918.541723] env[63028]: DEBUG nova.objects.instance [None req-67b62f98-35f4-41af-a048-367cf53dcac8 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Lazy-loading 'info_cache' on Instance uuid c06813c4-472d-4bf9-84ec-0d01306bcd48 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 918.576161] env[63028]: DEBUG oslo_vmware.api [None req-987622b0-68b7-462a-9053-87ac800a435e tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2735823, 'name': PowerOffVM_Task, 'duration_secs': 0.184434} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.576456] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-987622b0-68b7-462a-9053-87ac800a435e tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: addcf94a-1a56-49ff-8adb-3ce7f2d1e09e] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 918.576630] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-987622b0-68b7-462a-9053-87ac800a435e tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: addcf94a-1a56-49ff-8adb-3ce7f2d1e09e] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 918.576885] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ed92b06d-372a-4487-9bc9-ec75fadb1360 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.634459] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-987622b0-68b7-462a-9053-87ac800a435e tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: addcf94a-1a56-49ff-8adb-3ce7f2d1e09e] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 918.634831] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-987622b0-68b7-462a-9053-87ac800a435e tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: addcf94a-1a56-49ff-8adb-3ce7f2d1e09e] Deleting contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 918.634831] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-987622b0-68b7-462a-9053-87ac800a435e tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Deleting the datastore file [datastore2] addcf94a-1a56-49ff-8adb-3ce7f2d1e09e {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 918.635120] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1e0b9bf2-0f91-46e2-a792-2a783fd39a75 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.641525] env[63028]: DEBUG oslo_vmware.api [None req-987622b0-68b7-462a-9053-87ac800a435e tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Waiting for the task: (returnval){ [ 918.641525] env[63028]: value = "task-2735826" [ 918.641525] env[63028]: _type = "Task" [ 918.641525] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.650362] env[63028]: DEBUG oslo_vmware.api [None req-987622b0-68b7-462a-9053-87ac800a435e tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2735826, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.700503] env[63028]: DEBUG oslo_vmware.api [None req-e330a647-c60e-4be4-a03b-b64be279ce3b tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] Task: {'id': task-2735824, 'name': PowerOffVM_Task, 'duration_secs': 0.219809} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.700503] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-e330a647-c60e-4be4-a03b-b64be279ce3b tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 918.701240] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-e330a647-c60e-4be4-a03b-b64be279ce3b tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 918.701532] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c9799550-463a-4051-9e93-f0245279abc0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.707947] env[63028]: DEBUG oslo_vmware.api [None req-e330a647-c60e-4be4-a03b-b64be279ce3b tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] Waiting for the task: (returnval){ [ 918.707947] env[63028]: value = "task-2735827" [ 918.707947] env[63028]: _type = "Task" [ 918.707947] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.718456] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-e330a647-c60e-4be4-a03b-b64be279ce3b tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] VM already powered off {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 918.719170] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-e330a647-c60e-4be4-a03b-b64be279ce3b tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] Volume detach. 
Driver type: vmdk {{(pid=63028) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 918.719170] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-e330a647-c60e-4be4-a03b-b64be279ce3b tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-550662', 'volume_id': '4246155e-0977-4f2a-b135-72a3849826ce', 'name': 'volume-4246155e-0977-4f2a-b135-72a3849826ce', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '600195de-ceb4-41a6-9ade-dda8b898e4db', 'attached_at': '', 'detached_at': '', 'volume_id': '4246155e-0977-4f2a-b135-72a3849826ce', 'serial': '4246155e-0977-4f2a-b135-72a3849826ce'} {{(pid=63028) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 918.719711] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ec1e5d8-ce32-4153-ad94-230e5aa81498 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.747849] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f096031e-6798-4da0-9dba-cbdd27aaf3d5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.753640] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7a46cff-6c7d-4b76-a09f-59e213cae2c2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.771625] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8b0fe87-3a61-49ba-9422-ed6a04e5a9f4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.788193] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-e330a647-c60e-4be4-a03b-b64be279ce3b tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] The volume has not been displaced from its original location: [datastore2] volume-4246155e-0977-4f2a-b135-72a3849826ce/volume-4246155e-0977-4f2a-b135-72a3849826ce.vmdk. No consolidation needed. 
{{(pid=63028) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 918.796160] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-e330a647-c60e-4be4-a03b-b64be279ce3b tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] Reconfiguring VM instance instance-0000002c to detach disk 2000 {{(pid=63028) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 918.796160] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f675b23d-cdf9-4913-b323-51000281bc89 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.811532] env[63028]: DEBUG oslo_vmware.api [None req-e330a647-c60e-4be4-a03b-b64be279ce3b tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] Waiting for the task: (returnval){ [ 918.811532] env[63028]: value = "task-2735828" [ 918.811532] env[63028]: _type = "Task" [ 918.811532] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.821325] env[63028]: DEBUG oslo_vmware.api [None req-e330a647-c60e-4be4-a03b-b64be279ce3b tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] Task: {'id': task-2735828, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.926458] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8542636e-9564-4f6b-a973-f1b3ae0f71e1 tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.021s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 918.929296] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f4e9a3cc-4341-4479-be04-a674123e04ce tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.960s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 918.929296] env[63028]: DEBUG nova.objects.instance [None req-f4e9a3cc-4341-4479-be04-a674123e04ce tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Lazy-loading 'resources' on Instance uuid 70147f2f-0b5e-4343-84e4-8bc195a5485d {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 918.951934] env[63028]: INFO nova.scheduler.client.report [None req-8542636e-9564-4f6b-a973-f1b3ae0f71e1 tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Deleted allocations for instance d663c2df-ae54-4c50-a70f-e2180700c700 [ 918.964564] env[63028]: DEBUG oslo_vmware.api [None req-d0adac76-4bdd-4403-8347-182796e98476 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5241466f-8096-f0d7-d7a1-53421d901388, 'name': SearchDatastore_Task, 'duration_secs': 0.007655} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.965156] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-09a65a8b-5553-45d4-bad6-25158bd1b73e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.970066] env[63028]: DEBUG oslo_vmware.api [None req-d0adac76-4bdd-4403-8347-182796e98476 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Waiting for the task: (returnval){ [ 918.970066] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5259796a-20b2-773f-5feb-6f0149aa632d" [ 918.970066] env[63028]: _type = "Task" [ 918.970066] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.977744] env[63028]: DEBUG oslo_vmware.api [None req-d0adac76-4bdd-4403-8347-182796e98476 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5259796a-20b2-773f-5feb-6f0149aa632d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.045206] env[63028]: DEBUG nova.objects.base [None req-67b62f98-35f4-41af-a048-367cf53dcac8 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=63028) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 919.151145] env[63028]: DEBUG oslo_vmware.api [None req-987622b0-68b7-462a-9053-87ac800a435e tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2735826, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.165695} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 919.151408] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-987622b0-68b7-462a-9053-87ac800a435e tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 919.151644] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-987622b0-68b7-462a-9053-87ac800a435e tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: addcf94a-1a56-49ff-8adb-3ce7f2d1e09e] Deleted contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 919.152525] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-987622b0-68b7-462a-9053-87ac800a435e tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: addcf94a-1a56-49ff-8adb-3ce7f2d1e09e] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 919.152525] env[63028]: INFO nova.compute.manager [None req-987622b0-68b7-462a-9053-87ac800a435e tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: addcf94a-1a56-49ff-8adb-3ce7f2d1e09e] Took 1.10 seconds to destroy the instance on the hypervisor. 
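[editor's note] The recurring 'Acquiring lock' / 'acquired ... waited N s' / '"released" ... held N s' DEBUG entries around this point come from oslo.concurrency's lockutils, which serializes the resource tracker's "compute_resources" critical section within the nova-compute process. The sketch below is purely illustrative and is not Nova source code: the lock name matches the log, but the guarded work is a hypothetical placeholder, and only the public oslo_concurrency API (lockutils.synchronized and lockutils.lock) is assumed.

    # Illustrative sketch only (not Nova code): the oslo.concurrency locking
    # pattern that produces the "Acquiring lock" / "acquired ... waited" /
    # '"released" ... held' DEBUG lines in this log. The lock name matches the
    # log; the body is a hypothetical placeholder.
    import time

    from oslo_concurrency import lockutils

    @lockutils.synchronized("compute_resources")
    def update_usage():
        # Critical section: one caller per process at a time. The DEBUG entries
        # record how long each caller waited for the lock and how long it held it.
        time.sleep(0.1)

    update_usage()

    # Equivalent context-manager form:
    with lockutils.lock("compute_resources"):
        time.sleep(0.1)

Read this way, the long waits recorded nearby (for example 25.960 s and 26.073 s on "compute_resources") indicate other request contexts holding the same per-process lock while instances are torn down, not a failure by themselves.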
[ 919.152525] env[63028]: DEBUG oslo.service.loopingcall [None req-987622b0-68b7-462a-9053-87ac800a435e tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 919.152525] env[63028]: DEBUG nova.compute.manager [-] [instance: addcf94a-1a56-49ff-8adb-3ce7f2d1e09e] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 919.152784] env[63028]: DEBUG nova.network.neutron [-] [instance: addcf94a-1a56-49ff-8adb-3ce7f2d1e09e] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 919.329171] env[63028]: DEBUG oslo_vmware.api [None req-e330a647-c60e-4be4-a03b-b64be279ce3b tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] Task: {'id': task-2735828, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.462127] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8542636e-9564-4f6b-a973-f1b3ae0f71e1 tempest-VolumesAdminNegativeTest-34143088 tempest-VolumesAdminNegativeTest-34143088-project-member] Lock "d663c2df-ae54-4c50-a70f-e2180700c700" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.016s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 919.486405] env[63028]: DEBUG oslo_vmware.api [None req-d0adac76-4bdd-4403-8347-182796e98476 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5259796a-20b2-773f-5feb-6f0149aa632d, 'name': SearchDatastore_Task, 'duration_secs': 0.008956} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 919.489822] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d0adac76-4bdd-4403-8347-182796e98476 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 919.490704] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0adac76-4bdd-4403-8347-182796e98476 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] 46dc76bc-854f-46ad-9db5-21cf6f40fb21/46dc76bc-854f-46ad-9db5-21cf6f40fb21.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 919.490704] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5b72ebe4-1c94-4a5c-9179-db74d25adb73 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.499622] env[63028]: DEBUG oslo_vmware.api [None req-d0adac76-4bdd-4403-8347-182796e98476 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Waiting for the task: (returnval){ [ 919.499622] env[63028]: value = "task-2735829" [ 919.499622] env[63028]: _type = "Task" [ 919.499622] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 919.508858] env[63028]: DEBUG oslo_vmware.api [None req-d0adac76-4bdd-4403-8347-182796e98476 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2735829, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.598305] env[63028]: DEBUG nova.compute.manager [req-8fc8f6f2-d7ca-4cb0-b058-d7bee9836a4f req-1ed3a16f-8a76-4537-9155-2ee7a2d238a8 service nova] [instance: addcf94a-1a56-49ff-8adb-3ce7f2d1e09e] Received event network-vif-deleted-8ed03cbb-e718-4686-a803-4082f3e01d60 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 919.598516] env[63028]: INFO nova.compute.manager [req-8fc8f6f2-d7ca-4cb0-b058-d7bee9836a4f req-1ed3a16f-8a76-4537-9155-2ee7a2d238a8 service nova] [instance: addcf94a-1a56-49ff-8adb-3ce7f2d1e09e] Neutron deleted interface 8ed03cbb-e718-4686-a803-4082f3e01d60; detaching it from the instance and deleting it from the info cache [ 919.598750] env[63028]: DEBUG nova.network.neutron [req-8fc8f6f2-d7ca-4cb0-b058-d7bee9836a4f req-1ed3a16f-8a76-4537-9155-2ee7a2d238a8 service nova] [instance: addcf94a-1a56-49ff-8adb-3ce7f2d1e09e] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 919.830368] env[63028]: DEBUG oslo_vmware.api [None req-e330a647-c60e-4be4-a03b-b64be279ce3b tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] Task: {'id': task-2735828, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.855379] env[63028]: DEBUG nova.network.neutron [None req-67b62f98-35f4-41af-a048-367cf53dcac8 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Updating instance_info_cache with network_info: [{"id": "e9be02f8-7ea6-45eb-a1cb-65fb95285caf", "address": "fa:16:3e:cc:b1:42", "network": {"id": "49670a42-3caa-4492-8b32-f16f3fe77f4b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1158317332-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.229", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b4dcaef840f940bda057d0371cdc5adb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4df917f7-847a-4c0e-b0e3-69a52e4a1554", "external-id": "cl2-zone-457", "segmentation_id": 457, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape9be02f8-7e", "ovs_interfaceid": "e9be02f8-7ea6-45eb-a1cb-65fb95285caf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 919.857991] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42bb2a50-8760-4755-b826-c76d8ee17c07 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.865945] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27a428c2-a0e1-4b42-b7dc-7ace1c31f7b3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.897160] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd4905a3-bd73-4c6a-b097-5edee17d2962 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.906015] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec80c44a-02a3-4288-83c6-f42e81152ada {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.919452] env[63028]: DEBUG nova.compute.provider_tree [None req-f4e9a3cc-4341-4479-be04-a674123e04ce tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 920.014789] env[63028]: DEBUG oslo_vmware.api [None req-d0adac76-4bdd-4403-8347-182796e98476 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2735829, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.084304] env[63028]: DEBUG nova.network.neutron [-] [instance: addcf94a-1a56-49ff-8adb-3ce7f2d1e09e] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 920.101036] env[63028]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-097e0f30-c84e-4dc0-b3bd-072a0317e993 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.113347] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e595aa8f-7790-4e0e-adc2-715d3aa86c69 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.156692] env[63028]: DEBUG nova.compute.manager [req-8fc8f6f2-d7ca-4cb0-b058-d7bee9836a4f req-1ed3a16f-8a76-4537-9155-2ee7a2d238a8 service nova] [instance: addcf94a-1a56-49ff-8adb-3ce7f2d1e09e] Detach interface failed, port_id=8ed03cbb-e718-4686-a803-4082f3e01d60, reason: Instance addcf94a-1a56-49ff-8adb-3ce7f2d1e09e could not be found. {{(pid=63028) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 920.324578] env[63028]: DEBUG oslo_vmware.api [None req-e330a647-c60e-4be4-a03b-b64be279ce3b tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] Task: {'id': task-2735828, 'name': ReconfigVM_Task, 'duration_secs': 1.193724} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 920.324910] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-e330a647-c60e-4be4-a03b-b64be279ce3b tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] Reconfigured VM instance instance-0000002c to detach disk 2000 {{(pid=63028) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 920.330286] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e82908f3-50fa-48ad-be17-d07d0d4c1903 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.345913] env[63028]: DEBUG oslo_vmware.api [None req-e330a647-c60e-4be4-a03b-b64be279ce3b tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] Waiting for the task: (returnval){ [ 920.345913] env[63028]: value = "task-2735830" [ 920.345913] env[63028]: _type = "Task" [ 920.345913] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 920.354671] env[63028]: DEBUG oslo_vmware.api [None req-e330a647-c60e-4be4-a03b-b64be279ce3b tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] Task: {'id': task-2735830, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.361567] env[63028]: DEBUG oslo_concurrency.lockutils [None req-67b62f98-35f4-41af-a048-367cf53dcac8 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Releasing lock "refresh_cache-c06813c4-472d-4bf9-84ec-0d01306bcd48" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 920.422663] env[63028]: DEBUG nova.scheduler.client.report [None req-f4e9a3cc-4341-4479-be04-a674123e04ce tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 920.511766] env[63028]: DEBUG oslo_vmware.api [None req-d0adac76-4bdd-4403-8347-182796e98476 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2735829, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.930697} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 920.512043] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0adac76-4bdd-4403-8347-182796e98476 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] 46dc76bc-854f-46ad-9db5-21cf6f40fb21/46dc76bc-854f-46ad-9db5-21cf6f40fb21.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 920.512296] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-d0adac76-4bdd-4403-8347-182796e98476 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 920.512562] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a651ce24-29e9-4883-9071-88c877c07266 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.520168] env[63028]: DEBUG oslo_vmware.api [None req-d0adac76-4bdd-4403-8347-182796e98476 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Waiting for the task: (returnval){ [ 920.520168] env[63028]: value = "task-2735831" [ 920.520168] env[63028]: _type = "Task" [ 920.520168] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 920.526591] env[63028]: DEBUG oslo_vmware.api [None req-d0adac76-4bdd-4403-8347-182796e98476 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2735831, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.586940] env[63028]: INFO nova.compute.manager [-] [instance: addcf94a-1a56-49ff-8adb-3ce7f2d1e09e] Took 1.43 seconds to deallocate network for instance. [ 920.855781] env[63028]: DEBUG oslo_vmware.api [None req-e330a647-c60e-4be4-a03b-b64be279ce3b tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] Task: {'id': task-2735830, 'name': ReconfigVM_Task, 'duration_secs': 0.153506} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 920.856116] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-e330a647-c60e-4be4-a03b-b64be279ce3b tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-550662', 'volume_id': '4246155e-0977-4f2a-b135-72a3849826ce', 'name': 'volume-4246155e-0977-4f2a-b135-72a3849826ce', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '600195de-ceb4-41a6-9ade-dda8b898e4db', 'attached_at': '', 'detached_at': '', 'volume_id': '4246155e-0977-4f2a-b135-72a3849826ce', 'serial': '4246155e-0977-4f2a-b135-72a3849826ce'} {{(pid=63028) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 920.856493] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-e330a647-c60e-4be4-a03b-b64be279ce3b tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 920.857276] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac695f94-c118-4592-8079-3a4153526846 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.863570] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-e330a647-c60e-4be4-a03b-b64be279ce3b tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 920.865648] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6592cb49-5092-45d6-bc85-4bf57f3250e0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.930180] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f4e9a3cc-4341-4479-be04-a674123e04ce tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 920.934166] env[63028]: DEBUG oslo_concurrency.lockutils [None req-656b4c07-6b90-4bb5-896d-d42945654795 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.073s 
{{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 920.934588] env[63028]: DEBUG nova.objects.instance [None req-656b4c07-6b90-4bb5-896d-d42945654795 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Lazy-loading 'resources' on Instance uuid 15326f55-2db8-47c3-b1fd-ce8ba1174c79 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 920.945697] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-e330a647-c60e-4be4-a03b-b64be279ce3b tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 920.947093] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-e330a647-c60e-4be4-a03b-b64be279ce3b tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] Deleting contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 920.947339] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-e330a647-c60e-4be4-a03b-b64be279ce3b tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] Deleting the datastore file [datastore2] 600195de-ceb4-41a6-9ade-dda8b898e4db {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 920.947640] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5e59c425-d5ea-4050-aced-033ec8416784 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.959549] env[63028]: DEBUG oslo_vmware.api [None req-e330a647-c60e-4be4-a03b-b64be279ce3b tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] Waiting for the task: (returnval){ [ 920.959549] env[63028]: value = "task-2735833" [ 920.959549] env[63028]: _type = "Task" [ 920.959549] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 920.969781] env[63028]: DEBUG oslo_vmware.api [None req-e330a647-c60e-4be4-a03b-b64be279ce3b tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] Task: {'id': task-2735833, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.971615] env[63028]: INFO nova.scheduler.client.report [None req-f4e9a3cc-4341-4479-be04-a674123e04ce tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Deleted allocations for instance 70147f2f-0b5e-4343-84e4-8bc195a5485d [ 921.030710] env[63028]: DEBUG oslo_vmware.api [None req-d0adac76-4bdd-4403-8347-182796e98476 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2735831, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.163999} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 921.031131] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-d0adac76-4bdd-4403-8347-182796e98476 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 921.031749] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97f349b8-2359-4f81-9460-d050d8860c12 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.058059] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-d0adac76-4bdd-4403-8347-182796e98476 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Reconfiguring VM instance instance-0000004e to attach disk [datastore2] 46dc76bc-854f-46ad-9db5-21cf6f40fb21/46dc76bc-854f-46ad-9db5-21cf6f40fb21.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 921.058746] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4e32ca21-3720-47b4-b41f-806eb439b724 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.080900] env[63028]: DEBUG oslo_vmware.api [None req-d0adac76-4bdd-4403-8347-182796e98476 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Waiting for the task: (returnval){ [ 921.080900] env[63028]: value = "task-2735834" [ 921.080900] env[63028]: _type = "Task" [ 921.080900] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.089404] env[63028]: DEBUG oslo_vmware.api [None req-d0adac76-4bdd-4403-8347-182796e98476 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2735834, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.099813] env[63028]: DEBUG oslo_concurrency.lockutils [None req-987622b0-68b7-462a-9053-87ac800a435e tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 921.368064] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-67b62f98-35f4-41af-a048-367cf53dcac8 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 921.369299] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9fd3f3ad-117b-4472-b366-c6aa9587f829 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.377550] env[63028]: DEBUG oslo_vmware.api [None req-67b62f98-35f4-41af-a048-367cf53dcac8 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Waiting for the task: (returnval){ [ 921.377550] env[63028]: value = "task-2735835" [ 921.377550] env[63028]: _type = "Task" [ 921.377550] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.389720] env[63028]: DEBUG oslo_vmware.api [None req-67b62f98-35f4-41af-a048-367cf53dcac8 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2735835, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.471898] env[63028]: DEBUG oslo_vmware.api [None req-e330a647-c60e-4be4-a03b-b64be279ce3b tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] Task: {'id': task-2735833, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.127612} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 921.472195] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-e330a647-c60e-4be4-a03b-b64be279ce3b tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 921.472385] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-e330a647-c60e-4be4-a03b-b64be279ce3b tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] Deleted contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 921.472814] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-e330a647-c60e-4be4-a03b-b64be279ce3b tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 921.485758] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f4e9a3cc-4341-4479-be04-a674123e04ce tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Lock "70147f2f-0b5e-4343-84e4-8bc195a5485d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.974s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 921.551187] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-e330a647-c60e-4be4-a03b-b64be279ce3b tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] Volume detach. Driver type: vmdk {{(pid=63028) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 921.551578] env[63028]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4e3717ed-bd6e-4f31-82a3-141ceeb2ec82 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.567015] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1adc9c0d-38a2-4549-a185-5b6569eee79c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.594065] env[63028]: DEBUG oslo_vmware.api [None req-d0adac76-4bdd-4403-8347-182796e98476 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2735834, 'name': ReconfigVM_Task, 'duration_secs': 0.286385} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 921.595016] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-d0adac76-4bdd-4403-8347-182796e98476 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Reconfigured VM instance instance-0000004e to attach disk [datastore2] 46dc76bc-854f-46ad-9db5-21cf6f40fb21/46dc76bc-854f-46ad-9db5-21cf6f40fb21.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 921.595167] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-52e56eab-269e-494e-a661-4decc44da80a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.622017] env[63028]: DEBUG oslo_vmware.api [None req-d0adac76-4bdd-4403-8347-182796e98476 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Waiting for the task: (returnval){ [ 921.622017] env[63028]: value = "task-2735836" [ 921.622017] env[63028]: _type = "Task" [ 921.622017] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.623709] env[63028]: ERROR nova.compute.manager [None req-e330a647-c60e-4be4-a03b-b64be279ce3b tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] Failed to detach volume 4246155e-0977-4f2a-b135-72a3849826ce from /dev/sda: nova.exception.InstanceNotFound: Instance 600195de-ceb4-41a6-9ade-dda8b898e4db could not be found. [ 921.623709] env[63028]: ERROR nova.compute.manager [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] Traceback (most recent call last): [ 921.623709] env[63028]: ERROR nova.compute.manager [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] File "/opt/stack/nova/nova/compute/manager.py", line 4185, in _do_rebuild_instance [ 921.623709] env[63028]: ERROR nova.compute.manager [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] self.driver.rebuild(**kwargs) [ 921.623709] env[63028]: ERROR nova.compute.manager [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] File "/opt/stack/nova/nova/virt/driver.py", line 497, in rebuild [ 921.623709] env[63028]: ERROR nova.compute.manager [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] raise NotImplementedError() [ 921.623709] env[63028]: ERROR nova.compute.manager [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] NotImplementedError [ 921.623709] env[63028]: ERROR nova.compute.manager [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] [ 921.623709] env[63028]: ERROR nova.compute.manager [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] During handling of the above exception, another exception occurred: [ 921.623709] env[63028]: ERROR nova.compute.manager [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] [ 921.623709] env[63028]: ERROR nova.compute.manager [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] Traceback (most recent call last): [ 921.623709] env[63028]: ERROR nova.compute.manager [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] File "/opt/stack/nova/nova/compute/manager.py", line 3608, in _detach_root_volume [ 921.623709] env[63028]: ERROR nova.compute.manager [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] self.driver.detach_volume(context, old_connection_info, [ 921.623709] env[63028]: ERROR 
nova.compute.manager [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 561, in detach_volume [ 921.623709] env[63028]: ERROR nova.compute.manager [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] return self._volumeops.detach_volume(connection_info, instance) [ 921.623709] env[63028]: ERROR nova.compute.manager [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume [ 921.623709] env[63028]: ERROR nova.compute.manager [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] self._detach_volume_vmdk(connection_info, instance) [ 921.623709] env[63028]: ERROR nova.compute.manager [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk [ 921.623709] env[63028]: ERROR nova.compute.manager [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] vm_ref = vm_util.get_vm_ref(self._session, instance) [ 921.623709] env[63028]: ERROR nova.compute.manager [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1145, in get_vm_ref [ 921.623709] env[63028]: ERROR nova.compute.manager [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] stable_ref.fetch_moref(session) [ 921.623709] env[63028]: ERROR nova.compute.manager [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1136, in fetch_moref [ 921.623709] env[63028]: ERROR nova.compute.manager [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] raise exception.InstanceNotFound(instance_id=self._uuid) [ 921.623709] env[63028]: ERROR nova.compute.manager [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] nova.exception.InstanceNotFound: Instance 600195de-ceb4-41a6-9ade-dda8b898e4db could not be found. [ 921.623709] env[63028]: ERROR nova.compute.manager [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] [ 921.641695] env[63028]: DEBUG oslo_vmware.api [None req-d0adac76-4bdd-4403-8347-182796e98476 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2735836, 'name': Rename_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.836221] env[63028]: DEBUG nova.compute.utils [None req-e330a647-c60e-4be4-a03b-b64be279ce3b tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] Build of instance 600195de-ceb4-41a6-9ade-dda8b898e4db aborted: Failed to rebuild volume backed instance. {{(pid=63028) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 921.845395] env[63028]: ERROR nova.compute.manager [None req-e330a647-c60e-4be4-a03b-b64be279ce3b tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] Setting instance vm_state to ERROR: nova.exception.BuildAbortException: Build of instance 600195de-ceb4-41a6-9ade-dda8b898e4db aborted: Failed to rebuild volume backed instance. 
[ 921.845395] env[63028]: ERROR nova.compute.manager [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] Traceback (most recent call last): [ 921.845395] env[63028]: ERROR nova.compute.manager [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] File "/opt/stack/nova/nova/compute/manager.py", line 4185, in _do_rebuild_instance [ 921.845395] env[63028]: ERROR nova.compute.manager [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] self.driver.rebuild(**kwargs) [ 921.845395] env[63028]: ERROR nova.compute.manager [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] File "/opt/stack/nova/nova/virt/driver.py", line 497, in rebuild [ 921.845395] env[63028]: ERROR nova.compute.manager [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] raise NotImplementedError() [ 921.845395] env[63028]: ERROR nova.compute.manager [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] NotImplementedError [ 921.845395] env[63028]: ERROR nova.compute.manager [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] [ 921.845395] env[63028]: ERROR nova.compute.manager [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] During handling of the above exception, another exception occurred: [ 921.845395] env[63028]: ERROR nova.compute.manager [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] [ 921.845395] env[63028]: ERROR nova.compute.manager [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] Traceback (most recent call last): [ 921.845395] env[63028]: ERROR nova.compute.manager [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] File "/opt/stack/nova/nova/compute/manager.py", line 3643, in _rebuild_volume_backed_instance [ 921.845395] env[63028]: ERROR nova.compute.manager [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] self._detach_root_volume(context, instance, root_bdm) [ 921.845395] env[63028]: ERROR nova.compute.manager [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] File "/opt/stack/nova/nova/compute/manager.py", line 3622, in _detach_root_volume [ 921.845395] env[63028]: ERROR nova.compute.manager [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] with excutils.save_and_reraise_exception(): [ 921.845395] env[63028]: ERROR nova.compute.manager [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 921.845395] env[63028]: ERROR nova.compute.manager [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] self.force_reraise() [ 921.845395] env[63028]: ERROR nova.compute.manager [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 921.845395] env[63028]: ERROR nova.compute.manager [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] raise self.value [ 921.845395] env[63028]: ERROR nova.compute.manager [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] File "/opt/stack/nova/nova/compute/manager.py", line 3608, in _detach_root_volume [ 921.845395] env[63028]: ERROR nova.compute.manager [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] self.driver.detach_volume(context, old_connection_info, [ 921.845395] env[63028]: ERROR nova.compute.manager [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 561, in detach_volume [ 921.845395] env[63028]: ERROR nova.compute.manager [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] return self._volumeops.detach_volume(connection_info, instance) [ 921.845395] env[63028]: ERROR nova.compute.manager [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] File 
"/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume [ 921.845395] env[63028]: ERROR nova.compute.manager [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] self._detach_volume_vmdk(connection_info, instance) [ 921.845395] env[63028]: ERROR nova.compute.manager [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk [ 921.845395] env[63028]: ERROR nova.compute.manager [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] vm_ref = vm_util.get_vm_ref(self._session, instance) [ 921.845395] env[63028]: ERROR nova.compute.manager [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1145, in get_vm_ref [ 921.845395] env[63028]: ERROR nova.compute.manager [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] stable_ref.fetch_moref(session) [ 921.845395] env[63028]: ERROR nova.compute.manager [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1136, in fetch_moref [ 921.845395] env[63028]: ERROR nova.compute.manager [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] raise exception.InstanceNotFound(instance_id=self._uuid) [ 921.845395] env[63028]: ERROR nova.compute.manager [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] nova.exception.InstanceNotFound: Instance 600195de-ceb4-41a6-9ade-dda8b898e4db could not be found. [ 921.845395] env[63028]: ERROR nova.compute.manager [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] [ 921.845395] env[63028]: ERROR nova.compute.manager [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] During handling of the above exception, another exception occurred: [ 921.845395] env[63028]: ERROR nova.compute.manager [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] [ 921.845395] env[63028]: ERROR nova.compute.manager [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] Traceback (most recent call last): [ 921.845395] env[63028]: ERROR nova.compute.manager [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] File "/opt/stack/nova/nova/compute/manager.py", line 11382, in _error_out_instance_on_exception [ 921.845395] env[63028]: ERROR nova.compute.manager [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] yield [ 921.845395] env[63028]: ERROR nova.compute.manager [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] File "/opt/stack/nova/nova/compute/manager.py", line 3911, in rebuild_instance [ 921.845395] env[63028]: ERROR nova.compute.manager [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] self._do_rebuild_instance_with_claim( [ 921.847408] env[63028]: ERROR nova.compute.manager [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] File "/opt/stack/nova/nova/compute/manager.py", line 3997, in _do_rebuild_instance_with_claim [ 921.847408] env[63028]: ERROR nova.compute.manager [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] self._do_rebuild_instance( [ 921.847408] env[63028]: ERROR nova.compute.manager [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] File "/opt/stack/nova/nova/compute/manager.py", line 4189, in _do_rebuild_instance [ 921.847408] env[63028]: ERROR nova.compute.manager [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] self._rebuild_default_impl(**kwargs) [ 921.847408] env[63028]: ERROR nova.compute.manager [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] File "/opt/stack/nova/nova/compute/manager.py", line 3766, in _rebuild_default_impl [ 921.847408] env[63028]: ERROR nova.compute.manager [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] self._rebuild_volume_backed_instance( [ 
921.847408] env[63028]: ERROR nova.compute.manager [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] File "/opt/stack/nova/nova/compute/manager.py", line 3658, in _rebuild_volume_backed_instance [ 921.847408] env[63028]: ERROR nova.compute.manager [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] raise exception.BuildAbortException( [ 921.847408] env[63028]: ERROR nova.compute.manager [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] nova.exception.BuildAbortException: Build of instance 600195de-ceb4-41a6-9ade-dda8b898e4db aborted: Failed to rebuild volume backed instance. [ 921.847408] env[63028]: ERROR nova.compute.manager [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] [ 921.894379] env[63028]: DEBUG oslo_vmware.api [None req-67b62f98-35f4-41af-a048-367cf53dcac8 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2735835, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.005455] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93de7aef-ec8e-47a2-b66c-1da2a7c9b1cb {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.015107] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6a63655-25fd-4ce8-9c3c-f706d3a863f4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.047503] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-389c5798-fe0c-4906-bab5-da8837a4c690 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.055992] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-941e40b3-de79-4673-9e10-66246942d6ab {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.070054] env[63028]: DEBUG nova.compute.provider_tree [None req-656b4c07-6b90-4bb5-896d-d42945654795 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 922.139558] env[63028]: DEBUG oslo_vmware.api [None req-d0adac76-4bdd-4403-8347-182796e98476 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2735836, 'name': Rename_Task, 'duration_secs': 0.215681} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.139887] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0adac76-4bdd-4403-8347-182796e98476 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 922.140191] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4d233cd5-46ef-4a00-8367-0e87fd43e375 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.147182] env[63028]: DEBUG oslo_vmware.api [None req-d0adac76-4bdd-4403-8347-182796e98476 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Waiting for the task: (returnval){ [ 922.147182] env[63028]: value = "task-2735837" [ 922.147182] env[63028]: _type = "Task" [ 922.147182] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.160670] env[63028]: DEBUG oslo_vmware.api [None req-d0adac76-4bdd-4403-8347-182796e98476 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2735837, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.390810] env[63028]: DEBUG oslo_vmware.api [None req-67b62f98-35f4-41af-a048-367cf53dcac8 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2735835, 'name': PowerOnVM_Task, 'duration_secs': 0.546515} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.391093] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-67b62f98-35f4-41af-a048-367cf53dcac8 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 922.391305] env[63028]: DEBUG nova.compute.manager [None req-67b62f98-35f4-41af-a048-367cf53dcac8 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 922.392132] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbc6a42d-26c7-4395-ac0b-e5e272e1ceba {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.574271] env[63028]: DEBUG nova.scheduler.client.report [None req-656b4c07-6b90-4bb5-896d-d42945654795 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 922.660382] env[63028]: DEBUG oslo_vmware.api [None req-d0adac76-4bdd-4403-8347-182796e98476 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2735837, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.085750] env[63028]: DEBUG oslo_concurrency.lockutils [None req-656b4c07-6b90-4bb5-896d-d42945654795 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.149s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 923.087374] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d9c4e743-0ad3-452d-acd1-8a340ea80d72 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.808s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 923.087651] env[63028]: DEBUG nova.objects.instance [None req-d9c4e743-0ad3-452d-acd1-8a340ea80d72 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Lazy-loading 'resources' on Instance uuid 3b90dbb8-66ce-435f-beae-5464720bfb3e {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 923.123241] env[63028]: INFO nova.scheduler.client.report [None req-656b4c07-6b90-4bb5-896d-d42945654795 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Deleted allocations for instance 15326f55-2db8-47c3-b1fd-ce8ba1174c79 [ 923.157803] env[63028]: DEBUG oslo_vmware.api [None req-d0adac76-4bdd-4403-8347-182796e98476 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2735837, 'name': PowerOnVM_Task, 'duration_secs': 0.911128} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 923.159922] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0adac76-4bdd-4403-8347-182796e98476 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 923.159922] env[63028]: INFO nova.compute.manager [None req-d0adac76-4bdd-4403-8347-182796e98476 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Took 8.87 seconds to spawn the instance on the hypervisor. 
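The Rename_Task and PowerOnVM_Task entries above show the usual shape of a long-running vCenter operation in this log: the driver invokes the task, then oslo.vmware polls the task object and logs its reported progress ("progress is 0% ... 66%") until the task reaches a terminal state and the duration is recorded. The following is only a minimal sketch of that polling pattern, not the oslo.vmware implementation; the `session.get_task_info()` helper and the attribute names on `info` are assumed placeholders standing in for the real vSphere task info object.

    import time

    class TaskError(Exception):
        """Raised when a vCenter task ends in the 'error' state."""

    def wait_for_task(session, task_ref, poll_interval=0.5):
        # Poll the task's info until it reaches a terminal state.
        # 'session' is a hypothetical wrapper exposing get_task_info();
        # the driver in this log goes through oslo.vmware instead.
        while True:
            info = session.get_task_info(task_ref)
            if info.state == 'running':
                # Corresponds to the "progress is N%" DEBUG lines above.
                print(f"Task {info.key} ({info.name}) progress is {info.progress}%")
            elif info.state == 'success':
                return info.result
            elif info.state == 'error':
                raise TaskError(info.error)
            time.sleep(poll_interval)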
[ 923.159922] env[63028]: DEBUG nova.compute.manager [None req-d0adac76-4bdd-4403-8347-182796e98476 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 923.159922] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caf03fe8-c47f-4bde-b1c0-c0843b2129e0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.464085] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5bb9e4c9-b947-4815-9882-bdf6878d6f4d tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Acquiring lock "c06813c4-472d-4bf9-84ec-0d01306bcd48" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 923.464379] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5bb9e4c9-b947-4815-9882-bdf6878d6f4d tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Lock "c06813c4-472d-4bf9-84ec-0d01306bcd48" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 923.464592] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5bb9e4c9-b947-4815-9882-bdf6878d6f4d tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Acquiring lock "c06813c4-472d-4bf9-84ec-0d01306bcd48-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 923.464768] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5bb9e4c9-b947-4815-9882-bdf6878d6f4d tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Lock "c06813c4-472d-4bf9-84ec-0d01306bcd48-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 923.464934] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5bb9e4c9-b947-4815-9882-bdf6878d6f4d tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Lock "c06813c4-472d-4bf9-84ec-0d01306bcd48-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 923.469440] env[63028]: INFO nova.compute.manager [None req-5bb9e4c9-b947-4815-9882-bdf6878d6f4d tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Terminating instance [ 923.637505] env[63028]: DEBUG oslo_concurrency.lockutils [None req-656b4c07-6b90-4bb5-896d-d42945654795 tempest-ServersAdminNegativeTestJSON-1478593092 tempest-ServersAdminNegativeTestJSON-1478593092-project-member] Lock "15326f55-2db8-47c3-b1fd-ce8ba1174c79" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: 
held 32.312s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 923.683244] env[63028]: INFO nova.compute.manager [None req-d0adac76-4bdd-4403-8347-182796e98476 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Took 44.89 seconds to build instance. [ 923.847710] env[63028]: DEBUG oslo_concurrency.lockutils [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Acquiring lock "bb2b405e-6207-4718-9485-0271d26c160f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 923.848472] env[63028]: DEBUG oslo_concurrency.lockutils [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Lock "bb2b405e-6207-4718-9485-0271d26c160f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 923.871752] env[63028]: DEBUG oslo_concurrency.lockutils [None req-e330a647-c60e-4be4-a03b-b64be279ce3b tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 923.927327] env[63028]: DEBUG oslo_concurrency.lockutils [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Acquiring lock "a97224e8-d69b-4c62-ab96-7cef037ef39b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 923.928391] env[63028]: DEBUG oslo_concurrency.lockutils [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Lock "a97224e8-d69b-4c62-ab96-7cef037ef39b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 923.976829] env[63028]: DEBUG nova.compute.manager [None req-5bb9e4c9-b947-4815-9882-bdf6878d6f4d tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Start destroying the instance on the hypervisor. 
{{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 923.977553] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-5bb9e4c9-b947-4815-9882-bdf6878d6f4d tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 923.977553] env[63028]: DEBUG oslo_concurrency.lockutils [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Acquiring lock "e7ba7069-fe9f-4e0f-acc6-c1d61ad5a8d9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 923.977553] env[63028]: DEBUG oslo_concurrency.lockutils [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Lock "e7ba7069-fe9f-4e0f-acc6-c1d61ad5a8d9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 923.979305] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cb58543-3ce4-49c9-ba31-f723209ed0f4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.991327] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-5bb9e4c9-b947-4815-9882-bdf6878d6f4d tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 923.991667] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7d34e940-9132-4f32-aaff-34d77044d265 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.000465] env[63028]: DEBUG oslo_vmware.api [None req-5bb9e4c9-b947-4815-9882-bdf6878d6f4d tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Waiting for the task: (returnval){ [ 924.000465] env[63028]: value = "task-2735838" [ 924.000465] env[63028]: _type = "Task" [ 924.000465] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.013852] env[63028]: DEBUG oslo_vmware.api [None req-5bb9e4c9-b947-4815-9882-bdf6878d6f4d tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2735838, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.139732] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5409e07-5d7f-4de7-82ad-c9d8b9992a4c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.148965] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32c5ed24-607b-4c83-82ab-90245a8c4e3b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.187326] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d0adac76-4bdd-4403-8347-182796e98476 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Lock "46dc76bc-854f-46ad-9db5-21cf6f40fb21" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 46.407s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 924.189563] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c806ea8a-3376-43db-a142-b9300df8ad88 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.201874] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d9bb7d3-d8ef-433f-85c2-500dd1905af6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.223967] env[63028]: DEBUG nova.compute.provider_tree [None req-d9c4e743-0ad3-452d-acd1-8a340ea80d72 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 924.351118] env[63028]: DEBUG nova.compute.manager [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: bb2b405e-6207-4718-9485-0271d26c160f] Starting instance... {{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 924.441304] env[63028]: DEBUG nova.compute.manager [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: a97224e8-d69b-4c62-ab96-7cef037ef39b] Starting instance... {{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 924.486557] env[63028]: DEBUG nova.compute.manager [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: e7ba7069-fe9f-4e0f-acc6-c1d61ad5a8d9] Starting instance... {{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 924.516570] env[63028]: DEBUG oslo_vmware.api [None req-5bb9e4c9-b947-4815-9882-bdf6878d6f4d tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2735838, 'name': PowerOffVM_Task, 'duration_secs': 0.203036} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.516968] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-5bb9e4c9-b947-4815-9882-bdf6878d6f4d tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 924.517057] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-5bb9e4c9-b947-4815-9882-bdf6878d6f4d tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 924.517735] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4430edef-45f9-461d-87d6-dfd5d6a9b6f9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.643807] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-5bb9e4c9-b947-4815-9882-bdf6878d6f4d tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 924.644253] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-5bb9e4c9-b947-4815-9882-bdf6878d6f4d tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Deleting contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 924.644253] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-5bb9e4c9-b947-4815-9882-bdf6878d6f4d tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Deleting the datastore file [datastore2] c06813c4-472d-4bf9-84ec-0d01306bcd48 {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 924.644508] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3fc600ab-c368-4410-bbc8-451fee6a2813 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.652834] env[63028]: DEBUG oslo_vmware.api [None req-5bb9e4c9-b947-4815-9882-bdf6878d6f4d tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Waiting for the task: (returnval){ [ 924.652834] env[63028]: value = "task-2735840" [ 924.652834] env[63028]: _type = "Task" [ 924.652834] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.696292] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f5af781c-4293-48a4-9764-e0ab14cb4c37 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Acquiring lock "a1d00736-1a8d-46e0-9358-46e848b94797" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 924.696604] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f5af781c-4293-48a4-9764-e0ab14cb4c37 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Lock "a1d00736-1a8d-46e0-9358-46e848b94797" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 924.696840] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f5af781c-4293-48a4-9764-e0ab14cb4c37 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Acquiring lock "a1d00736-1a8d-46e0-9358-46e848b94797-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 924.697197] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f5af781c-4293-48a4-9764-e0ab14cb4c37 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Lock "a1d00736-1a8d-46e0-9358-46e848b94797-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 924.697409] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f5af781c-4293-48a4-9764-e0ab14cb4c37 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Lock "a1d00736-1a8d-46e0-9358-46e848b94797-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 924.699802] env[63028]: INFO nova.compute.manager [None req-f5af781c-4293-48a4-9764-e0ab14cb4c37 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] [instance: a1d00736-1a8d-46e0-9358-46e848b94797] Terminating instance [ 924.713022] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a99bd000-6ba3-407c-966a-91a9728c4438 tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] Acquiring lock "600195de-ceb4-41a6-9ade-dda8b898e4db" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 924.713280] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a99bd000-6ba3-407c-966a-91a9728c4438 tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] Lock "600195de-ceb4-41a6-9ade-dda8b898e4db" acquired by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 924.713486] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a99bd000-6ba3-407c-966a-91a9728c4438 tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] Acquiring lock "600195de-ceb4-41a6-9ade-dda8b898e4db-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 924.713687] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a99bd000-6ba3-407c-966a-91a9728c4438 tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] Lock "600195de-ceb4-41a6-9ade-dda8b898e4db-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 924.713875] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a99bd000-6ba3-407c-966a-91a9728c4438 tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] Lock "600195de-ceb4-41a6-9ade-dda8b898e4db-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 924.716832] env[63028]: INFO nova.compute.manager [None req-a99bd000-6ba3-407c-966a-91a9728c4438 tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] Terminating instance [ 924.726951] env[63028]: DEBUG nova.scheduler.client.report [None req-d9c4e743-0ad3-452d-acd1-8a340ea80d72 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 924.880511] env[63028]: DEBUG oslo_concurrency.lockutils [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 924.968857] env[63028]: DEBUG oslo_concurrency.lockutils [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 925.023816] env[63028]: DEBUG oslo_concurrency.lockutils [None 
req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 925.164592] env[63028]: DEBUG oslo_vmware.api [None req-5bb9e4c9-b947-4815-9882-bdf6878d6f4d tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2735840, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.165581} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.164879] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-5bb9e4c9-b947-4815-9882-bdf6878d6f4d tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 925.165055] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-5bb9e4c9-b947-4815-9882-bdf6878d6f4d tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Deleted contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 925.165246] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-5bb9e4c9-b947-4815-9882-bdf6878d6f4d tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 925.165490] env[63028]: INFO nova.compute.manager [None req-5bb9e4c9-b947-4815-9882-bdf6878d6f4d tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Took 1.19 seconds to destroy the instance on the hypervisor. [ 925.165664] env[63028]: DEBUG oslo.service.loopingcall [None req-5bb9e4c9-b947-4815-9882-bdf6878d6f4d tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 925.165884] env[63028]: DEBUG nova.compute.manager [-] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 925.165978] env[63028]: DEBUG nova.network.neutron [-] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 925.203765] env[63028]: DEBUG nova.compute.manager [None req-f5af781c-4293-48a4-9764-e0ab14cb4c37 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] [instance: a1d00736-1a8d-46e0-9358-46e848b94797] Start destroying the instance on the hypervisor. 
{{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 925.204011] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-f5af781c-4293-48a4-9764-e0ab14cb4c37 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] [instance: a1d00736-1a8d-46e0-9358-46e848b94797] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 925.204906] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e6cf5b9-8e9f-47ff-9a49-d765448ed688 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.213295] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5af781c-4293-48a4-9764-e0ab14cb4c37 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] [instance: a1d00736-1a8d-46e0-9358-46e848b94797] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 925.213545] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8f51e49d-c524-4882-8ae2-678d3f74d183 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.219575] env[63028]: DEBUG nova.compute.manager [None req-a99bd000-6ba3-407c-966a-91a9728c4438 tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] Start destroying the instance on the hypervisor. {{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 925.219892] env[63028]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-37d608d8-d43e-4399-888d-b5c19c3b0fcc {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.225705] env[63028]: DEBUG oslo_vmware.api [None req-f5af781c-4293-48a4-9764-e0ab14cb4c37 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Waiting for the task: (returnval){ [ 925.225705] env[63028]: value = "task-2735841" [ 925.225705] env[63028]: _type = "Task" [ 925.225705] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.231719] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d9c4e743-0ad3-452d-acd1-8a340ea80d72 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.146s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 925.234909] env[63028]: DEBUG oslo_concurrency.lockutils [None req-418ddbfe-4db6-49c4-80df-5e4e8fabaf08 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.547s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 925.235807] env[63028]: INFO nova.compute.claims [None req-418ddbfe-4db6-49c4-80df-5e4e8fabaf08 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] [instance: 8f621e7b-0c76-4f70-830d-09d28a2e0736] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 925.241036] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb376c55-d413-4701-9ece-4e55de90bd0c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.259361] env[63028]: DEBUG oslo_vmware.api [None req-f5af781c-4293-48a4-9764-e0ab14cb4c37 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Task: {'id': task-2735841, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.260350] env[63028]: INFO nova.scheduler.client.report [None req-d9c4e743-0ad3-452d-acd1-8a340ea80d72 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Deleted allocations for instance 3b90dbb8-66ce-435f-beae-5464720bfb3e [ 925.291355] env[63028]: WARNING nova.virt.vmwareapi.driver [None req-a99bd000-6ba3-407c-966a-91a9728c4438 tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] Instance does not exists. Proceeding to delete instance properties on datastore: nova.exception.InstanceNotFound: Instance 600195de-ceb4-41a6-9ade-dda8b898e4db could not be found. 
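The warning above, together with the destroy sequence that follows for instance 600195de-ceb4-41a6-9ade-dda8b898e4db, shows the driver tolerating a VM that is already gone from vCenter: rather than failing the delete, it notes the InstanceNotFound and carries on with the remaining cleanup so the teardown stays idempotent. A simplified sketch of that pattern is below; the `vm_backend` helpers are hypothetical and this is not the actual nova.virt.vmwareapi code.

    class InstanceNotFound(Exception):
        """Stand-in for nova.exception.InstanceNotFound."""

    def destroy(vm_backend, instance_uuid):
        try:
            vm_ref = vm_backend.lookup_vm(instance_uuid)   # hypothetical helper
            vm_backend.power_off(vm_ref)
            vm_backend.unregister(vm_ref)
        except InstanceNotFound:
            # Matches "Instance does not exist on backend": nothing to power
            # off or unregister, but remaining artifacts still need cleaning
            # up, so fall through instead of re-raising.
            print("warning: instance %s not found on backend, continuing cleanup"
                  % instance_uuid)
        vm_backend.delete_datastore_files(instance_uuid)   # hypothetical helper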
[ 925.291949] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-a99bd000-6ba3-407c-966a-91a9728c4438 tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 925.293324] env[63028]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-da2565cc-6f6c-4608-be76-54cc8c920e7c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.303485] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30048eac-6963-4194-9c51-aacd30b7a513 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.340865] env[63028]: WARNING nova.virt.vmwareapi.vmops [None req-a99bd000-6ba3-407c-966a-91a9728c4438 tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 600195de-ceb4-41a6-9ade-dda8b898e4db could not be found. [ 925.341136] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-a99bd000-6ba3-407c-966a-91a9728c4438 tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 925.341337] env[63028]: INFO nova.compute.manager [None req-a99bd000-6ba3-407c-966a-91a9728c4438 tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] Took 0.12 seconds to destroy the instance on the hypervisor. [ 925.341669] env[63028]: DEBUG oslo.service.loopingcall [None req-a99bd000-6ba3-407c-966a-91a9728c4438 tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 925.342072] env[63028]: DEBUG nova.compute.manager [-] [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 925.342229] env[63028]: DEBUG nova.network.neutron [-] [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 925.739315] env[63028]: DEBUG oslo_vmware.api [None req-f5af781c-4293-48a4-9764-e0ab14cb4c37 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Task: {'id': task-2735841, 'name': PowerOffVM_Task, 'duration_secs': 0.242414} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.739315] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5af781c-4293-48a4-9764-e0ab14cb4c37 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] [instance: a1d00736-1a8d-46e0-9358-46e848b94797] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 925.739315] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-f5af781c-4293-48a4-9764-e0ab14cb4c37 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] [instance: a1d00736-1a8d-46e0-9358-46e848b94797] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 925.739315] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-20a5cf84-30a7-4261-b896-75e29aa076ea {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.771501] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d9c4e743-0ad3-452d-acd1-8a340ea80d72 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Lock "3b90dbb8-66ce-435f-beae-5464720bfb3e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.804s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 925.801917] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-f5af781c-4293-48a4-9764-e0ab14cb4c37 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] [instance: a1d00736-1a8d-46e0-9358-46e848b94797] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 925.803372] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-f5af781c-4293-48a4-9764-e0ab14cb4c37 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] [instance: a1d00736-1a8d-46e0-9358-46e848b94797] Deleting contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 925.803372] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-f5af781c-4293-48a4-9764-e0ab14cb4c37 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Deleting the datastore file [datastore1] a1d00736-1a8d-46e0-9358-46e848b94797 {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 925.803563] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7ad64508-ebda-46e7-a451-0d815e49fcab {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.814838] env[63028]: DEBUG oslo_vmware.api [None req-f5af781c-4293-48a4-9764-e0ab14cb4c37 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Waiting for the task: (returnval){ [ 925.814838] env[63028]: value = "task-2735843" [ 925.814838] env[63028]: _type = "Task" [ 925.814838] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.834431] env[63028]: DEBUG oslo_vmware.api [None req-f5af781c-4293-48a4-9764-e0ab14cb4c37 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Task: {'id': task-2735843, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.882970] env[63028]: DEBUG nova.compute.manager [req-6ec4e6f6-b942-4141-a485-e3a7cf247f0b req-141b197a-d99c-41e9-924a-73614fb43a78 service nova] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Received event network-changed-11a8272a-a9ff-4d48-860e-8ee1b781a6ab {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 925.883328] env[63028]: DEBUG nova.compute.manager [req-6ec4e6f6-b942-4141-a485-e3a7cf247f0b req-141b197a-d99c-41e9-924a-73614fb43a78 service nova] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Refreshing instance network info cache due to event network-changed-11a8272a-a9ff-4d48-860e-8ee1b781a6ab. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 925.883604] env[63028]: DEBUG oslo_concurrency.lockutils [req-6ec4e6f6-b942-4141-a485-e3a7cf247f0b req-141b197a-d99c-41e9-924a-73614fb43a78 service nova] Acquiring lock "refresh_cache-46dc76bc-854f-46ad-9db5-21cf6f40fb21" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 925.883770] env[63028]: DEBUG oslo_concurrency.lockutils [req-6ec4e6f6-b942-4141-a485-e3a7cf247f0b req-141b197a-d99c-41e9-924a-73614fb43a78 service nova] Acquired lock "refresh_cache-46dc76bc-854f-46ad-9db5-21cf6f40fb21" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 925.883933] env[63028]: DEBUG nova.network.neutron [req-6ec4e6f6-b942-4141-a485-e3a7cf247f0b req-141b197a-d99c-41e9-924a-73614fb43a78 service nova] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Refreshing network info cache for port 11a8272a-a9ff-4d48-860e-8ee1b781a6ab {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 926.324208] env[63028]: DEBUG oslo_vmware.api [None req-f5af781c-4293-48a4-9764-e0ab14cb4c37 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Task: {'id': task-2735843, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.204806} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.327216] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-f5af781c-4293-48a4-9764-e0ab14cb4c37 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 926.327458] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-f5af781c-4293-48a4-9764-e0ab14cb4c37 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] [instance: a1d00736-1a8d-46e0-9358-46e848b94797] Deleted contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 926.327673] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-f5af781c-4293-48a4-9764-e0ab14cb4c37 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] [instance: a1d00736-1a8d-46e0-9358-46e848b94797] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 926.327890] env[63028]: INFO nova.compute.manager [None req-f5af781c-4293-48a4-9764-e0ab14cb4c37 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] [instance: a1d00736-1a8d-46e0-9358-46e848b94797] Took 1.12 seconds to destroy the instance on the hypervisor. [ 926.328183] env[63028]: DEBUG oslo.service.loopingcall [None req-f5af781c-4293-48a4-9764-e0ab14cb4c37 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 926.328604] env[63028]: DEBUG nova.compute.manager [-] [instance: a1d00736-1a8d-46e0-9358-46e848b94797] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 926.329678] env[63028]: DEBUG nova.network.neutron [-] [instance: a1d00736-1a8d-46e0-9358-46e848b94797] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 926.710180] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10a81957-9418-4a1e-8486-3e36b44540bd {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.726018] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d864599-72db-43bf-ae67-f030de7c4756 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.755540] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6b7b0a0-11c1-4867-b5af-3057e6542558 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.763495] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-056031e1-010d-43ab-93df-44635e3834e2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.777024] env[63028]: DEBUG nova.compute.provider_tree [None req-418ddbfe-4db6-49c4-80df-5e4e8fabaf08 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 926.975850] env[63028]: DEBUG nova.network.neutron [-] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 927.024222] env[63028]: DEBUG nova.network.neutron [req-6ec4e6f6-b942-4141-a485-e3a7cf247f0b req-141b197a-d99c-41e9-924a-73614fb43a78 service nova] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Updated VIF entry in instance network info cache for port 11a8272a-a9ff-4d48-860e-8ee1b781a6ab. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 927.024222] env[63028]: DEBUG nova.network.neutron [req-6ec4e6f6-b942-4141-a485-e3a7cf247f0b req-141b197a-d99c-41e9-924a-73614fb43a78 service nova] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Updating instance_info_cache with network_info: [{"id": "11a8272a-a9ff-4d48-860e-8ee1b781a6ab", "address": "fa:16:3e:df:5b:21", "network": {"id": "a3d9dede-4b44-4172-9541-24cc42ad633f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-595575190-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.201", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "847e89af959a4266ab55c1d2106ba8fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8459aaf-d6a8-46fb-ad14-464ac3104695", "external-id": "nsx-vlan-transportzone-46", "segmentation_id": 46, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap11a8272a-a9", "ovs_interfaceid": "11a8272a-a9ff-4d48-860e-8ee1b781a6ab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 927.281538] env[63028]: DEBUG nova.scheduler.client.report [None req-418ddbfe-4db6-49c4-80df-5e4e8fabaf08 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 927.383902] env[63028]: DEBUG nova.network.neutron [-] [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 927.479295] env[63028]: INFO nova.compute.manager [-] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Took 2.31 seconds to deallocate network for instance. 
[ 927.530506] env[63028]: DEBUG oslo_concurrency.lockutils [req-6ec4e6f6-b942-4141-a485-e3a7cf247f0b req-141b197a-d99c-41e9-924a-73614fb43a78 service nova] Releasing lock "refresh_cache-46dc76bc-854f-46ad-9db5-21cf6f40fb21" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 927.787683] env[63028]: DEBUG oslo_concurrency.lockutils [None req-418ddbfe-4db6-49c4-80df-5e4e8fabaf08 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.553s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 927.790218] env[63028]: DEBUG oslo_concurrency.lockutils [None req-1208d605-afb6-4f7d-bed8-7b4272afde28 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.257s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 927.790515] env[63028]: DEBUG nova.objects.instance [None req-1208d605-afb6-4f7d-bed8-7b4272afde28 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Lazy-loading 'resources' on Instance uuid 3e45e7f3-a34f-4eab-9fff-1c874c832e2a {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 927.887045] env[63028]: INFO nova.compute.manager [-] [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] Took 2.54 seconds to deallocate network for instance. [ 927.986044] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5bb9e4c9-b947-4815-9882-bdf6878d6f4d tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 928.049190] env[63028]: DEBUG nova.compute.manager [req-5992ad06-f034-472d-8888-50cbbf385f72 req-b358865c-0dfe-4105-9bfb-61c7f6df34b9 service nova] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Received event network-vif-deleted-e9be02f8-7ea6-45eb-a1cb-65fb95285caf {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 928.049841] env[63028]: DEBUG nova.compute.manager [req-5992ad06-f034-472d-8888-50cbbf385f72 req-b358865c-0dfe-4105-9bfb-61c7f6df34b9 service nova] [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] Received event network-vif-deleted-f8cad445-9a0b-4d25-84a3-df0521f45d9f {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 928.050209] env[63028]: DEBUG nova.compute.manager [req-5992ad06-f034-472d-8888-50cbbf385f72 req-b358865c-0dfe-4105-9bfb-61c7f6df34b9 service nova] [instance: a1d00736-1a8d-46e0-9358-46e848b94797] Received event network-vif-deleted-66cd0102-9651-45e1-8a38-f65e2f7dd800 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 928.050655] env[63028]: INFO nova.compute.manager [req-5992ad06-f034-472d-8888-50cbbf385f72 req-b358865c-0dfe-4105-9bfb-61c7f6df34b9 service nova] [instance: a1d00736-1a8d-46e0-9358-46e848b94797] Neutron deleted interface 66cd0102-9651-45e1-8a38-f65e2f7dd800; detaching it from the instance and deleting it from the info cache [ 928.051228] env[63028]: DEBUG nova.network.neutron 
[req-5992ad06-f034-472d-8888-50cbbf385f72 req-b358865c-0dfe-4105-9bfb-61c7f6df34b9 service nova] [instance: a1d00736-1a8d-46e0-9358-46e848b94797] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 928.294173] env[63028]: DEBUG oslo_concurrency.lockutils [None req-418ddbfe-4db6-49c4-80df-5e4e8fabaf08 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] Acquiring lock "ccda51f4-35c5-40c0-8a27-c14a2641a05d" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 928.294443] env[63028]: DEBUG oslo_concurrency.lockutils [None req-418ddbfe-4db6-49c4-80df-5e4e8fabaf08 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] Lock "ccda51f4-35c5-40c0-8a27-c14a2641a05d" acquired by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 928.306787] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 928.306787] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 928.371877] env[63028]: DEBUG nova.network.neutron [-] [instance: a1d00736-1a8d-46e0-9358-46e848b94797] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 928.483495] env[63028]: INFO nova.compute.manager [None req-a99bd000-6ba3-407c-966a-91a9728c4438 tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] Took 0.60 seconds to detach 1 volumes for instance. 
[ 928.488409] env[63028]: DEBUG nova.compute.manager [None req-a99bd000-6ba3-407c-966a-91a9728c4438 tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] Deleting volume: 4246155e-0977-4f2a-b135-72a3849826ce {{(pid=63028) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3282}} [ 928.554959] env[63028]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3bb25d6a-964d-47c2-8116-2791117ff8b4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.568923] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e092c8a8-c7c6-483d-9436-bfd90c979bc3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.619845] env[63028]: DEBUG nova.compute.manager [req-5992ad06-f034-472d-8888-50cbbf385f72 req-b358865c-0dfe-4105-9bfb-61c7f6df34b9 service nova] [instance: a1d00736-1a8d-46e0-9358-46e848b94797] Detach interface failed, port_id=66cd0102-9651-45e1-8a38-f65e2f7dd800, reason: Instance a1d00736-1a8d-46e0-9358-46e848b94797 could not be found. {{(pid=63028) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 928.800108] env[63028]: DEBUG oslo_concurrency.lockutils [None req-418ddbfe-4db6-49c4-80df-5e4e8fabaf08 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] Lock "ccda51f4-35c5-40c0-8a27-c14a2641a05d" "released" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: held 0.505s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 928.801102] env[63028]: DEBUG nova.compute.manager [None req-418ddbfe-4db6-49c4-80df-5e4e8fabaf08 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] [instance: 8f621e7b-0c76-4f70-830d-09d28a2e0736] Start building networks asynchronously for instance. 
{{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 928.819184] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 928.819310] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 928.819421] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 928.819597] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 928.819777] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 928.819938] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 928.820088] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63028) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}} [ 928.820234] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 928.878532] env[63028]: INFO nova.compute.manager [-] [instance: a1d00736-1a8d-46e0-9358-46e848b94797] Took 2.55 seconds to deallocate network for instance. 
[ 928.948453] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94c463d4-22f9-4612-b918-7d4e8da99503 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.959713] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f33c3d9-637a-4fc6-8d62-09df573c8177 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.000944] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49045cba-2500-41f4-b6c1-8edad61ae441 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.006722] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2a87602d-c3ea-493c-a1b4-1b103c3ef74c tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Acquiring lock "ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 929.007108] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2a87602d-c3ea-493c-a1b4-1b103c3ef74c tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Lock "ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 929.012773] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ba8517f-7f7e-4209-9ccf-83bde5581c8a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.027138] env[63028]: DEBUG nova.compute.provider_tree [None req-1208d605-afb6-4f7d-bed8-7b4272afde28 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 929.050795] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a99bd000-6ba3-407c-966a-91a9728c4438 tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 929.320223] env[63028]: DEBUG nova.compute.utils [None req-418ddbfe-4db6-49c4-80df-5e4e8fabaf08 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 929.322690] env[63028]: DEBUG nova.compute.manager [None req-418ddbfe-4db6-49c4-80df-5e4e8fabaf08 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] [instance: 8f621e7b-0c76-4f70-830d-09d28a2e0736] Allocating IP information in the background. 
{{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 929.323688] env[63028]: DEBUG nova.network.neutron [None req-418ddbfe-4db6-49c4-80df-5e4e8fabaf08 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] [instance: 8f621e7b-0c76-4f70-830d-09d28a2e0736] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 929.330025] env[63028]: DEBUG oslo_concurrency.lockutils [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 929.334915] env[63028]: DEBUG oslo_concurrency.lockutils [None req-43ba495d-bfef-4eb6-9fde-f165b7e47fca tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] Acquiring lock "455578fa-7468-40dc-8c0a-37ac35e5c0a0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 929.334915] env[63028]: DEBUG oslo_concurrency.lockutils [None req-43ba495d-bfef-4eb6-9fde-f165b7e47fca tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] Lock "455578fa-7468-40dc-8c0a-37ac35e5c0a0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 929.394811] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f5af781c-4293-48a4-9764-e0ab14cb4c37 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 929.425322] env[63028]: DEBUG nova.policy [None req-418ddbfe-4db6-49c4-80df-5e4e8fabaf08 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '67765ae9e77a466b973c0473a42b3517', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a7e752aa946d4933b408c345f8601dcd', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 929.509318] env[63028]: DEBUG nova.compute.manager [None req-2a87602d-c3ea-493c-a1b4-1b103c3ef74c tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19] Starting instance... 
{{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 929.535195] env[63028]: DEBUG nova.scheduler.client.report [None req-1208d605-afb6-4f7d-bed8-7b4272afde28 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 929.824634] env[63028]: DEBUG nova.compute.manager [None req-418ddbfe-4db6-49c4-80df-5e4e8fabaf08 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] [instance: 8f621e7b-0c76-4f70-830d-09d28a2e0736] Start building block device mappings for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 929.838919] env[63028]: DEBUG nova.compute.manager [None req-43ba495d-bfef-4eb6-9fde-f165b7e47fca tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] [instance: 455578fa-7468-40dc-8c0a-37ac35e5c0a0] Starting instance... {{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 930.043296] env[63028]: DEBUG oslo_concurrency.lockutils [None req-1208d605-afb6-4f7d-bed8-7b4272afde28 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.253s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 930.047032] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2a87602d-c3ea-493c-a1b4-1b103c3ef74c tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 930.047321] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2496a4e0-b6be-4970-8206-2bccdd164e10 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.779s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 930.048921] env[63028]: INFO nova.compute.claims [None req-2496a4e0-b6be-4970-8206-2bccdd164e10 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 930.075883] env[63028]: INFO nova.scheduler.client.report [None req-1208d605-afb6-4f7d-bed8-7b4272afde28 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Deleted allocations for instance 3e45e7f3-a34f-4eab-9fff-1c874c832e2a [ 930.279015] env[63028]: DEBUG nova.network.neutron [None 
req-418ddbfe-4db6-49c4-80df-5e4e8fabaf08 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] [instance: 8f621e7b-0c76-4f70-830d-09d28a2e0736] Successfully created port: c9de2712-61e5-456a-a822-3ed2c95d6e97 {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 930.379384] env[63028]: DEBUG oslo_concurrency.lockutils [None req-43ba495d-bfef-4eb6-9fde-f165b7e47fca tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 930.587994] env[63028]: DEBUG oslo_concurrency.lockutils [None req-1208d605-afb6-4f7d-bed8-7b4272afde28 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Lock "3e45e7f3-a34f-4eab-9fff-1c874c832e2a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 29.461s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 930.847458] env[63028]: DEBUG nova.compute.manager [None req-418ddbfe-4db6-49c4-80df-5e4e8fabaf08 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] [instance: 8f621e7b-0c76-4f70-830d-09d28a2e0736] Start spawning the instance on the hypervisor. {{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 930.896183] env[63028]: DEBUG nova.virt.hardware [None req-418ddbfe-4db6-49c4-80df-5e4e8fabaf08 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 930.896524] env[63028]: DEBUG nova.virt.hardware [None req-418ddbfe-4db6-49c4-80df-5e4e8fabaf08 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 930.896781] env[63028]: DEBUG nova.virt.hardware [None req-418ddbfe-4db6-49c4-80df-5e4e8fabaf08 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 930.897036] env[63028]: DEBUG nova.virt.hardware [None req-418ddbfe-4db6-49c4-80df-5e4e8fabaf08 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 930.897245] env[63028]: DEBUG nova.virt.hardware [None req-418ddbfe-4db6-49c4-80df-5e4e8fabaf08 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 930.897462] env[63028]: DEBUG nova.virt.hardware [None req-418ddbfe-4db6-49c4-80df-5e4e8fabaf08 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 930.897734] env[63028]: DEBUG nova.virt.hardware [None req-418ddbfe-4db6-49c4-80df-5e4e8fabaf08 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 930.897943] env[63028]: DEBUG nova.virt.hardware [None req-418ddbfe-4db6-49c4-80df-5e4e8fabaf08 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 930.899285] env[63028]: DEBUG nova.virt.hardware [None req-418ddbfe-4db6-49c4-80df-5e4e8fabaf08 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 930.899517] env[63028]: DEBUG nova.virt.hardware [None req-418ddbfe-4db6-49c4-80df-5e4e8fabaf08 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 930.899704] env[63028]: DEBUG nova.virt.hardware [None req-418ddbfe-4db6-49c4-80df-5e4e8fabaf08 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 930.900959] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2266e5b7-9cf3-4f1e-b78a-25bf56e9b5e5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.911235] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf2f3844-3181-4187-a265-63749500493e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.557289] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4855c77a-7c3b-4886-94c2-a3085f55ada7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.565064] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcc299ab-ac71-42d0-9ca7-b8aeebb1fa07 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.594935] env[63028]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-232f292c-0545-45e1-ac05-718839c688fc {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.605053] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-128f170b-5c2e-4480-ad84-56dc2c75e883 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.624718] env[63028]: DEBUG nova.compute.provider_tree [None req-2496a4e0-b6be-4970-8206-2bccdd164e10 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 932.093127] env[63028]: DEBUG nova.network.neutron [None req-418ddbfe-4db6-49c4-80df-5e4e8fabaf08 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] [instance: 8f621e7b-0c76-4f70-830d-09d28a2e0736] Successfully updated port: c9de2712-61e5-456a-a822-3ed2c95d6e97 {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 932.101096] env[63028]: DEBUG nova.compute.manager [req-bd2d13d4-d80a-426c-9269-9cd40d6be3a8 req-915979c3-76ff-436b-84bd-163fa2be548d service nova] [instance: 8f621e7b-0c76-4f70-830d-09d28a2e0736] Received event network-vif-plugged-c9de2712-61e5-456a-a822-3ed2c95d6e97 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 932.101096] env[63028]: DEBUG oslo_concurrency.lockutils [req-bd2d13d4-d80a-426c-9269-9cd40d6be3a8 req-915979c3-76ff-436b-84bd-163fa2be548d service nova] Acquiring lock "8f621e7b-0c76-4f70-830d-09d28a2e0736-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 932.101096] env[63028]: DEBUG oslo_concurrency.lockutils [req-bd2d13d4-d80a-426c-9269-9cd40d6be3a8 req-915979c3-76ff-436b-84bd-163fa2be548d service nova] Lock "8f621e7b-0c76-4f70-830d-09d28a2e0736-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 932.101096] env[63028]: DEBUG oslo_concurrency.lockutils [req-bd2d13d4-d80a-426c-9269-9cd40d6be3a8 req-915979c3-76ff-436b-84bd-163fa2be548d service nova] Lock "8f621e7b-0c76-4f70-830d-09d28a2e0736-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 932.101096] env[63028]: DEBUG nova.compute.manager [req-bd2d13d4-d80a-426c-9269-9cd40d6be3a8 req-915979c3-76ff-436b-84bd-163fa2be548d service nova] [instance: 8f621e7b-0c76-4f70-830d-09d28a2e0736] No waiting events found dispatching network-vif-plugged-c9de2712-61e5-456a-a822-3ed2c95d6e97 {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 932.101096] env[63028]: WARNING nova.compute.manager [req-bd2d13d4-d80a-426c-9269-9cd40d6be3a8 req-915979c3-76ff-436b-84bd-163fa2be548d service nova] [instance: 8f621e7b-0c76-4f70-830d-09d28a2e0736] Received unexpected event network-vif-plugged-c9de2712-61e5-456a-a822-3ed2c95d6e97 for instance with vm_state building and 
task_state spawning. [ 932.128701] env[63028]: DEBUG nova.scheduler.client.report [None req-2496a4e0-b6be-4970-8206-2bccdd164e10 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 932.174840] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c8f32af8-7ee0-464d-801d-994c5e7837ce tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Acquiring lock "b9d9fe4e-438c-4f68-b011-9eb9e10a381c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 932.174840] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c8f32af8-7ee0-464d-801d-994c5e7837ce tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Lock "b9d9fe4e-438c-4f68-b011-9eb9e10a381c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 932.174840] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c8f32af8-7ee0-464d-801d-994c5e7837ce tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Acquiring lock "b9d9fe4e-438c-4f68-b011-9eb9e10a381c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 932.175290] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c8f32af8-7ee0-464d-801d-994c5e7837ce tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Lock "b9d9fe4e-438c-4f68-b011-9eb9e10a381c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 932.175477] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c8f32af8-7ee0-464d-801d-994c5e7837ce tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Lock "b9d9fe4e-438c-4f68-b011-9eb9e10a381c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 932.181855] env[63028]: INFO nova.compute.manager [None req-c8f32af8-7ee0-464d-801d-994c5e7837ce tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: b9d9fe4e-438c-4f68-b011-9eb9e10a381c] Terminating instance [ 932.599566] env[63028]: DEBUG oslo_concurrency.lockutils [None req-418ddbfe-4db6-49c4-80df-5e4e8fabaf08 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] Acquiring lock 
"refresh_cache-8f621e7b-0c76-4f70-830d-09d28a2e0736" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 932.599566] env[63028]: DEBUG oslo_concurrency.lockutils [None req-418ddbfe-4db6-49c4-80df-5e4e8fabaf08 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] Acquired lock "refresh_cache-8f621e7b-0c76-4f70-830d-09d28a2e0736" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 932.599566] env[63028]: DEBUG nova.network.neutron [None req-418ddbfe-4db6-49c4-80df-5e4e8fabaf08 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] [instance: 8f621e7b-0c76-4f70-830d-09d28a2e0736] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 932.633672] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2496a4e0-b6be-4970-8206-2bccdd164e10 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.586s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 932.634481] env[63028]: DEBUG nova.compute.manager [None req-2496a4e0-b6be-4970-8206-2bccdd164e10 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] Start building networks asynchronously for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 932.637048] env[63028]: DEBUG oslo_concurrency.lockutils [None req-938cc73b-b50c-4cda-8f80-8d0a7c526476 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.585s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 932.638920] env[63028]: DEBUG oslo_concurrency.lockutils [None req-938cc73b-b50c-4cda-8f80-8d0a7c526476 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 932.640865] env[63028]: DEBUG oslo_concurrency.lockutils [None req-94c0dc8d-b473-4783-b916-c38d54c14d8a tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.903s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 932.643918] env[63028]: INFO nova.compute.claims [None req-94c0dc8d-b473-4783-b916-c38d54c14d8a tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: 672695c2-06f3-4790-a459-4b575baf29d3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 932.680491] env[63028]: INFO nova.scheduler.client.report [None req-938cc73b-b50c-4cda-8f80-8d0a7c526476 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Deleted allocations for instance 52b19182-a7e2-4461-b4eb-e6cd8a30024e [ 
932.687688] env[63028]: DEBUG nova.compute.manager [None req-c8f32af8-7ee0-464d-801d-994c5e7837ce tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: b9d9fe4e-438c-4f68-b011-9eb9e10a381c] Start destroying the instance on the hypervisor. {{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 932.687910] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c8f32af8-7ee0-464d-801d-994c5e7837ce tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: b9d9fe4e-438c-4f68-b011-9eb9e10a381c] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 932.690482] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41ef5c62-d771-4b3d-a63f-398477b922af {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.702066] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8f32af8-7ee0-464d-801d-994c5e7837ce tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: b9d9fe4e-438c-4f68-b011-9eb9e10a381c] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 932.703017] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1c9e283c-7709-4d7a-9f90-181a5cd3fe2f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.709237] env[63028]: DEBUG oslo_vmware.api [None req-c8f32af8-7ee0-464d-801d-994c5e7837ce tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Waiting for the task: (returnval){ [ 932.709237] env[63028]: value = "task-2735845" [ 932.709237] env[63028]: _type = "Task" [ 932.709237] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.721719] env[63028]: DEBUG oslo_vmware.api [None req-c8f32af8-7ee0-464d-801d-994c5e7837ce tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': task-2735845, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.148769] env[63028]: DEBUG nova.compute.utils [None req-2496a4e0-b6be-4970-8206-2bccdd164e10 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 933.150633] env[63028]: DEBUG nova.network.neutron [None req-418ddbfe-4db6-49c4-80df-5e4e8fabaf08 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] [instance: 8f621e7b-0c76-4f70-830d-09d28a2e0736] Instance cache missing network info. {{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 933.152899] env[63028]: DEBUG nova.compute.manager [None req-2496a4e0-b6be-4970-8206-2bccdd164e10 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] Allocating IP information in the background. 
{{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 933.153071] env[63028]: DEBUG nova.network.neutron [None req-2496a4e0-b6be-4970-8206-2bccdd164e10 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 933.188235] env[63028]: DEBUG oslo_concurrency.lockutils [None req-938cc73b-b50c-4cda-8f80-8d0a7c526476 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Lock "52b19182-a7e2-4461-b4eb-e6cd8a30024e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.723s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 933.215608] env[63028]: DEBUG nova.policy [None req-2496a4e0-b6be-4970-8206-2bccdd164e10 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '736ca268dc2c434aac2165473ea28d99', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e2304ce21bf141cab94fb6c342653812', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 933.224338] env[63028]: DEBUG oslo_vmware.api [None req-c8f32af8-7ee0-464d-801d-994c5e7837ce tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': task-2735845, 'name': PowerOffVM_Task, 'duration_secs': 0.212162} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.224617] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8f32af8-7ee0-464d-801d-994c5e7837ce tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: b9d9fe4e-438c-4f68-b011-9eb9e10a381c] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 933.224805] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c8f32af8-7ee0-464d-801d-994c5e7837ce tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: b9d9fe4e-438c-4f68-b011-9eb9e10a381c] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 933.225049] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4c4c7061-0be8-4d60-9b0c-7eeb6d5126f2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.307708] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c8f32af8-7ee0-464d-801d-994c5e7837ce tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: b9d9fe4e-438c-4f68-b011-9eb9e10a381c] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 933.307942] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c8f32af8-7ee0-464d-801d-994c5e7837ce tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: b9d9fe4e-438c-4f68-b011-9eb9e10a381c] Deleting contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 933.308140] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-c8f32af8-7ee0-464d-801d-994c5e7837ce tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Deleting the datastore file [datastore2] b9d9fe4e-438c-4f68-b011-9eb9e10a381c {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 933.309331] env[63028]: DEBUG nova.network.neutron [None req-418ddbfe-4db6-49c4-80df-5e4e8fabaf08 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] [instance: 8f621e7b-0c76-4f70-830d-09d28a2e0736] Updating instance_info_cache with network_info: [{"id": "c9de2712-61e5-456a-a822-3ed2c95d6e97", "address": "fa:16:3e:4e:e6:a5", "network": {"id": "0a328618-8169-401e-b748-99ccbfe04c2f", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-1291374049-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a7e752aa946d4933b408c345f8601dcd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2bf99f85-3a5c-47c6-a603-e215be6ab0bd", "external-id": "nsx-vlan-transportzone-855", "segmentation_id": 855, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc9de2712-61", "ovs_interfaceid": "c9de2712-61e5-456a-a822-3ed2c95d6e97", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", 
"profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 933.310530] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5d535688-6e66-45b1-bba0-7c4d757eaaac {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.318019] env[63028]: DEBUG oslo_vmware.api [None req-c8f32af8-7ee0-464d-801d-994c5e7837ce tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Waiting for the task: (returnval){ [ 933.318019] env[63028]: value = "task-2735847" [ 933.318019] env[63028]: _type = "Task" [ 933.318019] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.327908] env[63028]: DEBUG oslo_vmware.api [None req-c8f32af8-7ee0-464d-801d-994c5e7837ce tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': task-2735847, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.656719] env[63028]: DEBUG nova.compute.manager [None req-2496a4e0-b6be-4970-8206-2bccdd164e10 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] Start building block device mappings for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 933.674781] env[63028]: DEBUG nova.network.neutron [None req-2496a4e0-b6be-4970-8206-2bccdd164e10 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] Successfully created port: b0d7c3ce-e883-4ccc-80ac-87d06acc7bb4 {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 933.813854] env[63028]: DEBUG oslo_concurrency.lockutils [None req-418ddbfe-4db6-49c4-80df-5e4e8fabaf08 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] Releasing lock "refresh_cache-8f621e7b-0c76-4f70-830d-09d28a2e0736" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 933.814188] env[63028]: DEBUG nova.compute.manager [None req-418ddbfe-4db6-49c4-80df-5e4e8fabaf08 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] [instance: 8f621e7b-0c76-4f70-830d-09d28a2e0736] Instance network_info: |[{"id": "c9de2712-61e5-456a-a822-3ed2c95d6e97", "address": "fa:16:3e:4e:e6:a5", "network": {"id": "0a328618-8169-401e-b748-99ccbfe04c2f", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-1291374049-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a7e752aa946d4933b408c345f8601dcd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2bf99f85-3a5c-47c6-a603-e215be6ab0bd", "external-id": 
"nsx-vlan-transportzone-855", "segmentation_id": 855, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc9de2712-61", "ovs_interfaceid": "c9de2712-61e5-456a-a822-3ed2c95d6e97", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 933.814614] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-418ddbfe-4db6-49c4-80df-5e4e8fabaf08 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] [instance: 8f621e7b-0c76-4f70-830d-09d28a2e0736] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4e:e6:a5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2bf99f85-3a5c-47c6-a603-e215be6ab0bd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c9de2712-61e5-456a-a822-3ed2c95d6e97', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 933.824805] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-418ddbfe-4db6-49c4-80df-5e4e8fabaf08 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] Creating folder: Project (a7e752aa946d4933b408c345f8601dcd). Parent ref: group-v550570. {{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 933.829166] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d8f0306f-6ac8-49bc-bc45-2d8936da4d95 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.841036] env[63028]: DEBUG oslo_vmware.api [None req-c8f32af8-7ee0-464d-801d-994c5e7837ce tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': task-2735847, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.146247} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.841285] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-c8f32af8-7ee0-464d-801d-994c5e7837ce tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 933.841554] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c8f32af8-7ee0-464d-801d-994c5e7837ce tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: b9d9fe4e-438c-4f68-b011-9eb9e10a381c] Deleted contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 933.841718] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c8f32af8-7ee0-464d-801d-994c5e7837ce tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: b9d9fe4e-438c-4f68-b011-9eb9e10a381c] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 933.842118] env[63028]: INFO nova.compute.manager [None req-c8f32af8-7ee0-464d-801d-994c5e7837ce tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: b9d9fe4e-438c-4f68-b011-9eb9e10a381c] Took 1.15 seconds to destroy the instance on the hypervisor. 
[ 933.842118] env[63028]: DEBUG oslo.service.loopingcall [None req-c8f32af8-7ee0-464d-801d-994c5e7837ce tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 933.842374] env[63028]: DEBUG nova.compute.manager [-] [instance: b9d9fe4e-438c-4f68-b011-9eb9e10a381c] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 933.842374] env[63028]: DEBUG nova.network.neutron [-] [instance: b9d9fe4e-438c-4f68-b011-9eb9e10a381c] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 933.844978] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-418ddbfe-4db6-49c4-80df-5e4e8fabaf08 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] Created folder: Project (a7e752aa946d4933b408c345f8601dcd) in parent group-v550570. [ 933.845198] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-418ddbfe-4db6-49c4-80df-5e4e8fabaf08 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] Creating folder: Instances. Parent ref: group-v550798. {{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 933.845692] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-05689b92-d61b-4ef1-b27f-2faf9b9e71e5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.854879] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-418ddbfe-4db6-49c4-80df-5e4e8fabaf08 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] Created folder: Instances in parent group-v550798. [ 933.855174] env[63028]: DEBUG oslo.service.loopingcall [None req-418ddbfe-4db6-49c4-80df-5e4e8fabaf08 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 933.855390] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8f621e7b-0c76-4f70-830d-09d28a2e0736] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 933.855619] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5477a125-15bb-4679-9948-526b068e591d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.879690] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 933.879690] env[63028]: value = "task-2735850" [ 933.879690] env[63028]: _type = "Task" [ 933.879690] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.889910] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735850, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.145019] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-245f04af-c1a6-44e5-b27e-9f7e2eedce33 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.153366] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c44f8630-7f9d-49fb-abc3-672fd52121d3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.189178] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9d83309-6fe5-4f3c-a8e4-b3b46951a091 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.197610] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5de88444-8543-4fc4-bcb0-4e9e97a91613 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.220012] env[63028]: DEBUG nova.compute.provider_tree [None req-94c0dc8d-b473-4783-b916-c38d54c14d8a tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 934.294530] env[63028]: DEBUG nova.compute.manager [req-771782d6-0b86-42cf-a26f-f84ea7100504 req-467caa88-7b45-4fb7-98fd-65b7f75f7a42 service nova] [instance: 8f621e7b-0c76-4f70-830d-09d28a2e0736] Received event network-changed-c9de2712-61e5-456a-a822-3ed2c95d6e97 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 934.294819] env[63028]: DEBUG nova.compute.manager [req-771782d6-0b86-42cf-a26f-f84ea7100504 req-467caa88-7b45-4fb7-98fd-65b7f75f7a42 service nova] [instance: 8f621e7b-0c76-4f70-830d-09d28a2e0736] Refreshing instance network info cache due to event network-changed-c9de2712-61e5-456a-a822-3ed2c95d6e97. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 934.295148] env[63028]: DEBUG oslo_concurrency.lockutils [req-771782d6-0b86-42cf-a26f-f84ea7100504 req-467caa88-7b45-4fb7-98fd-65b7f75f7a42 service nova] Acquiring lock "refresh_cache-8f621e7b-0c76-4f70-830d-09d28a2e0736" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 934.295371] env[63028]: DEBUG oslo_concurrency.lockutils [req-771782d6-0b86-42cf-a26f-f84ea7100504 req-467caa88-7b45-4fb7-98fd-65b7f75f7a42 service nova] Acquired lock "refresh_cache-8f621e7b-0c76-4f70-830d-09d28a2e0736" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 934.295619] env[63028]: DEBUG nova.network.neutron [req-771782d6-0b86-42cf-a26f-f84ea7100504 req-467caa88-7b45-4fb7-98fd-65b7f75f7a42 service nova] [instance: 8f621e7b-0c76-4f70-830d-09d28a2e0736] Refreshing network info cache for port c9de2712-61e5-456a-a822-3ed2c95d6e97 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 934.390416] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735850, 'name': CreateVM_Task, 'duration_secs': 0.473316} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.390624] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8f621e7b-0c76-4f70-830d-09d28a2e0736] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 934.391449] env[63028]: DEBUG oslo_concurrency.lockutils [None req-418ddbfe-4db6-49c4-80df-5e4e8fabaf08 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 934.391622] env[63028]: DEBUG oslo_concurrency.lockutils [None req-418ddbfe-4db6-49c4-80df-5e4e8fabaf08 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 934.391944] env[63028]: DEBUG oslo_concurrency.lockutils [None req-418ddbfe-4db6-49c4-80df-5e4e8fabaf08 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 934.392414] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0c412622-6e5c-48be-8ec2-25664c3a2f69 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.398362] env[63028]: DEBUG oslo_vmware.api [None req-418ddbfe-4db6-49c4-80df-5e4e8fabaf08 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] Waiting for the task: (returnval){ [ 934.398362] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52de8474-eb1f-b48e-5a6b-89d6345ee05e" [ 934.398362] env[63028]: _type = "Task" [ 934.398362] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.407487] env[63028]: DEBUG oslo_vmware.api [None req-418ddbfe-4db6-49c4-80df-5e4e8fabaf08 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52de8474-eb1f-b48e-5a6b-89d6345ee05e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.690645] env[63028]: DEBUG nova.compute.manager [None req-2496a4e0-b6be-4970-8206-2bccdd164e10 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] Start spawning the instance on the hypervisor. 
{{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 934.712366] env[63028]: DEBUG oslo_concurrency.lockutils [None req-697ec9df-f4ed-4ed1-9652-15047a7296b9 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Acquiring lock "f3277886-4498-45c6-be68-e71d8293dc00" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 934.712814] env[63028]: DEBUG oslo_concurrency.lockutils [None req-697ec9df-f4ed-4ed1-9652-15047a7296b9 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Lock "f3277886-4498-45c6-be68-e71d8293dc00" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 934.713063] env[63028]: DEBUG oslo_concurrency.lockutils [None req-697ec9df-f4ed-4ed1-9652-15047a7296b9 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Acquiring lock "f3277886-4498-45c6-be68-e71d8293dc00-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 934.713271] env[63028]: DEBUG oslo_concurrency.lockutils [None req-697ec9df-f4ed-4ed1-9652-15047a7296b9 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Lock "f3277886-4498-45c6-be68-e71d8293dc00-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 934.713445] env[63028]: DEBUG oslo_concurrency.lockutils [None req-697ec9df-f4ed-4ed1-9652-15047a7296b9 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Lock "f3277886-4498-45c6-be68-e71d8293dc00-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 934.717314] env[63028]: DEBUG nova.virt.hardware [None req-2496a4e0-b6be-4970-8206-2bccdd164e10 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 934.717542] env[63028]: DEBUG nova.virt.hardware [None 
req-2496a4e0-b6be-4970-8206-2bccdd164e10 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 934.717698] env[63028]: DEBUG nova.virt.hardware [None req-2496a4e0-b6be-4970-8206-2bccdd164e10 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 934.717878] env[63028]: DEBUG nova.virt.hardware [None req-2496a4e0-b6be-4970-8206-2bccdd164e10 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 934.718032] env[63028]: DEBUG nova.virt.hardware [None req-2496a4e0-b6be-4970-8206-2bccdd164e10 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 934.718184] env[63028]: DEBUG nova.virt.hardware [None req-2496a4e0-b6be-4970-8206-2bccdd164e10 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 934.718392] env[63028]: DEBUG nova.virt.hardware [None req-2496a4e0-b6be-4970-8206-2bccdd164e10 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 934.718613] env[63028]: DEBUG nova.virt.hardware [None req-2496a4e0-b6be-4970-8206-2bccdd164e10 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 934.718800] env[63028]: DEBUG nova.virt.hardware [None req-2496a4e0-b6be-4970-8206-2bccdd164e10 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 934.718962] env[63028]: DEBUG nova.virt.hardware [None req-2496a4e0-b6be-4970-8206-2bccdd164e10 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 934.719178] env[63028]: DEBUG nova.virt.hardware [None req-2496a4e0-b6be-4970-8206-2bccdd164e10 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 934.720016] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-5c4962f2-2d79-46d8-92bd-e7c9aa89f973 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.723032] env[63028]: INFO nova.compute.manager [None req-697ec9df-f4ed-4ed1-9652-15047a7296b9 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: f3277886-4498-45c6-be68-e71d8293dc00] Terminating instance [ 934.725013] env[63028]: DEBUG nova.scheduler.client.report [None req-94c0dc8d-b473-4783-b916-c38d54c14d8a tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 934.739924] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae42d102-31c7-4dc5-ac8e-1432c7f64f28 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.853193] env[63028]: DEBUG nova.network.neutron [-] [instance: b9d9fe4e-438c-4f68-b011-9eb9e10a381c] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 934.913108] env[63028]: DEBUG oslo_vmware.api [None req-418ddbfe-4db6-49c4-80df-5e4e8fabaf08 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52de8474-eb1f-b48e-5a6b-89d6345ee05e, 'name': SearchDatastore_Task, 'duration_secs': 0.010707} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.913424] env[63028]: DEBUG oslo_concurrency.lockutils [None req-418ddbfe-4db6-49c4-80df-5e4e8fabaf08 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 934.913680] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-418ddbfe-4db6-49c4-80df-5e4e8fabaf08 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] [instance: 8f621e7b-0c76-4f70-830d-09d28a2e0736] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 934.913955] env[63028]: DEBUG oslo_concurrency.lockutils [None req-418ddbfe-4db6-49c4-80df-5e4e8fabaf08 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 934.914710] env[63028]: DEBUG oslo_concurrency.lockutils [None req-418ddbfe-4db6-49c4-80df-5e4e8fabaf08 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 934.914710] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-418ddbfe-4db6-49c4-80df-5e4e8fabaf08 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 934.914710] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9d394bf0-8964-4b03-937b-5ff70b914fca {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.924345] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-418ddbfe-4db6-49c4-80df-5e4e8fabaf08 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 934.924527] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-418ddbfe-4db6-49c4-80df-5e4e8fabaf08 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 934.925349] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7a62b68e-b2c6-4f3d-aaa8-daf1643b5c6d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.933750] env[63028]: DEBUG oslo_vmware.api [None req-418ddbfe-4db6-49c4-80df-5e4e8fabaf08 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] Waiting for the task: (returnval){ [ 934.933750] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]522d2fa4-2f96-2f29-1ff0-7dae2517acc5" [ 934.933750] env[63028]: _type = "Task" [ 934.933750] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.947021] env[63028]: DEBUG oslo_vmware.api [None req-418ddbfe-4db6-49c4-80df-5e4e8fabaf08 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]522d2fa4-2f96-2f29-1ff0-7dae2517acc5, 'name': SearchDatastore_Task, 'duration_secs': 0.010314} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.947892] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b83226a4-871f-4f65-a20f-520c7bd7435f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.952912] env[63028]: DEBUG oslo_vmware.api [None req-418ddbfe-4db6-49c4-80df-5e4e8fabaf08 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] Waiting for the task: (returnval){ [ 934.952912] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]528cc326-d469-0cf0-07bc-feca730dba33" [ 934.952912] env[63028]: _type = "Task" [ 934.952912] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.961056] env[63028]: DEBUG oslo_vmware.api [None req-418ddbfe-4db6-49c4-80df-5e4e8fabaf08 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]528cc326-d469-0cf0-07bc-feca730dba33, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.034684] env[63028]: DEBUG nova.network.neutron [req-771782d6-0b86-42cf-a26f-f84ea7100504 req-467caa88-7b45-4fb7-98fd-65b7f75f7a42 service nova] [instance: 8f621e7b-0c76-4f70-830d-09d28a2e0736] Updated VIF entry in instance network info cache for port c9de2712-61e5-456a-a822-3ed2c95d6e97. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 935.035008] env[63028]: DEBUG nova.network.neutron [req-771782d6-0b86-42cf-a26f-f84ea7100504 req-467caa88-7b45-4fb7-98fd-65b7f75f7a42 service nova] [instance: 8f621e7b-0c76-4f70-830d-09d28a2e0736] Updating instance_info_cache with network_info: [{"id": "c9de2712-61e5-456a-a822-3ed2c95d6e97", "address": "fa:16:3e:4e:e6:a5", "network": {"id": "0a328618-8169-401e-b748-99ccbfe04c2f", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-1291374049-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a7e752aa946d4933b408c345f8601dcd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2bf99f85-3a5c-47c6-a603-e215be6ab0bd", "external-id": "nsx-vlan-transportzone-855", "segmentation_id": 855, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc9de2712-61", "ovs_interfaceid": "c9de2712-61e5-456a-a822-3ed2c95d6e97", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 935.235353] env[63028]: DEBUG oslo_concurrency.lockutils [None req-94c0dc8d-b473-4783-b916-c38d54c14d8a tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.594s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 935.236121] env[63028]: DEBUG nova.compute.manager [None req-94c0dc8d-b473-4783-b916-c38d54c14d8a tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: 672695c2-06f3-4790-a459-4b575baf29d3] Start building networks asynchronously for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 935.239634] env[63028]: DEBUG nova.compute.manager [None req-697ec9df-f4ed-4ed1-9652-15047a7296b9 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: f3277886-4498-45c6-be68-e71d8293dc00] Start destroying the instance on the hypervisor. 
{{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 935.239634] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-697ec9df-f4ed-4ed1-9652-15047a7296b9 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: f3277886-4498-45c6-be68-e71d8293dc00] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 935.240214] env[63028]: DEBUG oslo_concurrency.lockutils [None req-fe1a3795-d92a-4f03-9066-974eec192970 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.204s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 935.240214] env[63028]: DEBUG nova.objects.instance [None req-fe1a3795-d92a-4f03-9066-974eec192970 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Lazy-loading 'resources' on Instance uuid cd11b318-9158-4f1d-8aa8-1c9d565bb5d5 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 935.242010] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6da45def-1a66-4c52-9f94-021c2dd18594 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.256690] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-697ec9df-f4ed-4ed1-9652-15047a7296b9 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: f3277886-4498-45c6-be68-e71d8293dc00] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 935.257563] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-89692661-afae-4ecc-8853-9b45045200e8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.264850] env[63028]: DEBUG oslo_vmware.api [None req-697ec9df-f4ed-4ed1-9652-15047a7296b9 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Waiting for the task: (returnval){ [ 935.264850] env[63028]: value = "task-2735851" [ 935.264850] env[63028]: _type = "Task" [ 935.264850] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.273808] env[63028]: DEBUG oslo_vmware.api [None req-697ec9df-f4ed-4ed1-9652-15047a7296b9 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Task: {'id': task-2735851, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.357742] env[63028]: INFO nova.compute.manager [-] [instance: b9d9fe4e-438c-4f68-b011-9eb9e10a381c] Took 1.52 seconds to deallocate network for instance. [ 935.464095] env[63028]: DEBUG oslo_vmware.api [None req-418ddbfe-4db6-49c4-80df-5e4e8fabaf08 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]528cc326-d469-0cf0-07bc-feca730dba33, 'name': SearchDatastore_Task, 'duration_secs': 0.009069} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.464411] env[63028]: DEBUG oslo_concurrency.lockutils [None req-418ddbfe-4db6-49c4-80df-5e4e8fabaf08 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 935.464629] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-418ddbfe-4db6-49c4-80df-5e4e8fabaf08 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] 8f621e7b-0c76-4f70-830d-09d28a2e0736/8f621e7b-0c76-4f70-830d-09d28a2e0736.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 935.465187] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dc8c986b-454b-4931-b22d-bc720a309fcb {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.471420] env[63028]: DEBUG oslo_vmware.api [None req-418ddbfe-4db6-49c4-80df-5e4e8fabaf08 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] Waiting for the task: (returnval){ [ 935.471420] env[63028]: value = "task-2735852" [ 935.471420] env[63028]: _type = "Task" [ 935.471420] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.485717] env[63028]: DEBUG oslo_vmware.api [None req-418ddbfe-4db6-49c4-80df-5e4e8fabaf08 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] Task: {'id': task-2735852, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.538868] env[63028]: DEBUG oslo_concurrency.lockutils [req-771782d6-0b86-42cf-a26f-f84ea7100504 req-467caa88-7b45-4fb7-98fd-65b7f75f7a42 service nova] Releasing lock "refresh_cache-8f621e7b-0c76-4f70-830d-09d28a2e0736" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 935.576312] env[63028]: DEBUG nova.compute.manager [req-d00da57c-8494-4a1f-a4dd-cb921147d659 req-1c162aed-1ead-4314-adec-6f6dc67ed12c service nova] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] Received event network-vif-plugged-b0d7c3ce-e883-4ccc-80ac-87d06acc7bb4 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 935.576550] env[63028]: DEBUG oslo_concurrency.lockutils [req-d00da57c-8494-4a1f-a4dd-cb921147d659 req-1c162aed-1ead-4314-adec-6f6dc67ed12c service nova] Acquiring lock "8bb61bfa-d44e-4e06-867a-445d9b3db660-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 935.576951] env[63028]: DEBUG oslo_concurrency.lockutils [req-d00da57c-8494-4a1f-a4dd-cb921147d659 req-1c162aed-1ead-4314-adec-6f6dc67ed12c service nova] Lock "8bb61bfa-d44e-4e06-867a-445d9b3db660-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 935.577024] env[63028]: DEBUG oslo_concurrency.lockutils [req-d00da57c-8494-4a1f-a4dd-cb921147d659 req-1c162aed-1ead-4314-adec-6f6dc67ed12c service nova] Lock "8bb61bfa-d44e-4e06-867a-445d9b3db660-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 935.577776] env[63028]: DEBUG nova.compute.manager [req-d00da57c-8494-4a1f-a4dd-cb921147d659 req-1c162aed-1ead-4314-adec-6f6dc67ed12c service nova] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] No waiting events found dispatching network-vif-plugged-b0d7c3ce-e883-4ccc-80ac-87d06acc7bb4 {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 935.577951] env[63028]: WARNING nova.compute.manager [req-d00da57c-8494-4a1f-a4dd-cb921147d659 req-1c162aed-1ead-4314-adec-6f6dc67ed12c service nova] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] Received unexpected event network-vif-plugged-b0d7c3ce-e883-4ccc-80ac-87d06acc7bb4 for instance with vm_state building and task_state spawning. 
[ 935.615599] env[63028]: DEBUG nova.network.neutron [None req-2496a4e0-b6be-4970-8206-2bccdd164e10 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] Successfully updated port: b0d7c3ce-e883-4ccc-80ac-87d06acc7bb4 {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 935.750653] env[63028]: DEBUG nova.compute.utils [None req-94c0dc8d-b473-4783-b916-c38d54c14d8a tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 935.754943] env[63028]: DEBUG nova.compute.manager [None req-94c0dc8d-b473-4783-b916-c38d54c14d8a tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: 672695c2-06f3-4790-a459-4b575baf29d3] Allocating IP information in the background. {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 935.755146] env[63028]: DEBUG nova.network.neutron [None req-94c0dc8d-b473-4783-b916-c38d54c14d8a tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: 672695c2-06f3-4790-a459-4b575baf29d3] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 935.778520] env[63028]: DEBUG oslo_vmware.api [None req-697ec9df-f4ed-4ed1-9652-15047a7296b9 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Task: {'id': task-2735851, 'name': PowerOffVM_Task, 'duration_secs': 0.282754} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.782010] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-697ec9df-f4ed-4ed1-9652-15047a7296b9 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: f3277886-4498-45c6-be68-e71d8293dc00] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 935.782196] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-697ec9df-f4ed-4ed1-9652-15047a7296b9 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: f3277886-4498-45c6-be68-e71d8293dc00] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 935.786029] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6a517b1e-8007-401a-bbac-99ed49855369 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.825705] env[63028]: DEBUG nova.policy [None req-94c0dc8d-b473-4783-b916-c38d54c14d8a tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ab9cb927bc134277bb980682fef01978', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5ef9a42771824708832a74238bbe89c0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 935.859094] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None 
req-697ec9df-f4ed-4ed1-9652-15047a7296b9 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: f3277886-4498-45c6-be68-e71d8293dc00] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 935.859440] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-697ec9df-f4ed-4ed1-9652-15047a7296b9 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: f3277886-4498-45c6-be68-e71d8293dc00] Deleting contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 935.859676] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-697ec9df-f4ed-4ed1-9652-15047a7296b9 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Deleting the datastore file [datastore1] f3277886-4498-45c6-be68-e71d8293dc00 {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 935.863463] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dd8db503-a38e-4460-b815-8b8bfbdfc961 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.867686] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c8f32af8-7ee0-464d-801d-994c5e7837ce tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 935.872227] env[63028]: DEBUG oslo_vmware.api [None req-697ec9df-f4ed-4ed1-9652-15047a7296b9 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Waiting for the task: (returnval){ [ 935.872227] env[63028]: value = "task-2735854" [ 935.872227] env[63028]: _type = "Task" [ 935.872227] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.883388] env[63028]: DEBUG oslo_vmware.api [None req-697ec9df-f4ed-4ed1-9652-15047a7296b9 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Task: {'id': task-2735854, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.985246] env[63028]: DEBUG oslo_vmware.api [None req-418ddbfe-4db6-49c4-80df-5e4e8fabaf08 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] Task: {'id': task-2735852, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.121630] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2496a4e0-b6be-4970-8206-2bccdd164e10 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Acquiring lock "refresh_cache-8bb61bfa-d44e-4e06-867a-445d9b3db660" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 936.121834] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2496a4e0-b6be-4970-8206-2bccdd164e10 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Acquired lock "refresh_cache-8bb61bfa-d44e-4e06-867a-445d9b3db660" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 936.121945] env[63028]: DEBUG nova.network.neutron [None req-2496a4e0-b6be-4970-8206-2bccdd164e10 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 936.254444] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-549770d1-2fe6-4829-99ae-7403df668998 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.258682] env[63028]: DEBUG nova.compute.manager [None req-94c0dc8d-b473-4783-b916-c38d54c14d8a tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: 672695c2-06f3-4790-a459-4b575baf29d3] Start building block device mappings for instance. 
{{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 936.263874] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5299b854-2a24-4b84-9300-cf32fb39c918 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.298644] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac965caf-3b10-49f7-91fe-2122d192e17c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.307771] env[63028]: DEBUG nova.network.neutron [None req-94c0dc8d-b473-4783-b916-c38d54c14d8a tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: 672695c2-06f3-4790-a459-4b575baf29d3] Successfully created port: 562720f0-e1d1-414a-a602-d4ae400ade6f {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 936.311832] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6594148d-d4fa-464d-9a32-3636f4d5e4dc {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.327729] env[63028]: DEBUG nova.compute.provider_tree [None req-fe1a3795-d92a-4f03-9066-974eec192970 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 936.383576] env[63028]: DEBUG oslo_vmware.api [None req-697ec9df-f4ed-4ed1-9652-15047a7296b9 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Task: {'id': task-2735854, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.236022} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.383576] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-697ec9df-f4ed-4ed1-9652-15047a7296b9 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 936.383701] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-697ec9df-f4ed-4ed1-9652-15047a7296b9 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: f3277886-4498-45c6-be68-e71d8293dc00] Deleted contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 936.383853] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-697ec9df-f4ed-4ed1-9652-15047a7296b9 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: f3277886-4498-45c6-be68-e71d8293dc00] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 936.384267] env[63028]: INFO nova.compute.manager [None req-697ec9df-f4ed-4ed1-9652-15047a7296b9 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] [instance: f3277886-4498-45c6-be68-e71d8293dc00] Took 1.14 seconds to destroy the instance on the hypervisor. 
[ 936.384557] env[63028]: DEBUG oslo.service.loopingcall [None req-697ec9df-f4ed-4ed1-9652-15047a7296b9 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 936.384769] env[63028]: DEBUG nova.compute.manager [-] [instance: f3277886-4498-45c6-be68-e71d8293dc00] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 936.384882] env[63028]: DEBUG nova.network.neutron [-] [instance: f3277886-4498-45c6-be68-e71d8293dc00] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 936.406611] env[63028]: DEBUG nova.compute.manager [req-c8386d73-a2d8-49bf-9487-f3043e19c0a1 req-4a2dd384-7f44-41b4-af37-77a04c914e69 service nova] [instance: b9d9fe4e-438c-4f68-b011-9eb9e10a381c] Received event network-vif-deleted-a07ae27f-6861-4db7-be8b-8f9e4f170f3e {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 936.481867] env[63028]: DEBUG oslo_vmware.api [None req-418ddbfe-4db6-49c4-80df-5e4e8fabaf08 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] Task: {'id': task-2735852, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.514397} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.482396] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-418ddbfe-4db6-49c4-80df-5e4e8fabaf08 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] 8f621e7b-0c76-4f70-830d-09d28a2e0736/8f621e7b-0c76-4f70-830d-09d28a2e0736.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 936.482667] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-418ddbfe-4db6-49c4-80df-5e4e8fabaf08 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] [instance: 8f621e7b-0c76-4f70-830d-09d28a2e0736] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 936.482952] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0e4155ee-370f-486d-a25d-8a9525d22f66 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.489253] env[63028]: DEBUG oslo_vmware.api [None req-418ddbfe-4db6-49c4-80df-5e4e8fabaf08 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] Waiting for the task: (returnval){ [ 936.489253] env[63028]: value = "task-2735855" [ 936.489253] env[63028]: _type = "Task" [ 936.489253] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.497325] env[63028]: DEBUG oslo_vmware.api [None req-418ddbfe-4db6-49c4-80df-5e4e8fabaf08 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] Task: {'id': task-2735855, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.685193] env[63028]: DEBUG nova.network.neutron [None req-2496a4e0-b6be-4970-8206-2bccdd164e10 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] Instance cache missing network info. {{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 936.830995] env[63028]: DEBUG nova.scheduler.client.report [None req-fe1a3795-d92a-4f03-9066-974eec192970 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 936.939830] env[63028]: DEBUG nova.network.neutron [None req-2496a4e0-b6be-4970-8206-2bccdd164e10 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] Updating instance_info_cache with network_info: [{"id": "b0d7c3ce-e883-4ccc-80ac-87d06acc7bb4", "address": "fa:16:3e:cb:aa:63", "network": {"id": "53257b54-f197-4a4e-94bb-23ad6c6b7353", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-575661432-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e2304ce21bf141cab94fb6c342653812", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0954fad3-d24d-496c-83e6-a09d3cb556fc", "external-id": "nsx-vlan-transportzone-216", "segmentation_id": 216, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb0d7c3ce-e8", "ovs_interfaceid": "b0d7c3ce-e883-4ccc-80ac-87d06acc7bb4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 936.999469] env[63028]: DEBUG oslo_vmware.api [None req-418ddbfe-4db6-49c4-80df-5e4e8fabaf08 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] Task: {'id': task-2735855, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.153441} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.999775] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-418ddbfe-4db6-49c4-80df-5e4e8fabaf08 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] [instance: 8f621e7b-0c76-4f70-830d-09d28a2e0736] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 937.000732] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3203e66-6fb2-406b-aa5a-b7b1d4f21040 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.026898] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-418ddbfe-4db6-49c4-80df-5e4e8fabaf08 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] [instance: 8f621e7b-0c76-4f70-830d-09d28a2e0736] Reconfiguring VM instance instance-0000004f to attach disk [datastore2] 8f621e7b-0c76-4f70-830d-09d28a2e0736/8f621e7b-0c76-4f70-830d-09d28a2e0736.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 937.027242] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-32cb81d7-f97e-4007-891f-ab481b6fbbaf {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.047749] env[63028]: DEBUG oslo_vmware.api [None req-418ddbfe-4db6-49c4-80df-5e4e8fabaf08 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] Waiting for the task: (returnval){ [ 937.047749] env[63028]: value = "task-2735856" [ 937.047749] env[63028]: _type = "Task" [ 937.047749] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.055986] env[63028]: DEBUG oslo_vmware.api [None req-418ddbfe-4db6-49c4-80df-5e4e8fabaf08 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] Task: {'id': task-2735856, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.263477] env[63028]: DEBUG nova.network.neutron [-] [instance: f3277886-4498-45c6-be68-e71d8293dc00] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 937.271585] env[63028]: DEBUG nova.compute.manager [None req-94c0dc8d-b473-4783-b916-c38d54c14d8a tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: 672695c2-06f3-4790-a459-4b575baf29d3] Start spawning the instance on the hypervisor. 
{{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 937.308830] env[63028]: DEBUG nova.virt.hardware [None req-94c0dc8d-b473-4783-b916-c38d54c14d8a tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 937.309501] env[63028]: DEBUG nova.virt.hardware [None req-94c0dc8d-b473-4783-b916-c38d54c14d8a tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 937.309787] env[63028]: DEBUG nova.virt.hardware [None req-94c0dc8d-b473-4783-b916-c38d54c14d8a tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 937.310131] env[63028]: DEBUG nova.virt.hardware [None req-94c0dc8d-b473-4783-b916-c38d54c14d8a tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 937.310483] env[63028]: DEBUG nova.virt.hardware [None req-94c0dc8d-b473-4783-b916-c38d54c14d8a tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 937.310871] env[63028]: DEBUG nova.virt.hardware [None req-94c0dc8d-b473-4783-b916-c38d54c14d8a tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 937.313249] env[63028]: DEBUG nova.virt.hardware [None req-94c0dc8d-b473-4783-b916-c38d54c14d8a tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 937.313249] env[63028]: DEBUG nova.virt.hardware [None req-94c0dc8d-b473-4783-b916-c38d54c14d8a tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 937.313249] env[63028]: DEBUG nova.virt.hardware [None req-94c0dc8d-b473-4783-b916-c38d54c14d8a tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Got 1 possible 
topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 937.313249] env[63028]: DEBUG nova.virt.hardware [None req-94c0dc8d-b473-4783-b916-c38d54c14d8a tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 937.313249] env[63028]: DEBUG nova.virt.hardware [None req-94c0dc8d-b473-4783-b916-c38d54c14d8a tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 937.313971] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c0ee981-755b-464a-8d28-fa46dd5331a3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.325826] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-168f2984-1d4f-46d5-b460-239a00855153 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.341040] env[63028]: DEBUG oslo_concurrency.lockutils [None req-fe1a3795-d92a-4f03-9066-974eec192970 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.101s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 937.343425] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f8fd64ce-97ac-48ac-9f55-22966efb1559 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 21.084s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 937.372530] env[63028]: INFO nova.scheduler.client.report [None req-fe1a3795-d92a-4f03-9066-974eec192970 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Deleted allocations for instance cd11b318-9158-4f1d-8aa8-1c9d565bb5d5 [ 937.443781] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2496a4e0-b6be-4970-8206-2bccdd164e10 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Releasing lock "refresh_cache-8bb61bfa-d44e-4e06-867a-445d9b3db660" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 937.444123] env[63028]: DEBUG nova.compute.manager [None req-2496a4e0-b6be-4970-8206-2bccdd164e10 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] Instance network_info: |[{"id": "b0d7c3ce-e883-4ccc-80ac-87d06acc7bb4", "address": "fa:16:3e:cb:aa:63", "network": {"id": "53257b54-f197-4a4e-94bb-23ad6c6b7353", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-575661432-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": 
[]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e2304ce21bf141cab94fb6c342653812", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0954fad3-d24d-496c-83e6-a09d3cb556fc", "external-id": "nsx-vlan-transportzone-216", "segmentation_id": 216, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb0d7c3ce-e8", "ovs_interfaceid": "b0d7c3ce-e883-4ccc-80ac-87d06acc7bb4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 937.444634] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-2496a4e0-b6be-4970-8206-2bccdd164e10 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cb:aa:63', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0954fad3-d24d-496c-83e6-a09d3cb556fc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b0d7c3ce-e883-4ccc-80ac-87d06acc7bb4', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 937.455133] env[63028]: DEBUG oslo.service.loopingcall [None req-2496a4e0-b6be-4970-8206-2bccdd164e10 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 937.455679] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 937.456278] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cda80881-7bd4-4305-84c9-764ba63da6d0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.478713] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 937.478713] env[63028]: value = "task-2735857" [ 937.478713] env[63028]: _type = "Task" [ 937.478713] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.489035] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735857, 'name': CreateVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.558851] env[63028]: DEBUG oslo_vmware.api [None req-418ddbfe-4db6-49c4-80df-5e4e8fabaf08 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] Task: {'id': task-2735856, 'name': ReconfigVM_Task, 'duration_secs': 0.326619} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.558851] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-418ddbfe-4db6-49c4-80df-5e4e8fabaf08 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] [instance: 8f621e7b-0c76-4f70-830d-09d28a2e0736] Reconfigured VM instance instance-0000004f to attach disk [datastore2] 8f621e7b-0c76-4f70-830d-09d28a2e0736/8f621e7b-0c76-4f70-830d-09d28a2e0736.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 937.558851] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ce7fe643-75ca-4a4f-b49c-1f8edb6b8e1e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.565297] env[63028]: DEBUG oslo_vmware.api [None req-418ddbfe-4db6-49c4-80df-5e4e8fabaf08 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] Waiting for the task: (returnval){ [ 937.565297] env[63028]: value = "task-2735858" [ 937.565297] env[63028]: _type = "Task" [ 937.565297] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.575686] env[63028]: DEBUG oslo_vmware.api [None req-418ddbfe-4db6-49c4-80df-5e4e8fabaf08 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] Task: {'id': task-2735858, 'name': Rename_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.612798] env[63028]: DEBUG nova.compute.manager [req-562a2ee9-5b37-41e1-870e-f618ac03da0b req-3fb2e0af-f497-4d55-955e-159e99940a92 service nova] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] Received event network-changed-b0d7c3ce-e883-4ccc-80ac-87d06acc7bb4 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 937.612915] env[63028]: DEBUG nova.compute.manager [req-562a2ee9-5b37-41e1-870e-f618ac03da0b req-3fb2e0af-f497-4d55-955e-159e99940a92 service nova] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] Refreshing instance network info cache due to event network-changed-b0d7c3ce-e883-4ccc-80ac-87d06acc7bb4. 
{{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 937.613078] env[63028]: DEBUG oslo_concurrency.lockutils [req-562a2ee9-5b37-41e1-870e-f618ac03da0b req-3fb2e0af-f497-4d55-955e-159e99940a92 service nova] Acquiring lock "refresh_cache-8bb61bfa-d44e-4e06-867a-445d9b3db660" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 937.613246] env[63028]: DEBUG oslo_concurrency.lockutils [req-562a2ee9-5b37-41e1-870e-f618ac03da0b req-3fb2e0af-f497-4d55-955e-159e99940a92 service nova] Acquired lock "refresh_cache-8bb61bfa-d44e-4e06-867a-445d9b3db660" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 937.613440] env[63028]: DEBUG nova.network.neutron [req-562a2ee9-5b37-41e1-870e-f618ac03da0b req-3fb2e0af-f497-4d55-955e-159e99940a92 service nova] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] Refreshing network info cache for port b0d7c3ce-e883-4ccc-80ac-87d06acc7bb4 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 937.766268] env[63028]: INFO nova.compute.manager [-] [instance: f3277886-4498-45c6-be68-e71d8293dc00] Took 1.38 seconds to deallocate network for instance. [ 937.848287] env[63028]: INFO nova.compute.claims [None req-f8fd64ce-97ac-48ac-9f55-22966efb1559 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: ed872f21-c2c4-4597-8c9e-9f8d2202b707] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 937.885043] env[63028]: DEBUG oslo_concurrency.lockutils [None req-fe1a3795-d92a-4f03-9066-974eec192970 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Lock "cd11b318-9158-4f1d-8aa8-1c9d565bb5d5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 27.250s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 937.995523] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735857, 'name': CreateVM_Task, 'duration_secs': 0.344857} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.995796] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 937.999247] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2496a4e0-b6be-4970-8206-2bccdd164e10 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 937.999605] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2496a4e0-b6be-4970-8206-2bccdd164e10 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 938.000136] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2496a4e0-b6be-4970-8206-2bccdd164e10 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 938.003934] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5b3f3908-0190-4fe2-87d9-fe54eaf3f23d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.010485] env[63028]: DEBUG oslo_vmware.api [None req-2496a4e0-b6be-4970-8206-2bccdd164e10 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Waiting for the task: (returnval){ [ 938.010485] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5212acc1-c9f9-936a-09e3-ee5b5042a136" [ 938.010485] env[63028]: _type = "Task" [ 938.010485] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.026443] env[63028]: DEBUG oslo_vmware.api [None req-2496a4e0-b6be-4970-8206-2bccdd164e10 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5212acc1-c9f9-936a-09e3-ee5b5042a136, 'name': SearchDatastore_Task, 'duration_secs': 0.009348} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.026940] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2496a4e0-b6be-4970-8206-2bccdd164e10 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 938.027374] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-2496a4e0-b6be-4970-8206-2bccdd164e10 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 938.028162] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2496a4e0-b6be-4970-8206-2bccdd164e10 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 938.028875] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2496a4e0-b6be-4970-8206-2bccdd164e10 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 938.029255] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-2496a4e0-b6be-4970-8206-2bccdd164e10 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 938.029680] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-052c3a4d-0b2c-4a5c-bec7-642cd67c136d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.039551] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-2496a4e0-b6be-4970-8206-2bccdd164e10 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 938.039988] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-2496a4e0-b6be-4970-8206-2bccdd164e10 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 938.041147] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5df2206e-8944-4d06-861d-6dab81c32959 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.047985] env[63028]: DEBUG oslo_vmware.api [None req-2496a4e0-b6be-4970-8206-2bccdd164e10 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Waiting for the task: (returnval){ [ 938.047985] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]526a8a3a-bb8d-49a4-7aa4-df666a7543f2" [ 938.047985] env[63028]: _type = "Task" [ 938.047985] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.061057] env[63028]: DEBUG oslo_vmware.api [None req-2496a4e0-b6be-4970-8206-2bccdd164e10 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]526a8a3a-bb8d-49a4-7aa4-df666a7543f2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.078077] env[63028]: DEBUG oslo_vmware.api [None req-418ddbfe-4db6-49c4-80df-5e4e8fabaf08 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] Task: {'id': task-2735858, 'name': Rename_Task, 'duration_secs': 0.13841} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.078552] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-418ddbfe-4db6-49c4-80df-5e4e8fabaf08 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] [instance: 8f621e7b-0c76-4f70-830d-09d28a2e0736] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 938.079085] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c3ec9015-1503-44d5-91ab-b9c01929a830 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.087340] env[63028]: DEBUG oslo_vmware.api [None req-418ddbfe-4db6-49c4-80df-5e4e8fabaf08 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] Waiting for the task: (returnval){ [ 938.087340] env[63028]: value = "task-2735859" [ 938.087340] env[63028]: _type = "Task" [ 938.087340] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.098978] env[63028]: DEBUG oslo_vmware.api [None req-418ddbfe-4db6-49c4-80df-5e4e8fabaf08 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] Task: {'id': task-2735859, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.254438] env[63028]: DEBUG nova.network.neutron [None req-94c0dc8d-b473-4783-b916-c38d54c14d8a tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: 672695c2-06f3-4790-a459-4b575baf29d3] Successfully updated port: 562720f0-e1d1-414a-a602-d4ae400ade6f {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 938.273020] env[63028]: DEBUG oslo_concurrency.lockutils [None req-697ec9df-f4ed-4ed1-9652-15047a7296b9 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 938.355175] env[63028]: INFO nova.compute.resource_tracker [None req-f8fd64ce-97ac-48ac-9f55-22966efb1559 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: ed872f21-c2c4-4597-8c9e-9f8d2202b707] Updating resource usage from migration 8ac4afc4-5919-47ff-9cce-ace0d06a50ba [ 938.377788] env[63028]: DEBUG nova.network.neutron [req-562a2ee9-5b37-41e1-870e-f618ac03da0b req-3fb2e0af-f497-4d55-955e-159e99940a92 service nova] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] Updated VIF entry in instance network info cache for port b0d7c3ce-e883-4ccc-80ac-87d06acc7bb4. {{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 938.378177] env[63028]: DEBUG nova.network.neutron [req-562a2ee9-5b37-41e1-870e-f618ac03da0b req-3fb2e0af-f497-4d55-955e-159e99940a92 service nova] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] Updating instance_info_cache with network_info: [{"id": "b0d7c3ce-e883-4ccc-80ac-87d06acc7bb4", "address": "fa:16:3e:cb:aa:63", "network": {"id": "53257b54-f197-4a4e-94bb-23ad6c6b7353", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-575661432-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e2304ce21bf141cab94fb6c342653812", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0954fad3-d24d-496c-83e6-a09d3cb556fc", "external-id": "nsx-vlan-transportzone-216", "segmentation_id": 216, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb0d7c3ce-e8", "ovs_interfaceid": "b0d7c3ce-e883-4ccc-80ac-87d06acc7bb4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 938.491458] env[63028]: DEBUG oslo_concurrency.lockutils [None req-90adb0fe-8343-4ff2-8382-f0dd347b435a tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Acquiring lock "a50e1167-d8ed-4099-83c3-a5066ab0be1f" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63028) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 938.491701] env[63028]: DEBUG oslo_concurrency.lockutils [None req-90adb0fe-8343-4ff2-8382-f0dd347b435a tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Lock "a50e1167-d8ed-4099-83c3-a5066ab0be1f" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 938.566082] env[63028]: DEBUG oslo_vmware.api [None req-2496a4e0-b6be-4970-8206-2bccdd164e10 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]526a8a3a-bb8d-49a4-7aa4-df666a7543f2, 'name': SearchDatastore_Task, 'duration_secs': 0.010186} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.572514] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-94a7ffc2-c245-4230-90ad-fab819362c55 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.579251] env[63028]: DEBUG oslo_vmware.api [None req-2496a4e0-b6be-4970-8206-2bccdd164e10 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Waiting for the task: (returnval){ [ 938.579251] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52087858-5598-700a-c867-22bf013af5b4" [ 938.579251] env[63028]: _type = "Task" [ 938.579251] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.605329] env[63028]: DEBUG oslo_vmware.api [None req-2496a4e0-b6be-4970-8206-2bccdd164e10 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52087858-5598-700a-c867-22bf013af5b4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.611330] env[63028]: DEBUG oslo_vmware.api [None req-418ddbfe-4db6-49c4-80df-5e4e8fabaf08 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] Task: {'id': task-2735859, 'name': PowerOnVM_Task, 'duration_secs': 0.485606} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.611789] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-418ddbfe-4db6-49c4-80df-5e4e8fabaf08 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] [instance: 8f621e7b-0c76-4f70-830d-09d28a2e0736] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 938.612031] env[63028]: INFO nova.compute.manager [None req-418ddbfe-4db6-49c4-80df-5e4e8fabaf08 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] [instance: 8f621e7b-0c76-4f70-830d-09d28a2e0736] Took 7.76 seconds to spawn the instance on the hypervisor. 
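The entries above keep repeating one pattern: a vSphere task is created (ReconfigVM_Task, Rename_Task, PowerOnVM_Task, SearchDatastore_Task), the caller logs "Waiting for the task", and progress is polled until "completed successfully". As an illustrative aside only, a minimal sketch of such a polling loop is given below; the get_task_info callable and the state strings are hypothetical stand-ins, not oslo.vmware's actual interface.

import time

def wait_for_task(get_task_info, task_ref, poll_interval=0.5):
    """Poll a task reference until it reaches a terminal state."""
    while True:
        info = get_task_info(task_ref)  # hypothetical accessor returning state/progress/result
        if info["state"] == "success":
            return info.get("result")
        if info["state"] == "error":
            raise RuntimeError("task %s failed: %s" % (task_ref, info.get("error")))
        # each pass corresponds to one "... progress is N%" line in the log
        time.sleep(poll_interval)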
[ 938.612219] env[63028]: DEBUG nova.compute.manager [None req-418ddbfe-4db6-49c4-80df-5e4e8fabaf08 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] [instance: 8f621e7b-0c76-4f70-830d-09d28a2e0736] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 938.613081] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec1fbc97-3d0d-467a-9d68-de232e34f3fa {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.631344] env[63028]: DEBUG nova.compute.manager [req-f01828ff-d966-496c-a9a9-cedea0517a16 req-b6e79f28-328b-479e-8fbb-d0eb496cb906 service nova] [instance: f3277886-4498-45c6-be68-e71d8293dc00] Received event network-vif-deleted-08a61148-5b3a-4bb0-a130-3eda62d6bf7c {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 938.757142] env[63028]: DEBUG oslo_concurrency.lockutils [None req-94c0dc8d-b473-4783-b916-c38d54c14d8a tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Acquiring lock "refresh_cache-672695c2-06f3-4790-a459-4b575baf29d3" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 938.757142] env[63028]: DEBUG oslo_concurrency.lockutils [None req-94c0dc8d-b473-4783-b916-c38d54c14d8a tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Acquired lock "refresh_cache-672695c2-06f3-4790-a459-4b575baf29d3" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 938.757142] env[63028]: DEBUG nova.network.neutron [None req-94c0dc8d-b473-4783-b916-c38d54c14d8a tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: 672695c2-06f3-4790-a459-4b575baf29d3] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 938.799968] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32f5ce90-0f8e-4630-b361-24b6482e0143 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.808008] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdd63ade-983b-4ec1-babf-3df07b9479cb {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.844526] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f3ecd16-c8d1-4d87-976b-9ab6f4d5cab5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.852401] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b39e9c4a-3fc0-46f2-aa87-9a4df0a13b96 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.868330] env[63028]: DEBUG nova.compute.provider_tree [None req-f8fd64ce-97ac-48ac-9f55-22966efb1559 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 938.884217] env[63028]: DEBUG 
oslo_concurrency.lockutils [req-562a2ee9-5b37-41e1-870e-f618ac03da0b req-3fb2e0af-f497-4d55-955e-159e99940a92 service nova] Releasing lock "refresh_cache-8bb61bfa-d44e-4e06-867a-445d9b3db660" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 938.994749] env[63028]: DEBUG nova.compute.utils [None req-90adb0fe-8343-4ff2-8382-f0dd347b435a tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 939.090846] env[63028]: DEBUG oslo_vmware.api [None req-2496a4e0-b6be-4970-8206-2bccdd164e10 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52087858-5598-700a-c867-22bf013af5b4, 'name': SearchDatastore_Task, 'duration_secs': 0.036564} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.091130] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2496a4e0-b6be-4970-8206-2bccdd164e10 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 939.091388] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-2496a4e0-b6be-4970-8206-2bccdd164e10 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] 8bb61bfa-d44e-4e06-867a-445d9b3db660/8bb61bfa-d44e-4e06-867a-445d9b3db660.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 939.091643] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a44203e0-e128-4701-a95a-e4ed28c1c1a5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.101615] env[63028]: DEBUG oslo_vmware.api [None req-2496a4e0-b6be-4970-8206-2bccdd164e10 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Waiting for the task: (returnval){ [ 939.101615] env[63028]: value = "task-2735860" [ 939.101615] env[63028]: _type = "Task" [ 939.101615] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.110188] env[63028]: DEBUG oslo_vmware.api [None req-2496a4e0-b6be-4970-8206-2bccdd164e10 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2735860, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.132437] env[63028]: INFO nova.compute.manager [None req-418ddbfe-4db6-49c4-80df-5e4e8fabaf08 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] [instance: 8f621e7b-0c76-4f70-830d-09d28a2e0736] Took 40.46 seconds to build instance. 
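The "Acquiring lock ... acquired ... released ... held N.NNNs" triplets in these entries come from oslo.concurrency's named-lock helpers (the lockutils.py paths above). A minimal sketch of a guarded critical section follows, for illustration only; the lock name and the function body are hypothetical, not Nova's actual code, and assume oslo.concurrency's synchronized decorator and lock context manager.

from oslo_concurrency import lockutils

@lockutils.synchronized("compute_resources")
def update_usage_example():
    # Only one thread at a time runs this body; the "waited"/"held"
    # durations in the log are measured around such a critical section.
    pass

def refresh_cache_example(instance_uuid):
    # The same guard taken inline, as with the "refresh_cache-<uuid>" locks above.
    with lockutils.lock("refresh_cache-%s" % instance_uuid):
        pass  # refresh the instance network info cache here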
[ 939.304265] env[63028]: DEBUG nova.network.neutron [None req-94c0dc8d-b473-4783-b916-c38d54c14d8a tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: 672695c2-06f3-4790-a459-4b575baf29d3] Instance cache missing network info. {{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 939.372099] env[63028]: DEBUG nova.scheduler.client.report [None req-f8fd64ce-97ac-48ac-9f55-22966efb1559 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 939.500405] env[63028]: DEBUG oslo_concurrency.lockutils [None req-90adb0fe-8343-4ff2-8382-f0dd347b435a tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Lock "a50e1167-d8ed-4099-83c3-a5066ab0be1f" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.008s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 939.622421] env[63028]: DEBUG oslo_vmware.api [None req-2496a4e0-b6be-4970-8206-2bccdd164e10 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2735860, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.477429} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.622421] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-2496a4e0-b6be-4970-8206-2bccdd164e10 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] 8bb61bfa-d44e-4e06-867a-445d9b3db660/8bb61bfa-d44e-4e06-867a-445d9b3db660.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 939.622421] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-2496a4e0-b6be-4970-8206-2bccdd164e10 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 939.622421] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6304e048-9c03-4e9d-a56f-6e1abe454e0b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.631030] env[63028]: DEBUG oslo_vmware.api [None req-2496a4e0-b6be-4970-8206-2bccdd164e10 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Waiting for the task: (returnval){ [ 939.631030] env[63028]: value = "task-2735861" [ 939.631030] env[63028]: _type = "Task" [ 939.631030] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.634568] env[63028]: DEBUG oslo_concurrency.lockutils [None req-418ddbfe-4db6-49c4-80df-5e4e8fabaf08 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] Lock "8f621e7b-0c76-4f70-830d-09d28a2e0736" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 41.973s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 939.640512] env[63028]: DEBUG oslo_vmware.api [None req-2496a4e0-b6be-4970-8206-2bccdd164e10 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2735861, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.797828] env[63028]: DEBUG nova.network.neutron [None req-94c0dc8d-b473-4783-b916-c38d54c14d8a tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: 672695c2-06f3-4790-a459-4b575baf29d3] Updating instance_info_cache with network_info: [{"id": "562720f0-e1d1-414a-a602-d4ae400ade6f", "address": "fa:16:3e:d2:a6:d4", "network": {"id": "35eace8d-0404-444c-a1ec-e13fd5687644", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1634949692-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ef9a42771824708832a74238bbe89c0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c330dbdb-ad20-4e7e-8a12-66e4a914a84a", "external-id": "nsx-vlan-transportzone-181", "segmentation_id": 181, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap562720f0-e1", "ovs_interfaceid": "562720f0-e1d1-414a-a602-d4ae400ade6f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 939.879908] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f8fd64ce-97ac-48ac-9f55-22966efb1559 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.536s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 939.881499] env[63028]: INFO nova.compute.manager [None req-f8fd64ce-97ac-48ac-9f55-22966efb1559 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: ed872f21-c2c4-4597-8c9e-9f8d2202b707] Migrating [ 939.887521] env[63028]: DEBUG oslo_concurrency.lockutils [None req-987622b0-68b7-462a-9053-87ac800a435e tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 18.790s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 939.887824] env[63028]: DEBUG nova.objects.instance [None req-987622b0-68b7-462a-9053-87ac800a435e tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Lazy-loading 'resources' on Instance uuid addcf94a-1a56-49ff-8adb-3ce7f2d1e09e {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 939.991290] env[63028]: DEBUG nova.compute.manager [req-a2297d21-5ea7-4bb9-b917-7bd3999644b9 req-7b1a4ba9-24b7-4f0d-b38a-ea667c1d7566 service nova] [instance: 672695c2-06f3-4790-a459-4b575baf29d3] Received event network-vif-plugged-562720f0-e1d1-414a-a602-d4ae400ade6f {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 939.991290] env[63028]: DEBUG oslo_concurrency.lockutils [req-a2297d21-5ea7-4bb9-b917-7bd3999644b9 
req-7b1a4ba9-24b7-4f0d-b38a-ea667c1d7566 service nova] Acquiring lock "672695c2-06f3-4790-a459-4b575baf29d3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 939.991290] env[63028]: DEBUG oslo_concurrency.lockutils [req-a2297d21-5ea7-4bb9-b917-7bd3999644b9 req-7b1a4ba9-24b7-4f0d-b38a-ea667c1d7566 service nova] Lock "672695c2-06f3-4790-a459-4b575baf29d3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 939.991290] env[63028]: DEBUG oslo_concurrency.lockutils [req-a2297d21-5ea7-4bb9-b917-7bd3999644b9 req-7b1a4ba9-24b7-4f0d-b38a-ea667c1d7566 service nova] Lock "672695c2-06f3-4790-a459-4b575baf29d3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 939.991290] env[63028]: DEBUG nova.compute.manager [req-a2297d21-5ea7-4bb9-b917-7bd3999644b9 req-7b1a4ba9-24b7-4f0d-b38a-ea667c1d7566 service nova] [instance: 672695c2-06f3-4790-a459-4b575baf29d3] No waiting events found dispatching network-vif-plugged-562720f0-e1d1-414a-a602-d4ae400ade6f {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 939.991290] env[63028]: WARNING nova.compute.manager [req-a2297d21-5ea7-4bb9-b917-7bd3999644b9 req-7b1a4ba9-24b7-4f0d-b38a-ea667c1d7566 service nova] [instance: 672695c2-06f3-4790-a459-4b575baf29d3] Received unexpected event network-vif-plugged-562720f0-e1d1-414a-a602-d4ae400ade6f for instance with vm_state building and task_state spawning. [ 939.991290] env[63028]: DEBUG nova.compute.manager [req-a2297d21-5ea7-4bb9-b917-7bd3999644b9 req-7b1a4ba9-24b7-4f0d-b38a-ea667c1d7566 service nova] [instance: 672695c2-06f3-4790-a459-4b575baf29d3] Received event network-changed-562720f0-e1d1-414a-a602-d4ae400ade6f {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 939.991290] env[63028]: DEBUG nova.compute.manager [req-a2297d21-5ea7-4bb9-b917-7bd3999644b9 req-7b1a4ba9-24b7-4f0d-b38a-ea667c1d7566 service nova] [instance: 672695c2-06f3-4790-a459-4b575baf29d3] Refreshing instance network info cache due to event network-changed-562720f0-e1d1-414a-a602-d4ae400ade6f. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 939.991290] env[63028]: DEBUG oslo_concurrency.lockutils [req-a2297d21-5ea7-4bb9-b917-7bd3999644b9 req-7b1a4ba9-24b7-4f0d-b38a-ea667c1d7566 service nova] Acquiring lock "refresh_cache-672695c2-06f3-4790-a459-4b575baf29d3" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 940.145973] env[63028]: DEBUG oslo_vmware.api [None req-2496a4e0-b6be-4970-8206-2bccdd164e10 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2735861, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.301078] env[63028]: DEBUG oslo_concurrency.lockutils [None req-94c0dc8d-b473-4783-b916-c38d54c14d8a tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Releasing lock "refresh_cache-672695c2-06f3-4790-a459-4b575baf29d3" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 940.301437] env[63028]: DEBUG nova.compute.manager [None req-94c0dc8d-b473-4783-b916-c38d54c14d8a tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: 672695c2-06f3-4790-a459-4b575baf29d3] Instance network_info: |[{"id": "562720f0-e1d1-414a-a602-d4ae400ade6f", "address": "fa:16:3e:d2:a6:d4", "network": {"id": "35eace8d-0404-444c-a1ec-e13fd5687644", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1634949692-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ef9a42771824708832a74238bbe89c0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c330dbdb-ad20-4e7e-8a12-66e4a914a84a", "external-id": "nsx-vlan-transportzone-181", "segmentation_id": 181, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap562720f0-e1", "ovs_interfaceid": "562720f0-e1d1-414a-a602-d4ae400ade6f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 940.301748] env[63028]: DEBUG oslo_concurrency.lockutils [req-a2297d21-5ea7-4bb9-b917-7bd3999644b9 req-7b1a4ba9-24b7-4f0d-b38a-ea667c1d7566 service nova] Acquired lock "refresh_cache-672695c2-06f3-4790-a459-4b575baf29d3" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 940.301924] env[63028]: DEBUG nova.network.neutron [req-a2297d21-5ea7-4bb9-b917-7bd3999644b9 req-7b1a4ba9-24b7-4f0d-b38a-ea667c1d7566 service nova] [instance: 672695c2-06f3-4790-a459-4b575baf29d3] Refreshing network info cache for port 562720f0-e1d1-414a-a602-d4ae400ade6f {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 940.303216] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-94c0dc8d-b473-4783-b916-c38d54c14d8a tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: 672695c2-06f3-4790-a459-4b575baf29d3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d2:a6:d4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c330dbdb-ad20-4e7e-8a12-66e4a914a84a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '562720f0-e1d1-414a-a602-d4ae400ade6f', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 940.312356] env[63028]: DEBUG oslo.service.loopingcall [None req-94c0dc8d-b473-4783-b916-c38d54c14d8a tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to 
return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 940.313854] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 672695c2-06f3-4790-a459-4b575baf29d3] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 940.314376] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4816ca16-18bb-4e47-92ab-d4392475a111 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.335407] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 940.335407] env[63028]: value = "task-2735862" [ 940.335407] env[63028]: _type = "Task" [ 940.335407] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.343244] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735862, 'name': CreateVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.400409] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f8fd64ce-97ac-48ac-9f55-22966efb1559 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Acquiring lock "refresh_cache-ed872f21-c2c4-4597-8c9e-9f8d2202b707" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 940.400590] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f8fd64ce-97ac-48ac-9f55-22966efb1559 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Acquired lock "refresh_cache-ed872f21-c2c4-4597-8c9e-9f8d2202b707" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 940.400765] env[63028]: DEBUG nova.network.neutron [None req-f8fd64ce-97ac-48ac-9f55-22966efb1559 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: ed872f21-c2c4-4597-8c9e-9f8d2202b707] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 940.466669] env[63028]: DEBUG oslo_concurrency.lockutils [None req-0276dd8a-7d26-4e27-a04b-31fa55ca6c10 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] Acquiring lock "8f621e7b-0c76-4f70-830d-09d28a2e0736" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 940.466739] env[63028]: DEBUG oslo_concurrency.lockutils [None req-0276dd8a-7d26-4e27-a04b-31fa55ca6c10 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] Lock "8f621e7b-0c76-4f70-830d-09d28a2e0736" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 940.466903] env[63028]: DEBUG oslo_concurrency.lockutils [None req-0276dd8a-7d26-4e27-a04b-31fa55ca6c10 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] Acquiring lock "8f621e7b-0c76-4f70-830d-09d28a2e0736-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 940.467100] env[63028]: DEBUG oslo_concurrency.lockutils [None req-0276dd8a-7d26-4e27-a04b-31fa55ca6c10 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] Lock "8f621e7b-0c76-4f70-830d-09d28a2e0736-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 940.467330] env[63028]: DEBUG oslo_concurrency.lockutils [None req-0276dd8a-7d26-4e27-a04b-31fa55ca6c10 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] Lock "8f621e7b-0c76-4f70-830d-09d28a2e0736-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 940.471565] env[63028]: INFO nova.compute.manager [None req-0276dd8a-7d26-4e27-a04b-31fa55ca6c10 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] [instance: 8f621e7b-0c76-4f70-830d-09d28a2e0736] Terminating instance [ 940.563084] env[63028]: DEBUG oslo_concurrency.lockutils [None req-90adb0fe-8343-4ff2-8382-f0dd347b435a tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Acquiring lock "a50e1167-d8ed-4099-83c3-a5066ab0be1f" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 940.563338] env[63028]: DEBUG oslo_concurrency.lockutils [None req-90adb0fe-8343-4ff2-8382-f0dd347b435a tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Lock "a50e1167-d8ed-4099-83c3-a5066ab0be1f" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 940.563570] env[63028]: INFO nova.compute.manager [None req-90adb0fe-8343-4ff2-8382-f0dd347b435a tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: a50e1167-d8ed-4099-83c3-a5066ab0be1f] Attaching volume 0e21440a-90bd-4920-bda0-bdf25396cbd3 to /dev/sdb [ 940.612389] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1bc561b-cee5-4733-8c1a-edca7f581e54 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.624725] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-575dae8c-b0a4-40f6-8b98-6a066ddb509f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.646381] env[63028]: DEBUG nova.virt.block_device [None req-90adb0fe-8343-4ff2-8382-f0dd347b435a tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: a50e1167-d8ed-4099-83c3-a5066ab0be1f] Updating existing volume attachment record: 13ff185a-5f56-40d1-baa0-ec5c1f7a0bcd {{(pid=63028) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 940.657355] env[63028]: DEBUG oslo_vmware.api [None req-2496a4e0-b6be-4970-8206-2bccdd164e10 
tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2735861, 'name': ExtendVirtualDisk_Task} progress is 50%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.671954] env[63028]: DEBUG nova.network.neutron [req-a2297d21-5ea7-4bb9-b917-7bd3999644b9 req-7b1a4ba9-24b7-4f0d-b38a-ea667c1d7566 service nova] [instance: 672695c2-06f3-4790-a459-4b575baf29d3] Updated VIF entry in instance network info cache for port 562720f0-e1d1-414a-a602-d4ae400ade6f. {{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 940.671954] env[63028]: DEBUG nova.network.neutron [req-a2297d21-5ea7-4bb9-b917-7bd3999644b9 req-7b1a4ba9-24b7-4f0d-b38a-ea667c1d7566 service nova] [instance: 672695c2-06f3-4790-a459-4b575baf29d3] Updating instance_info_cache with network_info: [{"id": "562720f0-e1d1-414a-a602-d4ae400ade6f", "address": "fa:16:3e:d2:a6:d4", "network": {"id": "35eace8d-0404-444c-a1ec-e13fd5687644", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1634949692-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ef9a42771824708832a74238bbe89c0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c330dbdb-ad20-4e7e-8a12-66e4a914a84a", "external-id": "nsx-vlan-transportzone-181", "segmentation_id": 181, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap562720f0-e1", "ovs_interfaceid": "562720f0-e1d1-414a-a602-d4ae400ade6f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 940.848508] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735862, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.863973] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9857044-3e59-4f9a-9a9c-8c648ff04673 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.872143] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa862b3b-73f8-4ebc-9ed4-f451aa7b7801 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.903850] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1f15d06-d86a-4615-8fb2-35d1d344aefb {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.918545] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b3edbf8-881a-4c71-8d0a-f65e49e22f13 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.934257] env[63028]: DEBUG nova.compute.provider_tree [None req-987622b0-68b7-462a-9053-87ac800a435e tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 940.979860] env[63028]: DEBUG nova.compute.manager [None req-0276dd8a-7d26-4e27-a04b-31fa55ca6c10 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] [instance: 8f621e7b-0c76-4f70-830d-09d28a2e0736] Start destroying the instance on the hypervisor. {{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 940.980152] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-0276dd8a-7d26-4e27-a04b-31fa55ca6c10 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] [instance: 8f621e7b-0c76-4f70-830d-09d28a2e0736] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 940.981102] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c6b7eda-5cef-4f00-a9e0-f0c02df0d2fc {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.991030] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-0276dd8a-7d26-4e27-a04b-31fa55ca6c10 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] [instance: 8f621e7b-0c76-4f70-830d-09d28a2e0736] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 940.991280] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3511866f-2c94-4bef-87e7-aed6e1d8bc7e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.997336] env[63028]: DEBUG oslo_vmware.api [None req-0276dd8a-7d26-4e27-a04b-31fa55ca6c10 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] Waiting for the task: (returnval){ [ 940.997336] env[63028]: value = "task-2735866" [ 940.997336] env[63028]: _type = "Task" [ 940.997336] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.005902] env[63028]: DEBUG oslo_vmware.api [None req-0276dd8a-7d26-4e27-a04b-31fa55ca6c10 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] Task: {'id': task-2735866, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.147038] env[63028]: DEBUG oslo_vmware.api [None req-2496a4e0-b6be-4970-8206-2bccdd164e10 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2735861, 'name': ExtendVirtualDisk_Task, 'duration_secs': 1.058696} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.148033] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-2496a4e0-b6be-4970-8206-2bccdd164e10 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 941.148220] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-919ea966-1a47-425a-866b-4c5b16ca2b68 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.172788] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-2496a4e0-b6be-4970-8206-2bccdd164e10 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] Reconfiguring VM instance instance-00000050 to attach disk [datastore2] 8bb61bfa-d44e-4e06-867a-445d9b3db660/8bb61bfa-d44e-4e06-867a-445d9b3db660.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 941.173542] env[63028]: DEBUG oslo_concurrency.lockutils [req-a2297d21-5ea7-4bb9-b917-7bd3999644b9 req-7b1a4ba9-24b7-4f0d-b38a-ea667c1d7566 service nova] Releasing lock "refresh_cache-672695c2-06f3-4790-a459-4b575baf29d3" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 941.174016] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-93f34401-7904-428f-839f-bcd5fd2aa753 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.202782] env[63028]: DEBUG oslo_vmware.api [None req-2496a4e0-b6be-4970-8206-2bccdd164e10 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Waiting for the task: (returnval){ [ 941.202782] env[63028]: value = "task-2735867" [ 941.202782] env[63028]: _type = "Task" [ 941.202782] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.208902] env[63028]: DEBUG nova.network.neutron [None req-f8fd64ce-97ac-48ac-9f55-22966efb1559 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: ed872f21-c2c4-4597-8c9e-9f8d2202b707] Updating instance_info_cache with network_info: [{"id": "cd8436f9-6412-468e-bd24-f9d845d3ca21", "address": "fa:16:3e:ff:70:93", "network": {"id": "e8109779-7eb7-4751-b6e5-c1e3b007cb9a", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1892394824-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "05118b378b5e4d838962db2378b381bc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5446413d-c3b0-4cd2-a962-62240db178ac", "external-id": "nsx-vlan-transportzone-528", "segmentation_id": 528, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcd8436f9-64", "ovs_interfaceid": "cd8436f9-6412-468e-bd24-f9d845d3ca21", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 941.214169] env[63028]: DEBUG oslo_vmware.api [None req-2496a4e0-b6be-4970-8206-2bccdd164e10 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2735867, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.346879] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735862, 'name': CreateVM_Task, 'duration_secs': 0.589629} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.347124] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 672695c2-06f3-4790-a459-4b575baf29d3] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 941.347877] env[63028]: DEBUG oslo_concurrency.lockutils [None req-94c0dc8d-b473-4783-b916-c38d54c14d8a tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 941.348601] env[63028]: DEBUG oslo_concurrency.lockutils [None req-94c0dc8d-b473-4783-b916-c38d54c14d8a tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 941.349090] env[63028]: DEBUG oslo_concurrency.lockutils [None req-94c0dc8d-b473-4783-b916-c38d54c14d8a tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 941.349423] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eff3e383-fba4-47f3-ad51-d6a3dbea4ba7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.355560] env[63028]: DEBUG oslo_vmware.api [None req-94c0dc8d-b473-4783-b916-c38d54c14d8a tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Waiting for the task: (returnval){ [ 941.355560] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]527265b5-8d41-ce3e-b73a-f56c8e6b25aa" [ 941.355560] env[63028]: _type = "Task" [ 941.355560] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.365637] env[63028]: DEBUG oslo_vmware.api [None req-94c0dc8d-b473-4783-b916-c38d54c14d8a tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]527265b5-8d41-ce3e-b73a-f56c8e6b25aa, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.437106] env[63028]: DEBUG nova.scheduler.client.report [None req-987622b0-68b7-462a-9053-87ac800a435e tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 941.506696] env[63028]: DEBUG oslo_vmware.api [None req-0276dd8a-7d26-4e27-a04b-31fa55ca6c10 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] Task: {'id': task-2735866, 'name': PowerOffVM_Task, 'duration_secs': 0.26381} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.507752] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-0276dd8a-7d26-4e27-a04b-31fa55ca6c10 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] [instance: 8f621e7b-0c76-4f70-830d-09d28a2e0736] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 941.507752] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-0276dd8a-7d26-4e27-a04b-31fa55ca6c10 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] [instance: 8f621e7b-0c76-4f70-830d-09d28a2e0736] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 941.507752] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-79ce4b68-dc26-4c57-b926-6b6703e27ad9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.585470] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-0276dd8a-7d26-4e27-a04b-31fa55ca6c10 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] [instance: 8f621e7b-0c76-4f70-830d-09d28a2e0736] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 941.585470] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-0276dd8a-7d26-4e27-a04b-31fa55ca6c10 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] [instance: 8f621e7b-0c76-4f70-830d-09d28a2e0736] Deleting contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 941.585470] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-0276dd8a-7d26-4e27-a04b-31fa55ca6c10 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] Deleting the datastore file [datastore2] 8f621e7b-0c76-4f70-830d-09d28a2e0736 {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 941.585681] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-54b23b88-3981-4d91-8912-4dcdd209a965 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
941.591823] env[63028]: DEBUG oslo_vmware.api [None req-0276dd8a-7d26-4e27-a04b-31fa55ca6c10 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] Waiting for the task: (returnval){ [ 941.591823] env[63028]: value = "task-2735869" [ 941.591823] env[63028]: _type = "Task" [ 941.591823] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.600230] env[63028]: DEBUG oslo_vmware.api [None req-0276dd8a-7d26-4e27-a04b-31fa55ca6c10 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] Task: {'id': task-2735869, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.714430] env[63028]: DEBUG oslo_vmware.api [None req-2496a4e0-b6be-4970-8206-2bccdd164e10 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2735867, 'name': ReconfigVM_Task, 'duration_secs': 0.300383} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.715435] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-2496a4e0-b6be-4970-8206-2bccdd164e10 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] Reconfigured VM instance instance-00000050 to attach disk [datastore2] 8bb61bfa-d44e-4e06-867a-445d9b3db660/8bb61bfa-d44e-4e06-867a-445d9b3db660.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 941.715833] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f8fd64ce-97ac-48ac-9f55-22966efb1559 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Releasing lock "refresh_cache-ed872f21-c2c4-4597-8c9e-9f8d2202b707" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 941.717309] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9d74fa7a-4eef-48cd-98f7-4a7feb0f7502 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.725053] env[63028]: DEBUG oslo_vmware.api [None req-2496a4e0-b6be-4970-8206-2bccdd164e10 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Waiting for the task: (returnval){ [ 941.725053] env[63028]: value = "task-2735870" [ 941.725053] env[63028]: _type = "Task" [ 941.725053] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.739425] env[63028]: DEBUG oslo_vmware.api [None req-2496a4e0-b6be-4970-8206-2bccdd164e10 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2735870, 'name': Rename_Task} progress is 6%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.777546] env[63028]: DEBUG oslo_concurrency.lockutils [None req-303769e7-95d0-483b-bc6d-c7843cb1c5e3 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Acquiring lock "56d6982d-9f76-4952-8c8b-f64b3c8d02fe" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 941.778390] env[63028]: DEBUG oslo_concurrency.lockutils [None req-303769e7-95d0-483b-bc6d-c7843cb1c5e3 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Lock "56d6982d-9f76-4952-8c8b-f64b3c8d02fe" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 941.867587] env[63028]: DEBUG oslo_vmware.api [None req-94c0dc8d-b473-4783-b916-c38d54c14d8a tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]527265b5-8d41-ce3e-b73a-f56c8e6b25aa, 'name': SearchDatastore_Task, 'duration_secs': 0.009974} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.867935] env[63028]: DEBUG oslo_concurrency.lockutils [None req-94c0dc8d-b473-4783-b916-c38d54c14d8a tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 941.868256] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-94c0dc8d-b473-4783-b916-c38d54c14d8a tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: 672695c2-06f3-4790-a459-4b575baf29d3] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 941.868509] env[63028]: DEBUG oslo_concurrency.lockutils [None req-94c0dc8d-b473-4783-b916-c38d54c14d8a tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 941.868660] env[63028]: DEBUG oslo_concurrency.lockutils [None req-94c0dc8d-b473-4783-b916-c38d54c14d8a tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 941.869558] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-94c0dc8d-b473-4783-b916-c38d54c14d8a tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 941.869558] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-2c37a397-aebd-4f6c-9ab6-5c7deea1470e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.877703] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-94c0dc8d-b473-4783-b916-c38d54c14d8a tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 941.877889] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-94c0dc8d-b473-4783-b916-c38d54c14d8a tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 941.878737] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-131f96be-029a-46e9-9f7d-382163e82f73 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.883766] env[63028]: DEBUG oslo_vmware.api [None req-94c0dc8d-b473-4783-b916-c38d54c14d8a tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Waiting for the task: (returnval){ [ 941.883766] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52905b32-45a5-684f-4678-838c9ca55690" [ 941.883766] env[63028]: _type = "Task" [ 941.883766] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.891753] env[63028]: DEBUG oslo_vmware.api [None req-94c0dc8d-b473-4783-b916-c38d54c14d8a tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52905b32-45a5-684f-4678-838c9ca55690, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.944742] env[63028]: DEBUG oslo_concurrency.lockutils [None req-987622b0-68b7-462a-9053-87ac800a435e tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.055s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 941.946239] env[63028]: DEBUG oslo_concurrency.lockutils [None req-e330a647-c60e-4be4-a03b-b64be279ce3b tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 18.075s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 941.969536] env[63028]: INFO nova.scheduler.client.report [None req-987622b0-68b7-462a-9053-87ac800a435e tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Deleted allocations for instance addcf94a-1a56-49ff-8adb-3ce7f2d1e09e [ 942.103455] env[63028]: DEBUG oslo_vmware.api [None req-0276dd8a-7d26-4e27-a04b-31fa55ca6c10 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] Task: {'id': task-2735869, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.181235} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.104411] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-0276dd8a-7d26-4e27-a04b-31fa55ca6c10 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 942.104411] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-0276dd8a-7d26-4e27-a04b-31fa55ca6c10 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] [instance: 8f621e7b-0c76-4f70-830d-09d28a2e0736] Deleted contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 942.104723] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-0276dd8a-7d26-4e27-a04b-31fa55ca6c10 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] [instance: 8f621e7b-0c76-4f70-830d-09d28a2e0736] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 942.104910] env[63028]: INFO nova.compute.manager [None req-0276dd8a-7d26-4e27-a04b-31fa55ca6c10 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] [instance: 8f621e7b-0c76-4f70-830d-09d28a2e0736] Took 1.12 seconds to destroy the instance on the hypervisor. [ 942.105243] env[63028]: DEBUG oslo.service.loopingcall [None req-0276dd8a-7d26-4e27-a04b-31fa55ca6c10 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 942.107736] env[63028]: DEBUG nova.compute.manager [-] [instance: 8f621e7b-0c76-4f70-830d-09d28a2e0736] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 942.107856] env[63028]: DEBUG nova.network.neutron [-] [instance: 8f621e7b-0c76-4f70-830d-09d28a2e0736] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 942.236733] env[63028]: DEBUG oslo_vmware.api [None req-2496a4e0-b6be-4970-8206-2bccdd164e10 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2735870, 'name': Rename_Task, 'duration_secs': 0.148682} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.239197] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-2496a4e0-b6be-4970-8206-2bccdd164e10 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 942.239884] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c5524c53-0b8d-4801-b487-d31e719e5339 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.247266] env[63028]: DEBUG oslo_vmware.api [None req-2496a4e0-b6be-4970-8206-2bccdd164e10 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Waiting for the task: (returnval){ [ 942.247266] env[63028]: value = "task-2735871" [ 942.247266] env[63028]: _type = "Task" [ 942.247266] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.254918] env[63028]: DEBUG oslo_vmware.api [None req-2496a4e0-b6be-4970-8206-2bccdd164e10 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2735871, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.281021] env[63028]: DEBUG nova.compute.manager [None req-303769e7-95d0-483b-bc6d-c7843cb1c5e3 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] [instance: 56d6982d-9f76-4952-8c8b-f64b3c8d02fe] Starting instance... {{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 942.343706] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03eea559-1a55-45a6-8369-61cfc959269d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.351118] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc3751c7-f2d6-4d41-b99d-9c2879eb44f8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.383274] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0423a357-03ef-4f60-b23e-e86e8466ef70 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.396666] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8aebb1a4-6711-474c-86da-f9b3d67df31f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.400356] env[63028]: DEBUG oslo_vmware.api [None req-94c0dc8d-b473-4783-b916-c38d54c14d8a tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52905b32-45a5-684f-4678-838c9ca55690, 'name': SearchDatastore_Task, 'duration_secs': 0.008487} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.402783] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ba07f2b3-5738-4791-ba9d-d76834704e2c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.412617] env[63028]: DEBUG nova.compute.provider_tree [None req-e330a647-c60e-4be4-a03b-b64be279ce3b tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 942.417441] env[63028]: DEBUG oslo_vmware.api [None req-94c0dc8d-b473-4783-b916-c38d54c14d8a tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Waiting for the task: (returnval){ [ 942.417441] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]520700fd-8fe9-ab88-c134-adaae937b292" [ 942.417441] env[63028]: _type = "Task" [ 942.417441] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.425683] env[63028]: DEBUG oslo_vmware.api [None req-94c0dc8d-b473-4783-b916-c38d54c14d8a tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]520700fd-8fe9-ab88-c134-adaae937b292, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.447950] env[63028]: DEBUG nova.compute.manager [req-09841ae5-6cd2-4616-a841-c4123d0b3858 req-2ff6bd84-fa0b-40b9-a4ad-fe0105f19554 service nova] [instance: 8f621e7b-0c76-4f70-830d-09d28a2e0736] Received event network-vif-deleted-c9de2712-61e5-456a-a822-3ed2c95d6e97 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 942.448166] env[63028]: INFO nova.compute.manager [req-09841ae5-6cd2-4616-a841-c4123d0b3858 req-2ff6bd84-fa0b-40b9-a4ad-fe0105f19554 service nova] [instance: 8f621e7b-0c76-4f70-830d-09d28a2e0736] Neutron deleted interface c9de2712-61e5-456a-a822-3ed2c95d6e97; detaching it from the instance and deleting it from the info cache [ 942.448378] env[63028]: DEBUG nova.network.neutron [req-09841ae5-6cd2-4616-a841-c4123d0b3858 req-2ff6bd84-fa0b-40b9-a4ad-fe0105f19554 service nova] [instance: 8f621e7b-0c76-4f70-830d-09d28a2e0736] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 942.478977] env[63028]: DEBUG oslo_concurrency.lockutils [None req-987622b0-68b7-462a-9053-87ac800a435e tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Lock "addcf94a-1a56-49ff-8adb-3ce7f2d1e09e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 24.937s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 942.589650] env[63028]: DEBUG oslo_concurrency.lockutils [None req-6e8c6f51-983a-4bea-9d02-9ada98e80f7c tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Acquiring lock "63524cd8-81de-419f-bb07-0326f3cb393f" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=63028) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 942.589785] env[63028]: DEBUG oslo_concurrency.lockutils [None req-6e8c6f51-983a-4bea-9d02-9ada98e80f7c tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Lock "63524cd8-81de-419f-bb07-0326f3cb393f" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 942.590102] env[63028]: INFO nova.compute.manager [None req-6e8c6f51-983a-4bea-9d02-9ada98e80f7c tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Shelving [ 942.763715] env[63028]: DEBUG oslo_vmware.api [None req-2496a4e0-b6be-4970-8206-2bccdd164e10 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2735871, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.798879] env[63028]: DEBUG oslo_concurrency.lockutils [None req-303769e7-95d0-483b-bc6d-c7843cb1c5e3 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 942.902943] env[63028]: DEBUG nova.network.neutron [-] [instance: 8f621e7b-0c76-4f70-830d-09d28a2e0736] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 942.916473] env[63028]: DEBUG nova.scheduler.client.report [None req-e330a647-c60e-4be4-a03b-b64be279ce3b tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 942.931228] env[63028]: DEBUG oslo_vmware.api [None req-94c0dc8d-b473-4783-b916-c38d54c14d8a tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]520700fd-8fe9-ab88-c134-adaae937b292, 'name': SearchDatastore_Task, 'duration_secs': 0.010896} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.931509] env[63028]: DEBUG oslo_concurrency.lockutils [None req-94c0dc8d-b473-4783-b916-c38d54c14d8a tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 942.931756] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-94c0dc8d-b473-4783-b916-c38d54c14d8a tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] 672695c2-06f3-4790-a459-4b575baf29d3/672695c2-06f3-4790-a459-4b575baf29d3.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 942.932708] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d1997831-6f04-4b7e-915b-49f635fe48fd {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.941614] env[63028]: DEBUG oslo_vmware.api [None req-94c0dc8d-b473-4783-b916-c38d54c14d8a tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Waiting for the task: (returnval){ [ 942.941614] env[63028]: value = "task-2735872" [ 942.941614] env[63028]: _type = "Task" [ 942.941614] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.950190] env[63028]: DEBUG oslo_vmware.api [None req-94c0dc8d-b473-4783-b916-c38d54c14d8a tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735872, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.950448] env[63028]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d70ad6e3-9e3e-4293-80d7-c8cef1c91960 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.958808] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3643a2a-844a-4208-9f0c-8887451202d3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.993286] env[63028]: DEBUG nova.compute.manager [req-09841ae5-6cd2-4616-a841-c4123d0b3858 req-2ff6bd84-fa0b-40b9-a4ad-fe0105f19554 service nova] [instance: 8f621e7b-0c76-4f70-830d-09d28a2e0736] Detach interface failed, port_id=c9de2712-61e5-456a-a822-3ed2c95d6e97, reason: Instance 8f621e7b-0c76-4f70-830d-09d28a2e0736 could not be found. 
{{(pid=63028) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 943.237342] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d1b2062-b78c-40e2-b87d-753125e04a3d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.258563] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-f8fd64ce-97ac-48ac-9f55-22966efb1559 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: ed872f21-c2c4-4597-8c9e-9f8d2202b707] Updating instance 'ed872f21-c2c4-4597-8c9e-9f8d2202b707' progress to 0 {{(pid=63028) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 943.271622] env[63028]: DEBUG oslo_vmware.api [None req-2496a4e0-b6be-4970-8206-2bccdd164e10 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2735871, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.407798] env[63028]: INFO nova.compute.manager [-] [instance: 8f621e7b-0c76-4f70-830d-09d28a2e0736] Took 1.30 seconds to deallocate network for instance. [ 943.423158] env[63028]: DEBUG oslo_concurrency.lockutils [None req-e330a647-c60e-4be4-a03b-b64be279ce3b tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.477s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 943.424171] env[63028]: INFO nova.compute.manager [None req-e330a647-c60e-4be4-a03b-b64be279ce3b tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] Successfully reverted task state from rebuilding on failure for instance. [ 943.428581] env[63028]: DEBUG oslo_concurrency.lockutils [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.548s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 943.430064] env[63028]: INFO nova.compute.claims [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: bb2b405e-6207-4718-9485-0271d26c160f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 943.455732] env[63028]: DEBUG oslo_vmware.api [None req-94c0dc8d-b473-4783-b916-c38d54c14d8a tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735872, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.602955] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e8c6f51-983a-4bea-9d02-9ada98e80f7c tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 943.602955] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a91d866e-f53b-466d-b937-9b704164c570 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.609988] env[63028]: DEBUG oslo_vmware.api [None req-6e8c6f51-983a-4bea-9d02-9ada98e80f7c tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Waiting for the task: (returnval){ [ 943.609988] env[63028]: value = "task-2735874" [ 943.609988] env[63028]: _type = "Task" [ 943.609988] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.618522] env[63028]: DEBUG oslo_vmware.api [None req-6e8c6f51-983a-4bea-9d02-9ada98e80f7c tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Task: {'id': task-2735874, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.770460] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8fd64ce-97ac-48ac-9f55-22966efb1559 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: ed872f21-c2c4-4597-8c9e-9f8d2202b707] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 943.770971] env[63028]: DEBUG oslo_vmware.api [None req-2496a4e0-b6be-4970-8206-2bccdd164e10 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2735871, 'name': PowerOnVM_Task, 'duration_secs': 1.336833} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.771319] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-db6b2926-723b-4a23-a6ff-b0ddb9d57a7e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.773336] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-2496a4e0-b6be-4970-8206-2bccdd164e10 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 943.773587] env[63028]: INFO nova.compute.manager [None req-2496a4e0-b6be-4970-8206-2bccdd164e10 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] Took 9.08 seconds to spawn the instance on the hypervisor. 
[ 943.774609] env[63028]: DEBUG nova.compute.manager [None req-2496a4e0-b6be-4970-8206-2bccdd164e10 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 943.774609] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2521b38c-afe5-4577-9aee-eccbdefc89d7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.785752] env[63028]: DEBUG oslo_vmware.api [None req-f8fd64ce-97ac-48ac-9f55-22966efb1559 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Waiting for the task: (returnval){ [ 943.785752] env[63028]: value = "task-2735875" [ 943.785752] env[63028]: _type = "Task" [ 943.785752] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.796600] env[63028]: DEBUG oslo_vmware.api [None req-f8fd64ce-97ac-48ac-9f55-22966efb1559 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2735875, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.887635] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5a58b678-dd27-4716-b720-edf6a180012d tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Acquiring lock "c386c117-e255-4c3b-9a37-011e517277de" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 943.887856] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5a58b678-dd27-4716-b720-edf6a180012d tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Lock "c386c117-e255-4c3b-9a37-011e517277de" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 943.915643] env[63028]: DEBUG oslo_concurrency.lockutils [None req-0276dd8a-7d26-4e27-a04b-31fa55ca6c10 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 943.953658] env[63028]: DEBUG oslo_vmware.api [None req-94c0dc8d-b473-4783-b916-c38d54c14d8a tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735872, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.565899} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.954210] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-94c0dc8d-b473-4783-b916-c38d54c14d8a tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] 672695c2-06f3-4790-a459-4b575baf29d3/672695c2-06f3-4790-a459-4b575baf29d3.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 943.954425] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-94c0dc8d-b473-4783-b916-c38d54c14d8a tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: 672695c2-06f3-4790-a459-4b575baf29d3] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 943.954677] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c62ebabe-6a8c-440e-8a39-134780d56e27 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.961926] env[63028]: DEBUG oslo_vmware.api [None req-94c0dc8d-b473-4783-b916-c38d54c14d8a tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Waiting for the task: (returnval){ [ 943.961926] env[63028]: value = "task-2735876" [ 943.961926] env[63028]: _type = "Task" [ 943.961926] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.969812] env[63028]: DEBUG oslo_vmware.api [None req-94c0dc8d-b473-4783-b916-c38d54c14d8a tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735876, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.120422] env[63028]: DEBUG oslo_vmware.api [None req-6e8c6f51-983a-4bea-9d02-9ada98e80f7c tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Task: {'id': task-2735874, 'name': PowerOffVM_Task, 'duration_secs': 0.267757} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.121083] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e8c6f51-983a-4bea-9d02-9ada98e80f7c tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 944.121893] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16269614-8057-4bb3-a91e-be616f0fb97b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.142496] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd4f6777-60f2-467c-a936-6d39794109e5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.296743] env[63028]: INFO nova.compute.manager [None req-2496a4e0-b6be-4970-8206-2bccdd164e10 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] Took 39.04 seconds to build instance. [ 944.301458] env[63028]: DEBUG oslo_vmware.api [None req-f8fd64ce-97ac-48ac-9f55-22966efb1559 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2735875, 'name': PowerOffVM_Task, 'duration_secs': 0.357981} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.301867] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8fd64ce-97ac-48ac-9f55-22966efb1559 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: ed872f21-c2c4-4597-8c9e-9f8d2202b707] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 944.301867] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-f8fd64ce-97ac-48ac-9f55-22966efb1559 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: ed872f21-c2c4-4597-8c9e-9f8d2202b707] Updating instance 'ed872f21-c2c4-4597-8c9e-9f8d2202b707' progress to 17 {{(pid=63028) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 944.390465] env[63028]: DEBUG nova.compute.manager [None req-5a58b678-dd27-4716-b720-edf6a180012d tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: c386c117-e255-4c3b-9a37-011e517277de] Starting instance... {{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 944.471163] env[63028]: DEBUG oslo_vmware.api [None req-94c0dc8d-b473-4783-b916-c38d54c14d8a tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735876, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074433} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.471443] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-94c0dc8d-b473-4783-b916-c38d54c14d8a tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: 672695c2-06f3-4790-a459-4b575baf29d3] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 944.472222] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98f9578e-052c-43d2-b662-5a090a907528 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.496427] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-94c0dc8d-b473-4783-b916-c38d54c14d8a tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: 672695c2-06f3-4790-a459-4b575baf29d3] Reconfiguring VM instance instance-00000051 to attach disk [datastore2] 672695c2-06f3-4790-a459-4b575baf29d3/672695c2-06f3-4790-a459-4b575baf29d3.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 944.500077] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d80e483b-a817-420b-8a7b-e202ec126691 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.521995] env[63028]: DEBUG oslo_vmware.api [None req-94c0dc8d-b473-4783-b916-c38d54c14d8a tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Waiting for the task: (returnval){ [ 944.521995] env[63028]: value = "task-2735877" [ 944.521995] env[63028]: _type = "Task" [ 944.521995] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.532884] env[63028]: DEBUG oslo_vmware.api [None req-94c0dc8d-b473-4783-b916-c38d54c14d8a tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735877, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.653948] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-6e8c6f51-983a-4bea-9d02-9ada98e80f7c tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Creating Snapshot of the VM instance {{(pid=63028) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 944.654368] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-c3a7a5e5-ad8c-418a-b0a0-2991c3bb4e66 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.664694] env[63028]: DEBUG oslo_vmware.api [None req-6e8c6f51-983a-4bea-9d02-9ada98e80f7c tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Waiting for the task: (returnval){ [ 944.664694] env[63028]: value = "task-2735878" [ 944.664694] env[63028]: _type = "Task" [ 944.664694] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.672276] env[63028]: DEBUG oslo_vmware.api [None req-6e8c6f51-983a-4bea-9d02-9ada98e80f7c tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Task: {'id': task-2735878, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.798495] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2496a4e0-b6be-4970-8206-2bccdd164e10 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Lock "8bb61bfa-d44e-4e06-867a-445d9b3db660" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 40.550s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 944.811362] env[63028]: DEBUG nova.virt.hardware [None req-f8fd64ce-97ac-48ac-9f55-22966efb1559 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 944.811648] env[63028]: DEBUG nova.virt.hardware [None req-f8fd64ce-97ac-48ac-9f55-22966efb1559 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 944.811869] env[63028]: DEBUG nova.virt.hardware [None req-f8fd64ce-97ac-48ac-9f55-22966efb1559 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 944.812073] env[63028]: DEBUG nova.virt.hardware [None req-f8fd64ce-97ac-48ac-9f55-22966efb1559 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 944.812221] env[63028]: DEBUG nova.virt.hardware [None req-f8fd64ce-97ac-48ac-9f55-22966efb1559 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 944.812364] env[63028]: DEBUG nova.virt.hardware [None req-f8fd64ce-97ac-48ac-9f55-22966efb1559 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 944.812573] env[63028]: DEBUG nova.virt.hardware [None 
req-f8fd64ce-97ac-48ac-9f55-22966efb1559 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 944.812790] env[63028]: DEBUG nova.virt.hardware [None req-f8fd64ce-97ac-48ac-9f55-22966efb1559 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 944.812962] env[63028]: DEBUG nova.virt.hardware [None req-f8fd64ce-97ac-48ac-9f55-22966efb1559 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 944.813185] env[63028]: DEBUG nova.virt.hardware [None req-f8fd64ce-97ac-48ac-9f55-22966efb1559 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 944.813364] env[63028]: DEBUG nova.virt.hardware [None req-f8fd64ce-97ac-48ac-9f55-22966efb1559 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 944.821088] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5cc131f2-6337-4cef-a767-67c1098775e7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.837270] env[63028]: DEBUG oslo_vmware.api [None req-f8fd64ce-97ac-48ac-9f55-22966efb1559 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Waiting for the task: (returnval){ [ 944.837270] env[63028]: value = "task-2735879" [ 944.837270] env[63028]: _type = "Task" [ 944.837270] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.852419] env[63028]: DEBUG oslo_vmware.api [None req-f8fd64ce-97ac-48ac-9f55-22966efb1559 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2735879, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.886500] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffd55337-33da-4878-b888-a296bc927ffa {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.894745] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3147964-d2cb-4979-8f76-9446d825d1a8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.932253] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2652f8aa-ceea-45f2-bd3d-008a57ee4742 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.940801] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2eb4137d-1c3e-4605-9487-7814767bd8fd {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.945861] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5a58b678-dd27-4716-b720-edf6a180012d tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 944.956487] env[63028]: DEBUG nova.compute.provider_tree [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 945.017536] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9afa90ca-b83c-440d-b8db-5679bf6e1fab tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Acquiring lock "c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 945.017724] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9afa90ca-b83c-440d-b8db-5679bf6e1fab tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Lock "c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 945.017895] env[63028]: DEBUG nova.compute.manager [None req-9afa90ca-b83c-440d-b8db-5679bf6e1fab tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 945.018954] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a9f5753-61e6-410c-be89-a0e4a735533d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.026853] env[63028]: DEBUG nova.compute.manager [None 
req-9afa90ca-b83c-440d-b8db-5679bf6e1fab tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=63028) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 945.030738] env[63028]: DEBUG nova.objects.instance [None req-9afa90ca-b83c-440d-b8db-5679bf6e1fab tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Lazy-loading 'flavor' on Instance uuid c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 945.036671] env[63028]: DEBUG oslo_vmware.api [None req-94c0dc8d-b473-4783-b916-c38d54c14d8a tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735877, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.174828] env[63028]: DEBUG oslo_vmware.api [None req-6e8c6f51-983a-4bea-9d02-9ada98e80f7c tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Task: {'id': task-2735878, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.210647] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-90adb0fe-8343-4ff2-8382-f0dd347b435a tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: a50e1167-d8ed-4099-83c3-a5066ab0be1f] Volume attach. Driver type: vmdk {{(pid=63028) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 945.210765] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-90adb0fe-8343-4ff2-8382-f0dd347b435a tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: a50e1167-d8ed-4099-83c3-a5066ab0be1f] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-550804', 'volume_id': '0e21440a-90bd-4920-bda0-bdf25396cbd3', 'name': 'volume-0e21440a-90bd-4920-bda0-bdf25396cbd3', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'a50e1167-d8ed-4099-83c3-a5066ab0be1f', 'attached_at': '', 'detached_at': '', 'volume_id': '0e21440a-90bd-4920-bda0-bdf25396cbd3', 'serial': '0e21440a-90bd-4920-bda0-bdf25396cbd3'} {{(pid=63028) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 945.211711] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b75c8561-b874-4a13-a11b-043e641c3325 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.228462] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7af2dfde-42aa-4859-bb5b-f107c2d356f8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.251719] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-90adb0fe-8343-4ff2-8382-f0dd347b435a tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: a50e1167-d8ed-4099-83c3-a5066ab0be1f] Reconfiguring VM instance 
instance-00000045 to attach disk [datastore2] volume-0e21440a-90bd-4920-bda0-bdf25396cbd3/volume-0e21440a-90bd-4920-bda0-bdf25396cbd3.vmdk or device None with type thin {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 945.251984] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f24cd5c2-666c-4ea9-bfab-d6fe6d49374a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.269762] env[63028]: DEBUG oslo_vmware.api [None req-90adb0fe-8343-4ff2-8382-f0dd347b435a tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Waiting for the task: (returnval){ [ 945.269762] env[63028]: value = "task-2735880" [ 945.269762] env[63028]: _type = "Task" [ 945.269762] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.277913] env[63028]: DEBUG oslo_vmware.api [None req-90adb0fe-8343-4ff2-8382-f0dd347b435a tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': task-2735880, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.303659] env[63028]: DEBUG nova.compute.manager [req-65148b68-0446-4f6c-b5ec-b5542dd13133 req-14ce6787-b7d1-404b-aed7-758d23bc3b0f service nova] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] Received event network-changed-b0d7c3ce-e883-4ccc-80ac-87d06acc7bb4 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 945.303900] env[63028]: DEBUG nova.compute.manager [req-65148b68-0446-4f6c-b5ec-b5542dd13133 req-14ce6787-b7d1-404b-aed7-758d23bc3b0f service nova] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] Refreshing instance network info cache due to event network-changed-b0d7c3ce-e883-4ccc-80ac-87d06acc7bb4. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 945.304086] env[63028]: DEBUG oslo_concurrency.lockutils [req-65148b68-0446-4f6c-b5ec-b5542dd13133 req-14ce6787-b7d1-404b-aed7-758d23bc3b0f service nova] Acquiring lock "refresh_cache-8bb61bfa-d44e-4e06-867a-445d9b3db660" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 945.304309] env[63028]: DEBUG oslo_concurrency.lockutils [req-65148b68-0446-4f6c-b5ec-b5542dd13133 req-14ce6787-b7d1-404b-aed7-758d23bc3b0f service nova] Acquired lock "refresh_cache-8bb61bfa-d44e-4e06-867a-445d9b3db660" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 945.304507] env[63028]: DEBUG nova.network.neutron [req-65148b68-0446-4f6c-b5ec-b5542dd13133 req-14ce6787-b7d1-404b-aed7-758d23bc3b0f service nova] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] Refreshing network info cache for port b0d7c3ce-e883-4ccc-80ac-87d06acc7bb4 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 945.349412] env[63028]: DEBUG oslo_vmware.api [None req-f8fd64ce-97ac-48ac-9f55-22966efb1559 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2735879, 'name': ReconfigVM_Task, 'duration_secs': 0.209246} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.349799] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-f8fd64ce-97ac-48ac-9f55-22966efb1559 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: ed872f21-c2c4-4597-8c9e-9f8d2202b707] Updating instance 'ed872f21-c2c4-4597-8c9e-9f8d2202b707' progress to 33 {{(pid=63028) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 945.458842] env[63028]: DEBUG nova.scheduler.client.report [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 945.536855] env[63028]: DEBUG oslo_vmware.api [None req-94c0dc8d-b473-4783-b916-c38d54c14d8a tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735877, 'name': ReconfigVM_Task, 'duration_secs': 0.653288} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.538160] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-94c0dc8d-b473-4783-b916-c38d54c14d8a tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: 672695c2-06f3-4790-a459-4b575baf29d3] Reconfigured VM instance instance-00000051 to attach disk [datastore2] 672695c2-06f3-4790-a459-4b575baf29d3/672695c2-06f3-4790-a459-4b575baf29d3.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 945.539032] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-50a39b10-59c4-4b1e-9570-516f2ef082fd {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.546929] env[63028]: DEBUG oslo_vmware.api [None req-94c0dc8d-b473-4783-b916-c38d54c14d8a tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Waiting for the task: (returnval){ [ 945.546929] env[63028]: value = "task-2735881" [ 945.546929] env[63028]: _type = "Task" [ 945.546929] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.555899] env[63028]: DEBUG oslo_vmware.api [None req-94c0dc8d-b473-4783-b916-c38d54c14d8a tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735881, 'name': Rename_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.675016] env[63028]: DEBUG oslo_vmware.api [None req-6e8c6f51-983a-4bea-9d02-9ada98e80f7c tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Task: {'id': task-2735878, 'name': CreateSnapshot_Task, 'duration_secs': 0.948079} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.675338] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-6e8c6f51-983a-4bea-9d02-9ada98e80f7c tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Created Snapshot of the VM instance {{(pid=63028) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 945.676121] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f4a94b0-9e9c-4de6-a7b7-958b6f377c6e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.780878] env[63028]: DEBUG oslo_vmware.api [None req-90adb0fe-8343-4ff2-8382-f0dd347b435a tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': task-2735880, 'name': ReconfigVM_Task, 'duration_secs': 0.426545} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.781026] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-90adb0fe-8343-4ff2-8382-f0dd347b435a tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: a50e1167-d8ed-4099-83c3-a5066ab0be1f] Reconfigured VM instance instance-00000045 to attach disk [datastore2] volume-0e21440a-90bd-4920-bda0-bdf25396cbd3/volume-0e21440a-90bd-4920-bda0-bdf25396cbd3.vmdk or device None with type thin {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 945.786199] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8a4312bf-5ba7-4309-96bd-4b535f9444f9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.802205] env[63028]: DEBUG oslo_vmware.api [None req-90adb0fe-8343-4ff2-8382-f0dd347b435a tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Waiting for the task: (returnval){ [ 945.802205] env[63028]: value = "task-2735882" [ 945.802205] env[63028]: _type = "Task" [ 945.802205] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.812529] env[63028]: DEBUG oslo_vmware.api [None req-90adb0fe-8343-4ff2-8382-f0dd347b435a tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': task-2735882, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.857230] env[63028]: DEBUG nova.virt.hardware [None req-f8fd64ce-97ac-48ac-9f55-22966efb1559 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 945.857530] env[63028]: DEBUG nova.virt.hardware [None req-f8fd64ce-97ac-48ac-9f55-22966efb1559 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 945.857647] env[63028]: DEBUG nova.virt.hardware [None req-f8fd64ce-97ac-48ac-9f55-22966efb1559 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 945.857896] env[63028]: DEBUG nova.virt.hardware [None req-f8fd64ce-97ac-48ac-9f55-22966efb1559 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 945.858025] env[63028]: DEBUG nova.virt.hardware [None req-f8fd64ce-97ac-48ac-9f55-22966efb1559 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 945.858711] env[63028]: DEBUG nova.virt.hardware [None req-f8fd64ce-97ac-48ac-9f55-22966efb1559 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 945.858711] env[63028]: DEBUG nova.virt.hardware [None req-f8fd64ce-97ac-48ac-9f55-22966efb1559 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 945.858711] env[63028]: DEBUG nova.virt.hardware [None req-f8fd64ce-97ac-48ac-9f55-22966efb1559 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 945.858711] env[63028]: DEBUG nova.virt.hardware [None req-f8fd64ce-97ac-48ac-9f55-22966efb1559 tempest-ServerDiskConfigTestJSON-1072642600 
tempest-ServerDiskConfigTestJSON-1072642600-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 945.859338] env[63028]: DEBUG nova.virt.hardware [None req-f8fd64ce-97ac-48ac-9f55-22966efb1559 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 945.859338] env[63028]: DEBUG nova.virt.hardware [None req-f8fd64ce-97ac-48ac-9f55-22966efb1559 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 945.865160] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-f8fd64ce-97ac-48ac-9f55-22966efb1559 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: ed872f21-c2c4-4597-8c9e-9f8d2202b707] Reconfiguring VM instance instance-0000004c to detach disk 2000 {{(pid=63028) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 945.867564] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-50bbaa71-ad4c-4bd8-a86b-c64b634451bb {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.887783] env[63028]: DEBUG oslo_vmware.api [None req-f8fd64ce-97ac-48ac-9f55-22966efb1559 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Waiting for the task: (returnval){ [ 945.887783] env[63028]: value = "task-2735883" [ 945.887783] env[63028]: _type = "Task" [ 945.887783] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.901937] env[63028]: DEBUG oslo_vmware.api [None req-f8fd64ce-97ac-48ac-9f55-22966efb1559 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2735883, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.967022] env[63028]: DEBUG oslo_concurrency.lockutils [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.536s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 945.967022] env[63028]: DEBUG nova.compute.manager [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: bb2b405e-6207-4718-9485-0271d26c160f] Start building networks asynchronously for instance. 
{{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 945.971677] env[63028]: DEBUG oslo_concurrency.lockutils [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.003s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 945.973517] env[63028]: INFO nova.compute.claims [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: a97224e8-d69b-4c62-ab96-7cef037ef39b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 946.042377] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-9afa90ca-b83c-440d-b8db-5679bf6e1fab tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 946.042678] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e17db7c7-fa95-4bf8-9507-7e85deb845e0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.049202] env[63028]: DEBUG oslo_vmware.api [None req-9afa90ca-b83c-440d-b8db-5679bf6e1fab tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Waiting for the task: (returnval){ [ 946.049202] env[63028]: value = "task-2735884" [ 946.049202] env[63028]: _type = "Task" [ 946.049202] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.061715] env[63028]: DEBUG oslo_vmware.api [None req-94c0dc8d-b473-4783-b916-c38d54c14d8a tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735881, 'name': Rename_Task, 'duration_secs': 0.219651} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.065046] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-94c0dc8d-b473-4783-b916-c38d54c14d8a tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: 672695c2-06f3-4790-a459-4b575baf29d3] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 946.065334] env[63028]: DEBUG oslo_vmware.api [None req-9afa90ca-b83c-440d-b8db-5679bf6e1fab tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2735884, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.065544] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-dac37777-1a73-48ad-b915-f5dd061fc5d1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.072700] env[63028]: DEBUG oslo_vmware.api [None req-94c0dc8d-b473-4783-b916-c38d54c14d8a tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Waiting for the task: (returnval){ [ 946.072700] env[63028]: value = "task-2735885" [ 946.072700] env[63028]: _type = "Task" [ 946.072700] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.082232] env[63028]: DEBUG oslo_vmware.api [None req-94c0dc8d-b473-4783-b916-c38d54c14d8a tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735885, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.121152] env[63028]: DEBUG nova.network.neutron [req-65148b68-0446-4f6c-b5ec-b5542dd13133 req-14ce6787-b7d1-404b-aed7-758d23bc3b0f service nova] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] Updated VIF entry in instance network info cache for port b0d7c3ce-e883-4ccc-80ac-87d06acc7bb4. {{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 946.121583] env[63028]: DEBUG nova.network.neutron [req-65148b68-0446-4f6c-b5ec-b5542dd13133 req-14ce6787-b7d1-404b-aed7-758d23bc3b0f service nova] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] Updating instance_info_cache with network_info: [{"id": "b0d7c3ce-e883-4ccc-80ac-87d06acc7bb4", "address": "fa:16:3e:cb:aa:63", "network": {"id": "53257b54-f197-4a4e-94bb-23ad6c6b7353", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-575661432-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.146", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e2304ce21bf141cab94fb6c342653812", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0954fad3-d24d-496c-83e6-a09d3cb556fc", "external-id": "nsx-vlan-transportzone-216", "segmentation_id": 216, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb0d7c3ce-e8", "ovs_interfaceid": "b0d7c3ce-e883-4ccc-80ac-87d06acc7bb4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 946.195055] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-6e8c6f51-983a-4bea-9d02-9ada98e80f7c tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Creating linked-clone VM from snapshot {{(pid=63028) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 946.195371] 
env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-be5782d0-7442-4410-9dae-05f28af52c43 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.205255] env[63028]: DEBUG oslo_vmware.api [None req-6e8c6f51-983a-4bea-9d02-9ada98e80f7c tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Waiting for the task: (returnval){ [ 946.205255] env[63028]: value = "task-2735886" [ 946.205255] env[63028]: _type = "Task" [ 946.205255] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.216148] env[63028]: DEBUG oslo_vmware.api [None req-6e8c6f51-983a-4bea-9d02-9ada98e80f7c tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Task: {'id': task-2735886, 'name': CloneVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.312246] env[63028]: DEBUG oslo_vmware.api [None req-90adb0fe-8343-4ff2-8382-f0dd347b435a tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': task-2735882, 'name': ReconfigVM_Task, 'duration_secs': 0.138722} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.312698] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-90adb0fe-8343-4ff2-8382-f0dd347b435a tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: a50e1167-d8ed-4099-83c3-a5066ab0be1f] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-550804', 'volume_id': '0e21440a-90bd-4920-bda0-bdf25396cbd3', 'name': 'volume-0e21440a-90bd-4920-bda0-bdf25396cbd3', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'a50e1167-d8ed-4099-83c3-a5066ab0be1f', 'attached_at': '', 'detached_at': '', 'volume_id': '0e21440a-90bd-4920-bda0-bdf25396cbd3', 'serial': '0e21440a-90bd-4920-bda0-bdf25396cbd3'} {{(pid=63028) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 946.398198] env[63028]: DEBUG oslo_vmware.api [None req-f8fd64ce-97ac-48ac-9f55-22966efb1559 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2735883, 'name': ReconfigVM_Task, 'duration_secs': 0.180377} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.398519] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-f8fd64ce-97ac-48ac-9f55-22966efb1559 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: ed872f21-c2c4-4597-8c9e-9f8d2202b707] Reconfigured VM instance instance-0000004c to detach disk 2000 {{(pid=63028) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 946.399373] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f77b9fc-dbe6-4dbc-a976-76f147e2f0db {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.422643] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-f8fd64ce-97ac-48ac-9f55-22966efb1559 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: ed872f21-c2c4-4597-8c9e-9f8d2202b707] Reconfiguring VM instance instance-0000004c to attach disk [datastore2] ed872f21-c2c4-4597-8c9e-9f8d2202b707/ed872f21-c2c4-4597-8c9e-9f8d2202b707.vmdk or device None with type thin {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 946.423054] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-33ca2969-f293-4d13-9e2f-8e58767adaf2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.441893] env[63028]: DEBUG oslo_vmware.api [None req-f8fd64ce-97ac-48ac-9f55-22966efb1559 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Waiting for the task: (returnval){ [ 946.441893] env[63028]: value = "task-2735887" [ 946.441893] env[63028]: _type = "Task" [ 946.441893] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.450298] env[63028]: DEBUG oslo_vmware.api [None req-f8fd64ce-97ac-48ac-9f55-22966efb1559 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2735887, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.478593] env[63028]: DEBUG nova.compute.utils [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 946.482384] env[63028]: DEBUG nova.compute.manager [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: bb2b405e-6207-4718-9485-0271d26c160f] Allocating IP information in the background. 
{{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 946.482549] env[63028]: DEBUG nova.network.neutron [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: bb2b405e-6207-4718-9485-0271d26c160f] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 946.547466] env[63028]: DEBUG nova.policy [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4a320a8f1bd546eba4d3549e9f77a1be', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b487a53457f64597a0dffc76fcdde6b5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 946.563946] env[63028]: DEBUG oslo_vmware.api [None req-9afa90ca-b83c-440d-b8db-5679bf6e1fab tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2735884, 'name': PowerOffVM_Task, 'duration_secs': 0.180458} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.564284] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-9afa90ca-b83c-440d-b8db-5679bf6e1fab tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 946.564520] env[63028]: DEBUG nova.compute.manager [None req-9afa90ca-b83c-440d-b8db-5679bf6e1fab tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 946.565417] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2da5830b-80e6-491d-a63c-4b5b732464bf {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.582189] env[63028]: DEBUG oslo_vmware.api [None req-94c0dc8d-b473-4783-b916-c38d54c14d8a tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735885, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.624724] env[63028]: DEBUG oslo_concurrency.lockutils [req-65148b68-0446-4f6c-b5ec-b5542dd13133 req-14ce6787-b7d1-404b-aed7-758d23bc3b0f service nova] Releasing lock "refresh_cache-8bb61bfa-d44e-4e06-867a-445d9b3db660" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 946.716861] env[63028]: DEBUG oslo_vmware.api [None req-6e8c6f51-983a-4bea-9d02-9ada98e80f7c tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Task: {'id': task-2735886, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.923523] env[63028]: DEBUG nova.network.neutron [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: bb2b405e-6207-4718-9485-0271d26c160f] Successfully created port: a9da993b-1d38-4109-b090-f639a239a687 {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 946.953025] env[63028]: DEBUG oslo_vmware.api [None req-f8fd64ce-97ac-48ac-9f55-22966efb1559 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2735887, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.983738] env[63028]: DEBUG nova.compute.manager [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: bb2b405e-6207-4718-9485-0271d26c160f] Start building block device mappings for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 947.085518] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9afa90ca-b83c-440d-b8db-5679bf6e1fab tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Lock "c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.068s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 947.094909] env[63028]: DEBUG oslo_vmware.api [None req-94c0dc8d-b473-4783-b916-c38d54c14d8a tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735885, 'name': PowerOnVM_Task, 'duration_secs': 0.912144} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.095996] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-94c0dc8d-b473-4783-b916-c38d54c14d8a tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: 672695c2-06f3-4790-a459-4b575baf29d3] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 947.095996] env[63028]: INFO nova.compute.manager [None req-94c0dc8d-b473-4783-b916-c38d54c14d8a tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: 672695c2-06f3-4790-a459-4b575baf29d3] Took 9.82 seconds to spawn the instance on the hypervisor. 
[ 947.095996] env[63028]: DEBUG nova.compute.manager [None req-94c0dc8d-b473-4783-b916-c38d54c14d8a tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: 672695c2-06f3-4790-a459-4b575baf29d3] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 947.096671] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54611396-2d2f-4c89-9561-1758000b30e5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.217096] env[63028]: DEBUG oslo_vmware.api [None req-6e8c6f51-983a-4bea-9d02-9ada98e80f7c tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Task: {'id': task-2735886, 'name': CloneVM_Task} progress is 94%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.376270] env[63028]: DEBUG nova.objects.instance [None req-90adb0fe-8343-4ff2-8382-f0dd347b435a tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Lazy-loading 'flavor' on Instance uuid a50e1167-d8ed-4099-83c3-a5066ab0be1f {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 947.437936] env[63028]: DEBUG nova.objects.instance [None req-c07d26e6-05ba-4d73-804d-5fdcd0410afc tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Lazy-loading 'flavor' on Instance uuid c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 947.440647] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e6e7f61-85c5-4e5c-8c48-567f4e451893 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.456896] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18bbba3a-1d66-465a-9e58-9ef62c9e54a2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.460414] env[63028]: DEBUG oslo_vmware.api [None req-f8fd64ce-97ac-48ac-9f55-22966efb1559 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2735887, 'name': ReconfigVM_Task, 'duration_secs': 0.530014} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.461353] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-f8fd64ce-97ac-48ac-9f55-22966efb1559 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: ed872f21-c2c4-4597-8c9e-9f8d2202b707] Reconfigured VM instance instance-0000004c to attach disk [datastore2] ed872f21-c2c4-4597-8c9e-9f8d2202b707/ed872f21-c2c4-4597-8c9e-9f8d2202b707.vmdk or device None with type thin {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 947.461655] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-f8fd64ce-97ac-48ac-9f55-22966efb1559 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: ed872f21-c2c4-4597-8c9e-9f8d2202b707] Updating instance 'ed872f21-c2c4-4597-8c9e-9f8d2202b707' progress to 50 {{(pid=63028) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 947.499111] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c300d93b-5c83-469d-9b46-ef8f235259ef {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.504610] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e44f64d-54c1-41d2-a1fd-69fb749ac603 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.241332] env[63028]: DEBUG nova.compute.manager [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: bb2b405e-6207-4718-9485-0271d26c160f] Start spawning the instance on the hypervisor. 
{{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 948.247147] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41839415-4d3d-4e11-94b2-c727e1a4f9ba {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.250080] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c07d26e6-05ba-4d73-804d-5fdcd0410afc tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Acquiring lock "refresh_cache-c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 948.250241] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c07d26e6-05ba-4d73-804d-5fdcd0410afc tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Acquired lock "refresh_cache-c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 948.250401] env[63028]: DEBUG nova.network.neutron [None req-c07d26e6-05ba-4d73-804d-5fdcd0410afc tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 948.250562] env[63028]: DEBUG nova.objects.instance [None req-c07d26e6-05ba-4d73-804d-5fdcd0410afc tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Lazy-loading 'info_cache' on Instance uuid c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 948.251816] env[63028]: DEBUG oslo_concurrency.lockutils [None req-90adb0fe-8343-4ff2-8382-f0dd347b435a tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Lock "a50e1167-d8ed-4099-83c3-a5066ab0be1f" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.688s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 948.261926] env[63028]: DEBUG nova.compute.provider_tree [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 948.267076] env[63028]: DEBUG oslo_vmware.api [None req-6e8c6f51-983a-4bea-9d02-9ada98e80f7c tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Task: {'id': task-2735886, 'name': CloneVM_Task, 'duration_secs': 1.512244} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.280531] env[63028]: INFO nova.virt.vmwareapi.vmops [None req-6e8c6f51-983a-4bea-9d02-9ada98e80f7c tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Created linked-clone VM from snapshot [ 948.282448] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6baa1128-33b9-4b8c-9b3c-d0329aaa7212 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.285436] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb0fd5bf-42d7-4601-b32b-4e902ede88cc {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.307246] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-f8fd64ce-97ac-48ac-9f55-22966efb1559 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: ed872f21-c2c4-4597-8c9e-9f8d2202b707] Updating instance 'ed872f21-c2c4-4597-8c9e-9f8d2202b707' progress to 67 {{(pid=63028) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 948.310655] env[63028]: DEBUG nova.virt.vmwareapi.images [None req-6e8c6f51-983a-4bea-9d02-9ada98e80f7c tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Uploading image 398e3010-a42d-4c4b-8604-11f5a3e99cff {{(pid=63028) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 948.323124] env[63028]: INFO nova.compute.manager [None req-94c0dc8d-b473-4783-b916-c38d54c14d8a tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: 672695c2-06f3-4790-a459-4b575baf29d3] Took 36.60 seconds to build instance. 
[ 948.332442] env[63028]: DEBUG nova.virt.hardware [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 948.332692] env[63028]: DEBUG nova.virt.hardware [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 948.332867] env[63028]: DEBUG nova.virt.hardware [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 948.333150] env[63028]: DEBUG nova.virt.hardware [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 948.333197] env[63028]: DEBUG nova.virt.hardware [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 948.333340] env[63028]: DEBUG nova.virt.hardware [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 948.333545] env[63028]: DEBUG nova.virt.hardware [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 948.333707] env[63028]: DEBUG nova.virt.hardware [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 948.333901] env[63028]: DEBUG nova.virt.hardware [None 
req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 948.334082] env[63028]: DEBUG nova.virt.hardware [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 948.334248] env[63028]: DEBUG nova.virt.hardware [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 948.336569] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e216893c-d03d-4bbc-ab16-f3fc84e98733 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.346686] env[63028]: DEBUG oslo_vmware.rw_handles [None req-6e8c6f51-983a-4bea-9d02-9ada98e80f7c tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 948.346686] env[63028]: value = "vm-550806" [ 948.346686] env[63028]: _type = "VirtualMachine" [ 948.346686] env[63028]: }. {{(pid=63028) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 948.347941] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02d880a0-beba-4229-81c5-02cc7eac2295 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.352453] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-4911249b-6664-4fd5-8413-ad678a273870 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.366186] env[63028]: DEBUG oslo_vmware.rw_handles [None req-6e8c6f51-983a-4bea-9d02-9ada98e80f7c tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Lease: (returnval){ [ 948.366186] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52a3bd6f-74cc-9ab1-e5b4-c3bb222cebaa" [ 948.366186] env[63028]: _type = "HttpNfcLease" [ 948.366186] env[63028]: } obtained for exporting VM: (result){ [ 948.366186] env[63028]: value = "vm-550806" [ 948.366186] env[63028]: _type = "VirtualMachine" [ 948.366186] env[63028]: }. {{(pid=63028) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 948.366483] env[63028]: DEBUG oslo_vmware.api [None req-6e8c6f51-983a-4bea-9d02-9ada98e80f7c tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Waiting for the lease: (returnval){ [ 948.366483] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52a3bd6f-74cc-9ab1-e5b4-c3bb222cebaa" [ 948.366483] env[63028]: _type = "HttpNfcLease" [ 948.366483] env[63028]: } to be ready. 
{{(pid=63028) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 948.372571] env[63028]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 948.372571] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52a3bd6f-74cc-9ab1-e5b4-c3bb222cebaa" [ 948.372571] env[63028]: _type = "HttpNfcLease" [ 948.372571] env[63028]: } is ready. {{(pid=63028) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 948.372806] env[63028]: DEBUG oslo_vmware.rw_handles [None req-6e8c6f51-983a-4bea-9d02-9ada98e80f7c tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 948.372806] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52a3bd6f-74cc-9ab1-e5b4-c3bb222cebaa" [ 948.372806] env[63028]: _type = "HttpNfcLease" [ 948.372806] env[63028]: }. {{(pid=63028) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 948.374048] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a381ea6-18e4-46d0-9248-4e93b5f52c86 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.380281] env[63028]: DEBUG oslo_vmware.rw_handles [None req-6e8c6f51-983a-4bea-9d02-9ada98e80f7c tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52cf1dba-0769-c600-43cc-83e0eb55a832/disk-0.vmdk from lease info. {{(pid=63028) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 948.380448] env[63028]: DEBUG oslo_vmware.rw_handles [None req-6e8c6f51-983a-4bea-9d02-9ada98e80f7c tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52cf1dba-0769-c600-43cc-83e0eb55a832/disk-0.vmdk for reading. 
{{(pid=63028) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 948.472194] env[63028]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-5a03c323-db88-4871-90e4-14b6ff662dcf {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.563941] env[63028]: DEBUG nova.network.neutron [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: bb2b405e-6207-4718-9485-0271d26c160f] Successfully updated port: a9da993b-1d38-4109-b090-f639a239a687 {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 948.754881] env[63028]: DEBUG nova.objects.base [None req-c07d26e6-05ba-4d73-804d-5fdcd0410afc tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=63028) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 948.768742] env[63028]: DEBUG nova.scheduler.client.report [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 948.825553] env[63028]: DEBUG oslo_concurrency.lockutils [None req-94c0dc8d-b473-4783-b916-c38d54c14d8a tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Lock "672695c2-06f3-4790-a459-4b575baf29d3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 37.638s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 948.862867] env[63028]: DEBUG nova.network.neutron [None req-f8fd64ce-97ac-48ac-9f55-22966efb1559 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: ed872f21-c2c4-4597-8c9e-9f8d2202b707] Port cd8436f9-6412-468e-bd24-f9d845d3ca21 binding to destination host cpu-1 is already ACTIVE {{(pid=63028) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 948.935496] env[63028]: DEBUG nova.compute.manager [req-a956fda2-4cd3-4ea4-9756-3b6fc2550588 req-db7c7ed3-87f8-44a4-bba2-82bd94648bb1 service nova] [instance: bb2b405e-6207-4718-9485-0271d26c160f] Received event network-vif-plugged-a9da993b-1d38-4109-b090-f639a239a687 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 948.936400] env[63028]: DEBUG oslo_concurrency.lockutils [req-a956fda2-4cd3-4ea4-9756-3b6fc2550588 req-db7c7ed3-87f8-44a4-bba2-82bd94648bb1 service nova] Acquiring lock "bb2b405e-6207-4718-9485-0271d26c160f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 948.937105] env[63028]: DEBUG 
oslo_concurrency.lockutils [req-a956fda2-4cd3-4ea4-9756-3b6fc2550588 req-db7c7ed3-87f8-44a4-bba2-82bd94648bb1 service nova] Lock "bb2b405e-6207-4718-9485-0271d26c160f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 948.937297] env[63028]: DEBUG oslo_concurrency.lockutils [req-a956fda2-4cd3-4ea4-9756-3b6fc2550588 req-db7c7ed3-87f8-44a4-bba2-82bd94648bb1 service nova] Lock "bb2b405e-6207-4718-9485-0271d26c160f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 948.937564] env[63028]: DEBUG nova.compute.manager [req-a956fda2-4cd3-4ea4-9756-3b6fc2550588 req-db7c7ed3-87f8-44a4-bba2-82bd94648bb1 service nova] [instance: bb2b405e-6207-4718-9485-0271d26c160f] No waiting events found dispatching network-vif-plugged-a9da993b-1d38-4109-b090-f639a239a687 {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 948.937812] env[63028]: WARNING nova.compute.manager [req-a956fda2-4cd3-4ea4-9756-3b6fc2550588 req-db7c7ed3-87f8-44a4-bba2-82bd94648bb1 service nova] [instance: bb2b405e-6207-4718-9485-0271d26c160f] Received unexpected event network-vif-plugged-a9da993b-1d38-4109-b090-f639a239a687 for instance with vm_state building and task_state spawning. [ 949.066869] env[63028]: DEBUG oslo_concurrency.lockutils [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Acquiring lock "refresh_cache-bb2b405e-6207-4718-9485-0271d26c160f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 949.067301] env[63028]: DEBUG oslo_concurrency.lockutils [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Acquired lock "refresh_cache-bb2b405e-6207-4718-9485-0271d26c160f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 949.069238] env[63028]: DEBUG nova.network.neutron [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: bb2b405e-6207-4718-9485-0271d26c160f] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 949.274318] env[63028]: DEBUG oslo_concurrency.lockutils [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.303s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 949.274840] env[63028]: DEBUG nova.compute.manager [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: a97224e8-d69b-4c62-ab96-7cef037ef39b] Start building networks asynchronously for instance. 
{{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 949.278171] env[63028]: DEBUG oslo_concurrency.lockutils [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.255s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 949.279568] env[63028]: INFO nova.compute.claims [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: e7ba7069-fe9f-4e0f-acc6-c1d61ad5a8d9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 949.514387] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a617f197-03a9-4ac7-af76-ff750a350c76 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Acquiring lock "c0693e4c-30b2-4eda-be1e-f6186d78038b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 949.515121] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a617f197-03a9-4ac7-af76-ff750a350c76 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Lock "c0693e4c-30b2-4eda-be1e-f6186d78038b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 949.550115] env[63028]: DEBUG nova.network.neutron [None req-c07d26e6-05ba-4d73-804d-5fdcd0410afc tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7] Updating instance_info_cache with network_info: [{"id": "6ecb125b-389c-4dce-8446-368a7298e497", "address": "fa:16:3e:f4:06:c4", "network": {"id": "ec7daf90-9857-4c67-8588-1d8404499409", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-574413795-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.163", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e85128c5c889438bbb1df571b7756c6a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e1a5c1-4ae7-409b-8de7-d401684ef60d", "external-id": "nsx-vlan-transportzone-740", "segmentation_id": 740, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6ecb125b-38", "ovs_interfaceid": "6ecb125b-389c-4dce-8446-368a7298e497", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 949.603467] env[63028]: DEBUG nova.network.neutron [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 
tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: bb2b405e-6207-4718-9485-0271d26c160f] Instance cache missing network info. {{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 949.615057] env[63028]: DEBUG nova.compute.manager [None req-1ed24b34-6e66-4139-ac39-4e53ae41d1cf tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: 672695c2-06f3-4790-a459-4b575baf29d3] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 949.616124] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39e0091b-ff44-4dab-b50c-e01a2270308c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.747419] env[63028]: DEBUG nova.network.neutron [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: bb2b405e-6207-4718-9485-0271d26c160f] Updating instance_info_cache with network_info: [{"id": "a9da993b-1d38-4109-b090-f639a239a687", "address": "fa:16:3e:d3:f8:61", "network": {"id": "02ae797e-2c46-44a6-a214-b8c07a5f0b5f", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-72321087-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b487a53457f64597a0dffc76fcdde6b5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "849fc06e-dfc2-470f-8490-034590682ea7", "external-id": "nsx-vlan-transportzone-567", "segmentation_id": 567, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa9da993b-1d", "ovs_interfaceid": "a9da993b-1d38-4109-b090-f639a239a687", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 949.779919] env[63028]: DEBUG nova.compute.utils [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 949.781570] env[63028]: DEBUG nova.compute.manager [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: a97224e8-d69b-4c62-ab96-7cef037ef39b] Allocating IP information in the background. 
{{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 949.781974] env[63028]: DEBUG nova.network.neutron [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: a97224e8-d69b-4c62-ab96-7cef037ef39b] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 949.825704] env[63028]: DEBUG nova.policy [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4a320a8f1bd546eba4d3549e9f77a1be', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b487a53457f64597a0dffc76fcdde6b5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 949.890809] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f8fd64ce-97ac-48ac-9f55-22966efb1559 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Acquiring lock "ed872f21-c2c4-4597-8c9e-9f8d2202b707-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 949.891201] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f8fd64ce-97ac-48ac-9f55-22966efb1559 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Lock "ed872f21-c2c4-4597-8c9e-9f8d2202b707-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 949.891545] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f8fd64ce-97ac-48ac-9f55-22966efb1559 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Lock "ed872f21-c2c4-4597-8c9e-9f8d2202b707-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 950.019107] env[63028]: DEBUG nova.compute.manager [None req-a617f197-03a9-4ac7-af76-ff750a350c76 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: c0693e4c-30b2-4eda-be1e-f6186d78038b] Starting instance... 
{{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 950.053860] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c07d26e6-05ba-4d73-804d-5fdcd0410afc tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Releasing lock "refresh_cache-c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 950.130379] env[63028]: INFO nova.compute.manager [None req-1ed24b34-6e66-4139-ac39-4e53ae41d1cf tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: 672695c2-06f3-4790-a459-4b575baf29d3] instance snapshotting [ 950.133785] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3347ea2f-97e4-4438-9ac3-7372487039b0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.138205] env[63028]: DEBUG nova.network.neutron [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: a97224e8-d69b-4c62-ab96-7cef037ef39b] Successfully created port: bd7b8218-749b-4f6a-be35-834767306b21 {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 950.157957] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e37c89e4-d752-44c5-b783-193b40330be6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.250283] env[63028]: DEBUG oslo_concurrency.lockutils [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Releasing lock "refresh_cache-bb2b405e-6207-4718-9485-0271d26c160f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 950.251033] env[63028]: DEBUG nova.compute.manager [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: bb2b405e-6207-4718-9485-0271d26c160f] Instance network_info: |[{"id": "a9da993b-1d38-4109-b090-f639a239a687", "address": "fa:16:3e:d3:f8:61", "network": {"id": "02ae797e-2c46-44a6-a214-b8c07a5f0b5f", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-72321087-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b487a53457f64597a0dffc76fcdde6b5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "849fc06e-dfc2-470f-8490-034590682ea7", "external-id": "nsx-vlan-transportzone-567", "segmentation_id": 567, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa9da993b-1d", "ovs_interfaceid": "a9da993b-1d38-4109-b090-f639a239a687", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2003}} [ 950.251212] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: bb2b405e-6207-4718-9485-0271d26c160f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d3:f8:61', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '849fc06e-dfc2-470f-8490-034590682ea7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a9da993b-1d38-4109-b090-f639a239a687', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 950.260481] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Creating folder: Project (b487a53457f64597a0dffc76fcdde6b5). Parent ref: group-v550570. {{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 950.261244] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5ff6d2c4-e4bb-42bb-8962-66fe5b22e008 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.275626] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Created folder: Project (b487a53457f64597a0dffc76fcdde6b5) in parent group-v550570. [ 950.275626] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Creating folder: Instances. Parent ref: group-v550807. {{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 950.276347] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-51e443ac-f9da-4b84-ab45-6ab451ac1eb6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.286016] env[63028]: DEBUG nova.compute.manager [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: a97224e8-d69b-4c62-ab96-7cef037ef39b] Start building block device mappings for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 950.288867] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Created folder: Instances in parent group-v550807. [ 950.289322] env[63028]: DEBUG oslo.service.loopingcall [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 950.292771] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bb2b405e-6207-4718-9485-0271d26c160f] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 950.299019] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1d044096-f8f8-4112-b63e-5f6076cb0b43 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.329948] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 950.329948] env[63028]: value = "task-2735891" [ 950.329948] env[63028]: _type = "Task" [ 950.329948] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.342494] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735891, 'name': CreateVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.544490] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a617f197-03a9-4ac7-af76-ff750a350c76 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 950.669683] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-1ed24b34-6e66-4139-ac39-4e53ae41d1cf tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: 672695c2-06f3-4790-a459-4b575baf29d3] Creating Snapshot of the VM instance {{(pid=63028) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 950.670504] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-1d9e122a-31f3-4f37-a2d2-9b2334fc51a4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.680067] env[63028]: DEBUG oslo_vmware.api [None req-1ed24b34-6e66-4139-ac39-4e53ae41d1cf tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Waiting for the task: (returnval){ [ 950.680067] env[63028]: value = "task-2735892" [ 950.680067] env[63028]: _type = "Task" [ 950.680067] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.688922] env[63028]: DEBUG oslo_vmware.api [None req-1ed24b34-6e66-4139-ac39-4e53ae41d1cf tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735892, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.727863] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5064008f-85da-48ed-b8c2-d8fc1ca022b7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.736193] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80b9d7aa-82d5-4923-8792-8971c312a8d2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.768046] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4150e86-0614-4241-825a-7283e21dad19 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.775374] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26fbb5ec-417b-4ef6-89d2-a157e7d08b57 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.789304] env[63028]: DEBUG nova.compute.provider_tree [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 950.843250] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735891, 'name': CreateVM_Task, 'duration_secs': 0.345007} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.843525] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bb2b405e-6207-4718-9485-0271d26c160f] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 950.844617] env[63028]: DEBUG oslo_concurrency.lockutils [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 950.844717] env[63028]: DEBUG oslo_concurrency.lockutils [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 950.845124] env[63028]: DEBUG oslo_concurrency.lockutils [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 950.845412] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-acdbe5f5-7847-4d1a-a8a9-f9ea61e51002 {{(pid=63028) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.851056] env[63028]: DEBUG oslo_vmware.api [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Waiting for the task: (returnval){ [ 950.851056] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52a291a6-8695-435f-38ac-f38ba8e8f42f" [ 950.851056] env[63028]: _type = "Task" [ 950.851056] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.861684] env[63028]: DEBUG oslo_vmware.api [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52a291a6-8695-435f-38ac-f38ba8e8f42f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.941761] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f8fd64ce-97ac-48ac-9f55-22966efb1559 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Acquiring lock "refresh_cache-ed872f21-c2c4-4597-8c9e-9f8d2202b707" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 950.942245] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f8fd64ce-97ac-48ac-9f55-22966efb1559 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Acquired lock "refresh_cache-ed872f21-c2c4-4597-8c9e-9f8d2202b707" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 950.942455] env[63028]: DEBUG nova.network.neutron [None req-f8fd64ce-97ac-48ac-9f55-22966efb1559 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: ed872f21-c2c4-4597-8c9e-9f8d2202b707] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 950.971088] env[63028]: DEBUG nova.compute.manager [req-af816cc7-b527-429a-8bd4-7d9c12ea4b70 req-485514fd-c122-4422-8665-177a70cf5da8 service nova] [instance: bb2b405e-6207-4718-9485-0271d26c160f] Received event network-changed-a9da993b-1d38-4109-b090-f639a239a687 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 950.971320] env[63028]: DEBUG nova.compute.manager [req-af816cc7-b527-429a-8bd4-7d9c12ea4b70 req-485514fd-c122-4422-8665-177a70cf5da8 service nova] [instance: bb2b405e-6207-4718-9485-0271d26c160f] Refreshing instance network info cache due to event network-changed-a9da993b-1d38-4109-b090-f639a239a687. 
{{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 950.971599] env[63028]: DEBUG oslo_concurrency.lockutils [req-af816cc7-b527-429a-8bd4-7d9c12ea4b70 req-485514fd-c122-4422-8665-177a70cf5da8 service nova] Acquiring lock "refresh_cache-bb2b405e-6207-4718-9485-0271d26c160f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 950.971790] env[63028]: DEBUG oslo_concurrency.lockutils [req-af816cc7-b527-429a-8bd4-7d9c12ea4b70 req-485514fd-c122-4422-8665-177a70cf5da8 service nova] Acquired lock "refresh_cache-bb2b405e-6207-4718-9485-0271d26c160f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 950.971975] env[63028]: DEBUG nova.network.neutron [req-af816cc7-b527-429a-8bd4-7d9c12ea4b70 req-485514fd-c122-4422-8665-177a70cf5da8 service nova] [instance: bb2b405e-6207-4718-9485-0271d26c160f] Refreshing network info cache for port a9da993b-1d38-4109-b090-f639a239a687 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 951.061861] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-c07d26e6-05ba-4d73-804d-5fdcd0410afc tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 951.062287] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b2a2c5a9-2fc1-4e1e-859b-a175d60228f3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.070714] env[63028]: DEBUG oslo_vmware.api [None req-c07d26e6-05ba-4d73-804d-5fdcd0410afc tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Waiting for the task: (returnval){ [ 951.070714] env[63028]: value = "task-2735893" [ 951.070714] env[63028]: _type = "Task" [ 951.070714] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.079065] env[63028]: DEBUG oslo_vmware.api [None req-c07d26e6-05ba-4d73-804d-5fdcd0410afc tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2735893, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.193010] env[63028]: DEBUG oslo_vmware.api [None req-1ed24b34-6e66-4139-ac39-4e53ae41d1cf tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735892, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.292460] env[63028]: DEBUG nova.scheduler.client.report [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 951.299228] env[63028]: DEBUG nova.compute.manager [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: a97224e8-d69b-4c62-ab96-7cef037ef39b] Start spawning the instance on the hypervisor. {{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 951.328994] env[63028]: DEBUG nova.virt.hardware [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 951.329253] env[63028]: DEBUG nova.virt.hardware [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 951.329455] env[63028]: DEBUG nova.virt.hardware [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 951.329672] env[63028]: DEBUG nova.virt.hardware [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 951.329916] env[63028]: DEBUG nova.virt.hardware [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:396}} [ 951.330031] env[63028]: DEBUG nova.virt.hardware [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 951.330304] env[63028]: DEBUG nova.virt.hardware [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 951.330497] env[63028]: DEBUG nova.virt.hardware [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 951.330685] env[63028]: DEBUG nova.virt.hardware [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 951.333913] env[63028]: DEBUG nova.virt.hardware [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 951.333913] env[63028]: DEBUG nova.virt.hardware [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 951.333913] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a2aef16-f8e6-4c75-8796-c93d1284704a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.342217] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a023bd6c-b0fc-4807-ba7a-0c4ddf3c0de3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.364721] env[63028]: DEBUG oslo_vmware.api [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52a291a6-8695-435f-38ac-f38ba8e8f42f, 'name': SearchDatastore_Task, 'duration_secs': 0.011437} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.365033] env[63028]: DEBUG oslo_concurrency.lockutils [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 951.365282] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: bb2b405e-6207-4718-9485-0271d26c160f] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 951.365516] env[63028]: DEBUG oslo_concurrency.lockutils [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 951.365661] env[63028]: DEBUG oslo_concurrency.lockutils [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 951.365837] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 951.366405] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3c0cbfea-4541-4cd9-b659-59fd592eb948 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.374555] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 951.374818] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 951.375671] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2c854bb1-491d-4197-9232-5145c1f96e27 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.381237] env[63028]: DEBUG oslo_vmware.api [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Waiting for the task: (returnval){ [ 951.381237] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52628316-82d0-8c90-7de3-4197cf07ba41" [ 951.381237] env[63028]: _type = "Task" [ 951.381237] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.389027] env[63028]: DEBUG oslo_vmware.api [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52628316-82d0-8c90-7de3-4197cf07ba41, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.582536] env[63028]: DEBUG oslo_vmware.api [None req-c07d26e6-05ba-4d73-804d-5fdcd0410afc tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2735893, 'name': PowerOnVM_Task, 'duration_secs': 0.415194} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.582851] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-c07d26e6-05ba-4d73-804d-5fdcd0410afc tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 951.583075] env[63028]: DEBUG nova.compute.manager [None req-c07d26e6-05ba-4d73-804d-5fdcd0410afc tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 951.583866] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b10002ac-f3ef-49b8-8d05-a9df2f399299 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.696118] env[63028]: DEBUG oslo_vmware.api [None req-1ed24b34-6e66-4139-ac39-4e53ae41d1cf tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735892, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.726179] env[63028]: DEBUG nova.network.neutron [None req-f8fd64ce-97ac-48ac-9f55-22966efb1559 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: ed872f21-c2c4-4597-8c9e-9f8d2202b707] Updating instance_info_cache with network_info: [{"id": "cd8436f9-6412-468e-bd24-f9d845d3ca21", "address": "fa:16:3e:ff:70:93", "network": {"id": "e8109779-7eb7-4751-b6e5-c1e3b007cb9a", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1892394824-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "05118b378b5e4d838962db2378b381bc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5446413d-c3b0-4cd2-a962-62240db178ac", "external-id": "nsx-vlan-transportzone-528", "segmentation_id": 528, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcd8436f9-64", "ovs_interfaceid": "cd8436f9-6412-468e-bd24-f9d845d3ca21", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 951.797948] env[63028]: DEBUG oslo_concurrency.lockutils [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.520s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 951.798531] env[63028]: DEBUG nova.compute.manager [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: e7ba7069-fe9f-4e0f-acc6-c1d61ad5a8d9] Start building networks asynchronously for instance. 
{{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 951.801669] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5bb9e4c9-b947-4815-9882-bdf6878d6f4d tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.816s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 951.801878] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5bb9e4c9-b947-4815-9882-bdf6878d6f4d tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 951.804625] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a99bd000-6ba3-407c-966a-91a9728c4438 tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.754s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 951.804865] env[63028]: DEBUG nova.objects.instance [None req-a99bd000-6ba3-407c-966a-91a9728c4438 tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] Lazy-loading 'resources' on Instance uuid 600195de-ceb4-41a6-9ade-dda8b898e4db {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 951.829470] env[63028]: INFO nova.scheduler.client.report [None req-5bb9e4c9-b947-4815-9882-bdf6878d6f4d tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Deleted allocations for instance c06813c4-472d-4bf9-84ec-0d01306bcd48 [ 951.856687] env[63028]: DEBUG nova.network.neutron [req-af816cc7-b527-429a-8bd4-7d9c12ea4b70 req-485514fd-c122-4422-8665-177a70cf5da8 service nova] [instance: bb2b405e-6207-4718-9485-0271d26c160f] Updated VIF entry in instance network info cache for port a9da993b-1d38-4109-b090-f639a239a687. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 951.857093] env[63028]: DEBUG nova.network.neutron [req-af816cc7-b527-429a-8bd4-7d9c12ea4b70 req-485514fd-c122-4422-8665-177a70cf5da8 service nova] [instance: bb2b405e-6207-4718-9485-0271d26c160f] Updating instance_info_cache with network_info: [{"id": "a9da993b-1d38-4109-b090-f639a239a687", "address": "fa:16:3e:d3:f8:61", "network": {"id": "02ae797e-2c46-44a6-a214-b8c07a5f0b5f", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-72321087-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b487a53457f64597a0dffc76fcdde6b5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "849fc06e-dfc2-470f-8490-034590682ea7", "external-id": "nsx-vlan-transportzone-567", "segmentation_id": 567, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa9da993b-1d", "ovs_interfaceid": "a9da993b-1d38-4109-b090-f639a239a687", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 951.893297] env[63028]: DEBUG oslo_vmware.api [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52628316-82d0-8c90-7de3-4197cf07ba41, 'name': SearchDatastore_Task, 'duration_secs': 0.017273} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.894113] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-41053113-fd6e-4639-843f-63e4d2be3c62 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.900235] env[63028]: DEBUG oslo_vmware.api [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Waiting for the task: (returnval){ [ 951.900235] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]526a20e4-32d1-6661-9a2f-357eea711641" [ 951.900235] env[63028]: _type = "Task" [ 951.900235] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.908795] env[63028]: DEBUG oslo_vmware.api [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]526a20e4-32d1-6661-9a2f-357eea711641, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.961039] env[63028]: DEBUG nova.network.neutron [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: a97224e8-d69b-4c62-ab96-7cef037ef39b] Successfully updated port: bd7b8218-749b-4f6a-be35-834767306b21 {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 952.191827] env[63028]: DEBUG oslo_vmware.api [None req-1ed24b34-6e66-4139-ac39-4e53ae41d1cf tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735892, 'name': CreateSnapshot_Task, 'duration_secs': 1.046184} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.192219] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-1ed24b34-6e66-4139-ac39-4e53ae41d1cf tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: 672695c2-06f3-4790-a459-4b575baf29d3] Created Snapshot of the VM instance {{(pid=63028) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 952.193054] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed36dcf7-3d58-485f-98ab-8db68d101b91 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.229765] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f8fd64ce-97ac-48ac-9f55-22966efb1559 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Releasing lock "refresh_cache-ed872f21-c2c4-4597-8c9e-9f8d2202b707" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 952.308406] env[63028]: DEBUG nova.compute.utils [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 952.312847] env[63028]: DEBUG nova.compute.manager [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: e7ba7069-fe9f-4e0f-acc6-c1d61ad5a8d9] Allocating IP information in the background. 
{{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 952.313119] env[63028]: DEBUG nova.network.neutron [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: e7ba7069-fe9f-4e0f-acc6-c1d61ad5a8d9] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 952.336589] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5bb9e4c9-b947-4815-9882-bdf6878d6f4d tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Lock "c06813c4-472d-4bf9-84ec-0d01306bcd48" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.872s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 952.360932] env[63028]: DEBUG oslo_concurrency.lockutils [req-af816cc7-b527-429a-8bd4-7d9c12ea4b70 req-485514fd-c122-4422-8665-177a70cf5da8 service nova] Releasing lock "refresh_cache-bb2b405e-6207-4718-9485-0271d26c160f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 952.382684] env[63028]: DEBUG nova.policy [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4a320a8f1bd546eba4d3549e9f77a1be', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b487a53457f64597a0dffc76fcdde6b5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 952.410312] env[63028]: DEBUG oslo_vmware.api [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]526a20e4-32d1-6661-9a2f-357eea711641, 'name': SearchDatastore_Task, 'duration_secs': 0.02194} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.410583] env[63028]: DEBUG oslo_concurrency.lockutils [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 952.410854] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] bb2b405e-6207-4718-9485-0271d26c160f/bb2b405e-6207-4718-9485-0271d26c160f.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 952.411184] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-966e036c-c15f-41f1-be3d-156598c61048 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.421429] env[63028]: DEBUG oslo_vmware.api [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Waiting for the task: (returnval){ [ 952.421429] env[63028]: value = "task-2735894" [ 952.421429] env[63028]: _type = "Task" [ 952.421429] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.432919] env[63028]: DEBUG oslo_vmware.api [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Task: {'id': task-2735894, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.466234] env[63028]: DEBUG oslo_concurrency.lockutils [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Acquiring lock "refresh_cache-a97224e8-d69b-4c62-ab96-7cef037ef39b" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 952.466234] env[63028]: DEBUG oslo_concurrency.lockutils [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Acquired lock "refresh_cache-a97224e8-d69b-4c62-ab96-7cef037ef39b" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 952.466600] env[63028]: DEBUG nova.network.neutron [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: a97224e8-d69b-4c62-ab96-7cef037ef39b] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 952.714902] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-1ed24b34-6e66-4139-ac39-4e53ae41d1cf tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: 672695c2-06f3-4790-a459-4b575baf29d3] Creating linked-clone VM from snapshot {{(pid=63028) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 952.719177] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-a96966b7-81ba-48cf-8413-3c02d05bebfb {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.729277] env[63028]: DEBUG oslo_vmware.api [None req-1ed24b34-6e66-4139-ac39-4e53ae41d1cf tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Waiting for the task: (returnval){ [ 952.729277] env[63028]: value = "task-2735895" [ 952.729277] env[63028]: _type = "Task" [ 952.729277] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.735019] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82f4db1c-3a2b-40f1-9715-392bcff390bc {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.746805] env[63028]: DEBUG oslo_vmware.api [None req-1ed24b34-6e66-4139-ac39-4e53ae41d1cf tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735895, 'name': CloneVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.750672] env[63028]: DEBUG nova.network.neutron [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: e7ba7069-fe9f-4e0f-acc6-c1d61ad5a8d9] Successfully created port: 298e34ad-d45e-40ed-aa57-0a93af59226f {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 952.755981] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6432f37-2cbb-4c9e-b98b-9ce30bb7207b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.788766] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0999b6d6-d0d0-43f3-aabb-1ac09e806283 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.792292] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1754324f-67ef-4b94-896a-6cf7ec674a54 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.817019] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-048dfbc9-369e-49e7-9311-1ba3baa6b3c4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.818936] env[63028]: DEBUG nova.compute.manager [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: e7ba7069-fe9f-4e0f-acc6-c1d61ad5a8d9] Start building block device mappings for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 952.822332] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d88cc280-ff8f-4e89-9222-c24c76d19b5c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.838874] env[63028]: DEBUG nova.compute.provider_tree [None req-a99bd000-6ba3-407c-966a-91a9728c4438 tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 952.843440] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-f8fd64ce-97ac-48ac-9f55-22966efb1559 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: ed872f21-c2c4-4597-8c9e-9f8d2202b707] Updating instance 'ed872f21-c2c4-4597-8c9e-9f8d2202b707' progress to 83 {{(pid=63028) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 952.932727] env[63028]: DEBUG oslo_vmware.api [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Task: {'id': task-2735894, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.107481] env[63028]: DEBUG nova.network.neutron [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: a97224e8-d69b-4c62-ab96-7cef037ef39b] Instance cache missing network info. {{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 953.199174] env[63028]: DEBUG nova.compute.manager [req-9ca26e18-46cc-402b-8b1f-7a520ab4f3ff req-ca3b85c9-7028-4091-8b62-80df684ed18d service nova] [instance: a97224e8-d69b-4c62-ab96-7cef037ef39b] Received event network-vif-plugged-bd7b8218-749b-4f6a-be35-834767306b21 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 953.199174] env[63028]: DEBUG oslo_concurrency.lockutils [req-9ca26e18-46cc-402b-8b1f-7a520ab4f3ff req-ca3b85c9-7028-4091-8b62-80df684ed18d service nova] Acquiring lock "a97224e8-d69b-4c62-ab96-7cef037ef39b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 953.199174] env[63028]: DEBUG oslo_concurrency.lockutils [req-9ca26e18-46cc-402b-8b1f-7a520ab4f3ff req-ca3b85c9-7028-4091-8b62-80df684ed18d service nova] Lock "a97224e8-d69b-4c62-ab96-7cef037ef39b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 953.199527] env[63028]: DEBUG oslo_concurrency.lockutils [req-9ca26e18-46cc-402b-8b1f-7a520ab4f3ff req-ca3b85c9-7028-4091-8b62-80df684ed18d service nova] Lock "a97224e8-d69b-4c62-ab96-7cef037ef39b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 953.199777] env[63028]: DEBUG nova.compute.manager [req-9ca26e18-46cc-402b-8b1f-7a520ab4f3ff req-ca3b85c9-7028-4091-8b62-80df684ed18d service nova] [instance: a97224e8-d69b-4c62-ab96-7cef037ef39b] No waiting events found dispatching network-vif-plugged-bd7b8218-749b-4f6a-be35-834767306b21 {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 953.200259] env[63028]: WARNING nova.compute.manager [req-9ca26e18-46cc-402b-8b1f-7a520ab4f3ff req-ca3b85c9-7028-4091-8b62-80df684ed18d service nova] [instance: a97224e8-d69b-4c62-ab96-7cef037ef39b] Received unexpected event network-vif-plugged-bd7b8218-749b-4f6a-be35-834767306b21 for instance with vm_state building and task_state spawning. [ 953.200492] env[63028]: DEBUG nova.compute.manager [req-9ca26e18-46cc-402b-8b1f-7a520ab4f3ff req-ca3b85c9-7028-4091-8b62-80df684ed18d service nova] [instance: a97224e8-d69b-4c62-ab96-7cef037ef39b] Received event network-changed-bd7b8218-749b-4f6a-be35-834767306b21 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 953.200707] env[63028]: DEBUG nova.compute.manager [req-9ca26e18-46cc-402b-8b1f-7a520ab4f3ff req-ca3b85c9-7028-4091-8b62-80df684ed18d service nova] [instance: a97224e8-d69b-4c62-ab96-7cef037ef39b] Refreshing instance network info cache due to event network-changed-bd7b8218-749b-4f6a-be35-834767306b21. 
{{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 953.200886] env[63028]: DEBUG oslo_concurrency.lockutils [req-9ca26e18-46cc-402b-8b1f-7a520ab4f3ff req-ca3b85c9-7028-4091-8b62-80df684ed18d service nova] Acquiring lock "refresh_cache-a97224e8-d69b-4c62-ab96-7cef037ef39b" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 953.225761] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2021c84d-1019-4fb3-b96c-749c278c13a7 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Acquiring lock "d41a1eae-bb89-4222-9466-d86af891c654" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 953.226042] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2021c84d-1019-4fb3-b96c-749c278c13a7 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Lock "d41a1eae-bb89-4222-9466-d86af891c654" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 953.241178] env[63028]: DEBUG oslo_vmware.api [None req-1ed24b34-6e66-4139-ac39-4e53ae41d1cf tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735895, 'name': CloneVM_Task} progress is 94%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.348244] env[63028]: DEBUG nova.scheduler.client.report [None req-a99bd000-6ba3-407c-966a-91a9728c4438 tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 953.353985] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8fd64ce-97ac-48ac-9f55-22966efb1559 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: ed872f21-c2c4-4597-8c9e-9f8d2202b707] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 953.354562] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-61159742-a780-4b4d-b6a7-69b9cbe26358 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.362862] env[63028]: DEBUG oslo_vmware.api [None req-f8fd64ce-97ac-48ac-9f55-22966efb1559 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Waiting for the task: (returnval){ [ 953.362862] env[63028]: value = "task-2735896" [ 953.362862] env[63028]: _type = "Task" [ 953.362862] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.372890] env[63028]: DEBUG oslo_vmware.api [None req-f8fd64ce-97ac-48ac-9f55-22966efb1559 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2735896, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.402449] env[63028]: DEBUG nova.network.neutron [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: a97224e8-d69b-4c62-ab96-7cef037ef39b] Updating instance_info_cache with network_info: [{"id": "bd7b8218-749b-4f6a-be35-834767306b21", "address": "fa:16:3e:e5:ab:9a", "network": {"id": "02ae797e-2c46-44a6-a214-b8c07a5f0b5f", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-72321087-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b487a53457f64597a0dffc76fcdde6b5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "849fc06e-dfc2-470f-8490-034590682ea7", "external-id": "nsx-vlan-transportzone-567", "segmentation_id": 567, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbd7b8218-74", "ovs_interfaceid": "bd7b8218-749b-4f6a-be35-834767306b21", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 953.432127] env[63028]: DEBUG oslo_vmware.api [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Task: {'id': task-2735894, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.774347} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.432422] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] bb2b405e-6207-4718-9485-0271d26c160f/bb2b405e-6207-4718-9485-0271d26c160f.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 953.432636] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: bb2b405e-6207-4718-9485-0271d26c160f] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 953.432919] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9b310efb-9f6e-473b-8805-37d18d2c3560 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.440533] env[63028]: DEBUG oslo_vmware.api [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Waiting for the task: (returnval){ [ 953.440533] env[63028]: value = "task-2735897" [ 953.440533] env[63028]: _type = "Task" [ 953.440533] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.449158] env[63028]: DEBUG oslo_vmware.api [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Task: {'id': task-2735897, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.731234] env[63028]: DEBUG nova.compute.manager [None req-2021c84d-1019-4fb3-b96c-749c278c13a7 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Starting instance... {{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 953.745700] env[63028]: DEBUG oslo_vmware.api [None req-1ed24b34-6e66-4139-ac39-4e53ae41d1cf tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735895, 'name': CloneVM_Task} progress is 94%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.832320] env[63028]: DEBUG nova.compute.manager [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: e7ba7069-fe9f-4e0f-acc6-c1d61ad5a8d9] Start spawning the instance on the hypervisor. 
{{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 953.855750] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a99bd000-6ba3-407c-966a-91a9728c4438 tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.051s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 953.860211] env[63028]: DEBUG nova.virt.hardware [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 953.860484] env[63028]: DEBUG nova.virt.hardware [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 953.860619] env[63028]: DEBUG nova.virt.hardware [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 953.860811] env[63028]: DEBUG nova.virt.hardware [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 953.860980] env[63028]: DEBUG nova.virt.hardware [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 953.861153] env[63028]: DEBUG nova.virt.hardware [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 953.861370] env[63028]: DEBUG nova.virt.hardware [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 953.861534] env[63028]: DEBUG nova.virt.hardware [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 953.861707] env[63028]: DEBUG nova.virt.hardware [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 953.861909] env[63028]: DEBUG nova.virt.hardware [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 953.862131] env[63028]: DEBUG nova.virt.hardware [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 953.862673] env[63028]: DEBUG oslo_concurrency.lockutils [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 24.533s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 953.862864] env[63028]: DEBUG oslo_concurrency.lockutils [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 953.863041] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63028) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 953.863480] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f5af781c-4293-48a4-9764-e0ab14cb4c37 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.470s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 953.863540] env[63028]: DEBUG nova.objects.instance [None req-f5af781c-4293-48a4-9764-e0ab14cb4c37 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Lazy-loading 'resources' on Instance uuid a1d00736-1a8d-46e0-9358-46e848b94797 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 953.865284] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19d590b4-1b7a-4691-97d4-6909a48f44af {{(pid=63028) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.870527] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2243019-84ca-4158-8a9b-74e3a9998883 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.887210] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0aac2384-3c39-4bc2-a8c4-39cb9c7ce168 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.895434] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdd49a11-8751-475f-a038-4a7813fbe4df {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.899336] env[63028]: DEBUG oslo_vmware.api [None req-f8fd64ce-97ac-48ac-9f55-22966efb1559 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2735896, 'name': PowerOnVM_Task, 'duration_secs': 0.443092} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.900319] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8fd64ce-97ac-48ac-9f55-22966efb1559 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: ed872f21-c2c4-4597-8c9e-9f8d2202b707] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 953.900515] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-f8fd64ce-97ac-48ac-9f55-22966efb1559 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: ed872f21-c2c4-4597-8c9e-9f8d2202b707] Updating instance 'ed872f21-c2c4-4597-8c9e-9f8d2202b707' progress to 100 {{(pid=63028) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 953.912646] env[63028]: DEBUG oslo_concurrency.lockutils [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Releasing lock "refresh_cache-a97224e8-d69b-4c62-ab96-7cef037ef39b" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 953.913014] env[63028]: DEBUG nova.compute.manager [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: a97224e8-d69b-4c62-ab96-7cef037ef39b] Instance network_info: |[{"id": "bd7b8218-749b-4f6a-be35-834767306b21", "address": "fa:16:3e:e5:ab:9a", "network": {"id": "02ae797e-2c46-44a6-a214-b8c07a5f0b5f", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-72321087-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b487a53457f64597a0dffc76fcdde6b5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "849fc06e-dfc2-470f-8490-034590682ea7", "external-id": "nsx-vlan-transportzone-567", "segmentation_id": 567, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbd7b8218-74", "ovs_interfaceid": "bd7b8218-749b-4f6a-be35-834767306b21", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 953.914048] env[63028]: DEBUG oslo_concurrency.lockutils [req-9ca26e18-46cc-402b-8b1f-7a520ab4f3ff req-ca3b85c9-7028-4091-8b62-80df684ed18d service nova] Acquired lock "refresh_cache-a97224e8-d69b-4c62-ab96-7cef037ef39b" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 953.914048] env[63028]: DEBUG nova.network.neutron [req-9ca26e18-46cc-402b-8b1f-7a520ab4f3ff req-ca3b85c9-7028-4091-8b62-80df684ed18d service nova] [instance: a97224e8-d69b-4c62-ab96-7cef037ef39b] Refreshing network info cache for port bd7b8218-749b-4f6a-be35-834767306b21 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 953.915167] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: a97224e8-d69b-4c62-ab96-7cef037ef39b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e5:ab:9a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '849fc06e-dfc2-470f-8490-034590682ea7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bd7b8218-749b-4f6a-be35-834767306b21', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 953.922911] env[63028]: DEBUG oslo.service.loopingcall [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 953.932214] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a97224e8-d69b-4c62-ab96-7cef037ef39b] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 953.932613] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-491f2fec-b270-46e0-b3cc-361568cd2e52 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.947758] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-568c7831-23a2-489d-803d-1966cdfb5d0d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.962223] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c58c27bf-4e58-4ec2-a47e-e4b6e5c0c793 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.965357] env[63028]: DEBUG oslo_vmware.api [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Task: {'id': task-2735897, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071645} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.965549] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 953.965549] env[63028]: value = "task-2735898" [ 953.965549] env[63028]: _type = "Task" [ 953.965549] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.966428] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: bb2b405e-6207-4718-9485-0271d26c160f] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 953.967516] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ce373e2-60cf-4bdf-b723-b926f7454997 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.998261] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=177625MB free_disk=110GB free_vcpus=48 pci_devices=None {{(pid=63028) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 953.998433] env[63028]: DEBUG oslo_concurrency.lockutils [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 954.004133] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735898, 'name': CreateVM_Task} progress is 15%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.023142] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: bb2b405e-6207-4718-9485-0271d26c160f] Reconfiguring VM instance instance-00000052 to attach disk [datastore1] bb2b405e-6207-4718-9485-0271d26c160f/bb2b405e-6207-4718-9485-0271d26c160f.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 954.023870] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c6e85f3d-df6e-4f45-8b53-229e68ca592c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.044265] env[63028]: DEBUG oslo_vmware.api [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Waiting for the task: (returnval){ [ 954.044265] env[63028]: value = "task-2735899" [ 954.044265] env[63028]: _type = "Task" [ 954.044265] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.052604] env[63028]: DEBUG oslo_vmware.api [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Task: {'id': task-2735899, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.247363] env[63028]: DEBUG oslo_vmware.api [None req-1ed24b34-6e66-4139-ac39-4e53ae41d1cf tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735895, 'name': CloneVM_Task} progress is 95%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.260546] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2021c84d-1019-4fb3-b96c-749c278c13a7 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 954.390521] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a99bd000-6ba3-407c-966a-91a9728c4438 tempest-ServerActionsV293TestJSON-560395623 tempest-ServerActionsV293TestJSON-560395623-project-member] Lock "600195de-ceb4-41a6-9ade-dda8b898e4db" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 29.677s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 954.480884] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735898, 'name': CreateVM_Task, 'duration_secs': 0.510515} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.481076] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a97224e8-d69b-4c62-ab96-7cef037ef39b] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 954.481981] env[63028]: DEBUG oslo_concurrency.lockutils [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 954.482108] env[63028]: DEBUG oslo_concurrency.lockutils [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 954.482456] env[63028]: DEBUG oslo_concurrency.lockutils [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 954.482725] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fa1fe359-a462-4b57-a97c-faf8937964fd {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.488500] env[63028]: DEBUG oslo_vmware.api [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Waiting for the task: (returnval){ [ 954.488500] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52b9b1c3-febe-aef1-8263-82d92414d921" [ 954.488500] env[63028]: _type = "Task" [ 954.488500] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.510089] env[63028]: DEBUG oslo_vmware.api [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52b9b1c3-febe-aef1-8263-82d92414d921, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.556979] env[63028]: DEBUG oslo_vmware.api [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Task: {'id': task-2735899, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.750946] env[63028]: DEBUG oslo_vmware.api [None req-1ed24b34-6e66-4139-ac39-4e53ae41d1cf tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735895, 'name': CloneVM_Task, 'duration_secs': 1.768336} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.751270] env[63028]: INFO nova.virt.vmwareapi.vmops [None req-1ed24b34-6e66-4139-ac39-4e53ae41d1cf tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: 672695c2-06f3-4790-a459-4b575baf29d3] Created linked-clone VM from snapshot [ 954.752160] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-191caaba-075e-45bc-884d-e45283e6b891 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.760625] env[63028]: DEBUG nova.virt.vmwareapi.images [None req-1ed24b34-6e66-4139-ac39-4e53ae41d1cf tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: 672695c2-06f3-4790-a459-4b575baf29d3] Uploading image 905bc147-e100-43e5-8cff-6b621d1497a9 {{(pid=63028) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 954.776055] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ed24b34-6e66-4139-ac39-4e53ae41d1cf tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: 672695c2-06f3-4790-a459-4b575baf29d3] Destroying the VM {{(pid=63028) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 954.776406] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-fc3e3637-d3b2-4ad6-a58a-82b4fb8a7ffb {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.786016] env[63028]: DEBUG oslo_vmware.api [None req-1ed24b34-6e66-4139-ac39-4e53ae41d1cf tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Waiting for the task: (returnval){ [ 954.786016] env[63028]: value = "task-2735900" [ 954.786016] env[63028]: _type = "Task" [ 954.786016] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.794626] env[63028]: DEBUG oslo_vmware.api [None req-1ed24b34-6e66-4139-ac39-4e53ae41d1cf tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735900, 'name': Destroy_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.865525] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26559aaa-7f43-43d5-91a8-c218d79b1dd7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.873731] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-996d1863-a770-4b1f-9a72-c13a48970f4b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.912493] env[63028]: DEBUG nova.network.neutron [req-9ca26e18-46cc-402b-8b1f-7a520ab4f3ff req-ca3b85c9-7028-4091-8b62-80df684ed18d service nova] [instance: a97224e8-d69b-4c62-ab96-7cef037ef39b] Updated VIF entry in instance network info cache for port bd7b8218-749b-4f6a-be35-834767306b21. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 954.912911] env[63028]: DEBUG nova.network.neutron [req-9ca26e18-46cc-402b-8b1f-7a520ab4f3ff req-ca3b85c9-7028-4091-8b62-80df684ed18d service nova] [instance: a97224e8-d69b-4c62-ab96-7cef037ef39b] Updating instance_info_cache with network_info: [{"id": "bd7b8218-749b-4f6a-be35-834767306b21", "address": "fa:16:3e:e5:ab:9a", "network": {"id": "02ae797e-2c46-44a6-a214-b8c07a5f0b5f", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-72321087-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b487a53457f64597a0dffc76fcdde6b5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "849fc06e-dfc2-470f-8490-034590682ea7", "external-id": "nsx-vlan-transportzone-567", "segmentation_id": 567, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbd7b8218-74", "ovs_interfaceid": "bd7b8218-749b-4f6a-be35-834767306b21", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 954.918020] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-addcebab-d5e8-4fea-bf9d-073c6b78c1cc {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.927913] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cf70a9c-f430-46ae-8cd5-88623120e700 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.944293] env[63028]: DEBUG nova.compute.provider_tree [None req-f5af781c-4293-48a4-9764-e0ab14cb4c37 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 955.003187] env[63028]: DEBUG oslo_vmware.api [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52b9b1c3-febe-aef1-8263-82d92414d921, 'name': SearchDatastore_Task, 'duration_secs': 0.015123} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.003607] env[63028]: DEBUG oslo_concurrency.lockutils [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 955.003882] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: a97224e8-d69b-4c62-ab96-7cef037ef39b] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 955.004159] env[63028]: DEBUG oslo_concurrency.lockutils [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 955.004307] env[63028]: DEBUG oslo_concurrency.lockutils [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 955.004687] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 955.011038] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6b120ca5-74ba-47ae-a44d-83e3c5227fc2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.011038] env[63028]: DEBUG nova.network.neutron [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: e7ba7069-fe9f-4e0f-acc6-c1d61ad5a8d9] Successfully updated port: 298e34ad-d45e-40ed-aa57-0a93af59226f {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 955.015598] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 955.015794] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 955.016656] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-32987a79-6ba8-4b12-9b07-c09fecfd0865 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.022729] env[63028]: DEBUG oslo_vmware.api [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Waiting for the task: (returnval){ [ 955.022729] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52c00f6a-1a77-0d81-cd8b-faa498553433" [ 955.022729] env[63028]: _type = "Task" [ 955.022729] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.033337] env[63028]: DEBUG oslo_vmware.api [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52c00f6a-1a77-0d81-cd8b-faa498553433, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.055088] env[63028]: DEBUG oslo_vmware.api [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Task: {'id': task-2735899, 'name': ReconfigVM_Task, 'duration_secs': 0.573243} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.055369] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: bb2b405e-6207-4718-9485-0271d26c160f] Reconfigured VM instance instance-00000052 to attach disk [datastore1] bb2b405e-6207-4718-9485-0271d26c160f/bb2b405e-6207-4718-9485-0271d26c160f.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 955.056033] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c38909dd-17e1-4e8c-8e22-28571c42ecf5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.062870] env[63028]: DEBUG oslo_vmware.api [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Waiting for the task: (returnval){ [ 955.062870] env[63028]: value = "task-2735901" [ 955.062870] env[63028]: _type = "Task" [ 955.062870] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.070988] env[63028]: DEBUG oslo_vmware.api [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Task: {'id': task-2735901, 'name': Rename_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.241945] env[63028]: DEBUG nova.compute.manager [req-467304a2-4859-4cb5-ad1c-b04e24e1ac58 req-7a1ed18e-1bcf-4a27-a5a4-dffa6607a1b1 service nova] [instance: e7ba7069-fe9f-4e0f-acc6-c1d61ad5a8d9] Received event network-vif-plugged-298e34ad-d45e-40ed-aa57-0a93af59226f {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 955.242219] env[63028]: DEBUG oslo_concurrency.lockutils [req-467304a2-4859-4cb5-ad1c-b04e24e1ac58 req-7a1ed18e-1bcf-4a27-a5a4-dffa6607a1b1 service nova] Acquiring lock "e7ba7069-fe9f-4e0f-acc6-c1d61ad5a8d9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 955.242444] env[63028]: DEBUG oslo_concurrency.lockutils [req-467304a2-4859-4cb5-ad1c-b04e24e1ac58 req-7a1ed18e-1bcf-4a27-a5a4-dffa6607a1b1 service nova] Lock "e7ba7069-fe9f-4e0f-acc6-c1d61ad5a8d9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 955.242680] env[63028]: DEBUG oslo_concurrency.lockutils [req-467304a2-4859-4cb5-ad1c-b04e24e1ac58 req-7a1ed18e-1bcf-4a27-a5a4-dffa6607a1b1 service nova] Lock "e7ba7069-fe9f-4e0f-acc6-c1d61ad5a8d9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 955.242846] env[63028]: DEBUG nova.compute.manager [req-467304a2-4859-4cb5-ad1c-b04e24e1ac58 req-7a1ed18e-1bcf-4a27-a5a4-dffa6607a1b1 service nova] [instance: e7ba7069-fe9f-4e0f-acc6-c1d61ad5a8d9] No waiting events found dispatching network-vif-plugged-298e34ad-d45e-40ed-aa57-0a93af59226f {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 955.243099] env[63028]: WARNING nova.compute.manager [req-467304a2-4859-4cb5-ad1c-b04e24e1ac58 req-7a1ed18e-1bcf-4a27-a5a4-dffa6607a1b1 service nova] [instance: e7ba7069-fe9f-4e0f-acc6-c1d61ad5a8d9] Received unexpected event network-vif-plugged-298e34ad-d45e-40ed-aa57-0a93af59226f for instance with vm_state building and task_state spawning. [ 955.243258] env[63028]: DEBUG nova.compute.manager [req-467304a2-4859-4cb5-ad1c-b04e24e1ac58 req-7a1ed18e-1bcf-4a27-a5a4-dffa6607a1b1 service nova] [instance: e7ba7069-fe9f-4e0f-acc6-c1d61ad5a8d9] Received event network-changed-298e34ad-d45e-40ed-aa57-0a93af59226f {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 955.243421] env[63028]: DEBUG nova.compute.manager [req-467304a2-4859-4cb5-ad1c-b04e24e1ac58 req-7a1ed18e-1bcf-4a27-a5a4-dffa6607a1b1 service nova] [instance: e7ba7069-fe9f-4e0f-acc6-c1d61ad5a8d9] Refreshing instance network info cache due to event network-changed-298e34ad-d45e-40ed-aa57-0a93af59226f. 
{{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 955.243724] env[63028]: DEBUG oslo_concurrency.lockutils [req-467304a2-4859-4cb5-ad1c-b04e24e1ac58 req-7a1ed18e-1bcf-4a27-a5a4-dffa6607a1b1 service nova] Acquiring lock "refresh_cache-e7ba7069-fe9f-4e0f-acc6-c1d61ad5a8d9" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 955.243851] env[63028]: DEBUG oslo_concurrency.lockutils [req-467304a2-4859-4cb5-ad1c-b04e24e1ac58 req-7a1ed18e-1bcf-4a27-a5a4-dffa6607a1b1 service nova] Acquired lock "refresh_cache-e7ba7069-fe9f-4e0f-acc6-c1d61ad5a8d9" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 955.244031] env[63028]: DEBUG nova.network.neutron [req-467304a2-4859-4cb5-ad1c-b04e24e1ac58 req-7a1ed18e-1bcf-4a27-a5a4-dffa6607a1b1 service nova] [instance: e7ba7069-fe9f-4e0f-acc6-c1d61ad5a8d9] Refreshing network info cache for port 298e34ad-d45e-40ed-aa57-0a93af59226f {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 955.296124] env[63028]: DEBUG oslo_vmware.api [None req-1ed24b34-6e66-4139-ac39-4e53ae41d1cf tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735900, 'name': Destroy_Task} progress is 33%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.419740] env[63028]: DEBUG oslo_concurrency.lockutils [req-9ca26e18-46cc-402b-8b1f-7a520ab4f3ff req-ca3b85c9-7028-4091-8b62-80df684ed18d service nova] Releasing lock "refresh_cache-a97224e8-d69b-4c62-ab96-7cef037ef39b" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 955.450786] env[63028]: DEBUG nova.scheduler.client.report [None req-f5af781c-4293-48a4-9764-e0ab14cb4c37 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 955.514284] env[63028]: DEBUG oslo_concurrency.lockutils [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Acquiring lock "refresh_cache-e7ba7069-fe9f-4e0f-acc6-c1d61ad5a8d9" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 955.536120] env[63028]: DEBUG oslo_vmware.api [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52c00f6a-1a77-0d81-cd8b-faa498553433, 'name': SearchDatastore_Task, 'duration_secs': 0.017348} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.536865] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8a54ab55-1350-43e5-9a4d-21a7bc8ed51b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.542827] env[63028]: DEBUG oslo_vmware.api [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Waiting for the task: (returnval){ [ 955.542827] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52c89746-82a1-e774-8f54-b0b62370b69a" [ 955.542827] env[63028]: _type = "Task" [ 955.542827] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.551769] env[63028]: DEBUG oslo_vmware.api [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52c89746-82a1-e774-8f54-b0b62370b69a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.573015] env[63028]: DEBUG oslo_vmware.api [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Task: {'id': task-2735901, 'name': Rename_Task, 'duration_secs': 0.22223} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.573507] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: bb2b405e-6207-4718-9485-0271d26c160f] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 955.573807] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f72438ee-c5c2-49f4-83d2-bf3ce71cdec9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.580036] env[63028]: DEBUG oslo_vmware.api [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Waiting for the task: (returnval){ [ 955.580036] env[63028]: value = "task-2735902" [ 955.580036] env[63028]: _type = "Task" [ 955.580036] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.587770] env[63028]: DEBUG oslo_vmware.api [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Task: {'id': task-2735902, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.670425] env[63028]: DEBUG oslo_concurrency.lockutils [None req-580ecdb3-2bd1-4068-afc1-9df515addc7f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Acquiring lock "ed872f21-c2c4-4597-8c9e-9f8d2202b707" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 955.670640] env[63028]: DEBUG oslo_concurrency.lockutils [None req-580ecdb3-2bd1-4068-afc1-9df515addc7f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Lock "ed872f21-c2c4-4597-8c9e-9f8d2202b707" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 955.670868] env[63028]: DEBUG nova.compute.manager [None req-580ecdb3-2bd1-4068-afc1-9df515addc7f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: ed872f21-c2c4-4597-8c9e-9f8d2202b707] Going to confirm migration 4 {{(pid=63028) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 955.781028] env[63028]: DEBUG nova.network.neutron [req-467304a2-4859-4cb5-ad1c-b04e24e1ac58 req-7a1ed18e-1bcf-4a27-a5a4-dffa6607a1b1 service nova] [instance: e7ba7069-fe9f-4e0f-acc6-c1d61ad5a8d9] Instance cache missing network info. {{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 955.796811] env[63028]: DEBUG oslo_vmware.api [None req-1ed24b34-6e66-4139-ac39-4e53ae41d1cf tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735900, 'name': Destroy_Task, 'duration_secs': 0.599015} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.797138] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-1ed24b34-6e66-4139-ac39-4e53ae41d1cf tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: 672695c2-06f3-4790-a459-4b575baf29d3] Destroyed the VM [ 955.797431] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-1ed24b34-6e66-4139-ac39-4e53ae41d1cf tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: 672695c2-06f3-4790-a459-4b575baf29d3] Deleting Snapshot of the VM instance {{(pid=63028) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 955.797740] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-5591a67e-5cfb-4c9d-ba06-87ec0801bb19 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.805354] env[63028]: DEBUG oslo_vmware.api [None req-1ed24b34-6e66-4139-ac39-4e53ae41d1cf tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Waiting for the task: (returnval){ [ 955.805354] env[63028]: value = "task-2735903" [ 955.805354] env[63028]: _type = "Task" [ 955.805354] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.813500] env[63028]: DEBUG oslo_vmware.api [None req-1ed24b34-6e66-4139-ac39-4e53ae41d1cf tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735903, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.873330] env[63028]: DEBUG nova.network.neutron [req-467304a2-4859-4cb5-ad1c-b04e24e1ac58 req-7a1ed18e-1bcf-4a27-a5a4-dffa6607a1b1 service nova] [instance: e7ba7069-fe9f-4e0f-acc6-c1d61ad5a8d9] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 955.954048] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f5af781c-4293-48a4-9764-e0ab14cb4c37 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.090s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 955.957373] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2a87602d-c3ea-493c-a1b4-1b103c3ef74c tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.910s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 955.959174] env[63028]: INFO nova.compute.claims [None req-2a87602d-c3ea-493c-a1b4-1b103c3ef74c tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 955.990844] env[63028]: INFO nova.scheduler.client.report [None req-f5af781c-4293-48a4-9764-e0ab14cb4c37 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Deleted allocations for instance a1d00736-1a8d-46e0-9358-46e848b94797 [ 956.055785] env[63028]: DEBUG oslo_vmware.api [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52c89746-82a1-e774-8f54-b0b62370b69a, 'name': SearchDatastore_Task, 'duration_secs': 0.01177} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.056152] env[63028]: DEBUG oslo_concurrency.lockutils [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 956.056451] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] a97224e8-d69b-4c62-ab96-7cef037ef39b/a97224e8-d69b-4c62-ab96-7cef037ef39b.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 956.056735] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2aa1143e-6c53-46e5-8cbe-b26059840d2c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.065034] env[63028]: DEBUG oslo_vmware.api [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Waiting for the task: (returnval){ [ 956.065034] env[63028]: value = "task-2735904" [ 956.065034] env[63028]: _type = "Task" [ 956.065034] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.073736] env[63028]: DEBUG oslo_vmware.api [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Task: {'id': task-2735904, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.092047] env[63028]: DEBUG oslo_vmware.api [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Task: {'id': task-2735902, 'name': PowerOnVM_Task} progress is 87%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.217107] env[63028]: DEBUG oslo_concurrency.lockutils [None req-580ecdb3-2bd1-4068-afc1-9df515addc7f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Acquiring lock "refresh_cache-ed872f21-c2c4-4597-8c9e-9f8d2202b707" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 956.217256] env[63028]: DEBUG oslo_concurrency.lockutils [None req-580ecdb3-2bd1-4068-afc1-9df515addc7f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Acquired lock "refresh_cache-ed872f21-c2c4-4597-8c9e-9f8d2202b707" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 956.217417] env[63028]: DEBUG nova.network.neutron [None req-580ecdb3-2bd1-4068-afc1-9df515addc7f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: ed872f21-c2c4-4597-8c9e-9f8d2202b707] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 956.217603] env[63028]: DEBUG nova.objects.instance [None req-580ecdb3-2bd1-4068-afc1-9df515addc7f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Lazy-loading 'info_cache' on Instance uuid ed872f21-c2c4-4597-8c9e-9f8d2202b707 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 956.316765] env[63028]: DEBUG oslo_vmware.api [None req-1ed24b34-6e66-4139-ac39-4e53ae41d1cf tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735903, 'name': RemoveSnapshot_Task} progress is 80%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.375868] env[63028]: DEBUG oslo_concurrency.lockutils [req-467304a2-4859-4cb5-ad1c-b04e24e1ac58 req-7a1ed18e-1bcf-4a27-a5a4-dffa6607a1b1 service nova] Releasing lock "refresh_cache-e7ba7069-fe9f-4e0f-acc6-c1d61ad5a8d9" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 956.376501] env[63028]: DEBUG oslo_concurrency.lockutils [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Acquired lock "refresh_cache-e7ba7069-fe9f-4e0f-acc6-c1d61ad5a8d9" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 956.376724] env[63028]: DEBUG nova.network.neutron [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: e7ba7069-fe9f-4e0f-acc6-c1d61ad5a8d9] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 956.499930] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f5af781c-4293-48a4-9764-e0ab14cb4c37 tempest-ServersWithSpecificFlavorTestJSON-243894015 tempest-ServersWithSpecificFlavorTestJSON-243894015-project-member] Lock "a1d00736-1a8d-46e0-9358-46e848b94797" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.803s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 956.576578] env[63028]: DEBUG oslo_vmware.api [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Task: {'id': task-2735904, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.593718] env[63028]: DEBUG oslo_vmware.api [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Task: {'id': task-2735902, 'name': PowerOnVM_Task, 'duration_secs': 0.713589} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.593718] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: bb2b405e-6207-4718-9485-0271d26c160f] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 956.593718] env[63028]: INFO nova.compute.manager [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: bb2b405e-6207-4718-9485-0271d26c160f] Took 8.35 seconds to spawn the instance on the hypervisor. 
[ 956.593942] env[63028]: DEBUG nova.compute.manager [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: bb2b405e-6207-4718-9485-0271d26c160f] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 956.594660] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8573247-2070-43e7-b378-abf99948ce39 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.816690] env[63028]: DEBUG oslo_vmware.api [None req-1ed24b34-6e66-4139-ac39-4e53ae41d1cf tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735903, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.930611] env[63028]: DEBUG nova.network.neutron [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: e7ba7069-fe9f-4e0f-acc6-c1d61ad5a8d9] Instance cache missing network info. {{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 957.080819] env[63028]: DEBUG oslo_vmware.api [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Task: {'id': task-2735904, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.715459} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.081061] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] a97224e8-d69b-4c62-ab96-7cef037ef39b/a97224e8-d69b-4c62-ab96-7cef037ef39b.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 957.081282] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: a97224e8-d69b-4c62-ab96-7cef037ef39b] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 957.081543] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d4da9c74-4e71-47b2-a40d-07dce0fd7586 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.088355] env[63028]: DEBUG oslo_vmware.api [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Waiting for the task: (returnval){ [ 957.088355] env[63028]: value = "task-2735905" [ 957.088355] env[63028]: _type = "Task" [ 957.088355] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.096386] env[63028]: DEBUG oslo_vmware.api [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Task: {'id': task-2735905, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.097377] env[63028]: DEBUG nova.network.neutron [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: e7ba7069-fe9f-4e0f-acc6-c1d61ad5a8d9] Updating instance_info_cache with network_info: [{"id": "298e34ad-d45e-40ed-aa57-0a93af59226f", "address": "fa:16:3e:63:03:28", "network": {"id": "02ae797e-2c46-44a6-a214-b8c07a5f0b5f", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-72321087-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b487a53457f64597a0dffc76fcdde6b5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "849fc06e-dfc2-470f-8490-034590682ea7", "external-id": "nsx-vlan-transportzone-567", "segmentation_id": 567, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap298e34ad-d4", "ovs_interfaceid": "298e34ad-d45e-40ed-aa57-0a93af59226f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 957.113557] env[63028]: INFO nova.compute.manager [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: bb2b405e-6207-4718-9485-0271d26c160f] Took 32.26 seconds to build instance. [ 957.320247] env[63028]: DEBUG oslo_vmware.api [None req-1ed24b34-6e66-4139-ac39-4e53ae41d1cf tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735903, 'name': RemoveSnapshot_Task, 'duration_secs': 1.065722} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.320247] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-1ed24b34-6e66-4139-ac39-4e53ae41d1cf tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: 672695c2-06f3-4790-a459-4b575baf29d3] Deleted Snapshot of the VM instance {{(pid=63028) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 957.401550] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b3ca759-929b-495c-9e62-12a7a70e1346 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.410511] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98aff05b-a9bf-4c04-a2fb-ef4636dbd713 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.449496] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7488827-4896-4377-95f0-1526ba689dc6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.458400] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe966f27-0c84-482c-9a27-ff65f2f4b58a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.474844] env[63028]: DEBUG nova.compute.provider_tree [None req-2a87602d-c3ea-493c-a1b4-1b103c3ef74c tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 957.522255] env[63028]: DEBUG nova.network.neutron [None req-580ecdb3-2bd1-4068-afc1-9df515addc7f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: ed872f21-c2c4-4597-8c9e-9f8d2202b707] Updating instance_info_cache with network_info: [{"id": "cd8436f9-6412-468e-bd24-f9d845d3ca21", "address": "fa:16:3e:ff:70:93", "network": {"id": "e8109779-7eb7-4751-b6e5-c1e3b007cb9a", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1892394824-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "05118b378b5e4d838962db2378b381bc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5446413d-c3b0-4cd2-a962-62240db178ac", "external-id": "nsx-vlan-transportzone-528", "segmentation_id": 528, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcd8436f9-64", "ovs_interfaceid": "cd8436f9-6412-468e-bd24-f9d845d3ca21", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 957.600046] env[63028]: DEBUG oslo_vmware.api 
[None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Task: {'id': task-2735905, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.101401} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.600046] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: a97224e8-d69b-4c62-ab96-7cef037ef39b] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 957.600898] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-787424a9-374e-454c-aeec-ded79458c992 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.604529] env[63028]: DEBUG oslo_concurrency.lockutils [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Releasing lock "refresh_cache-e7ba7069-fe9f-4e0f-acc6-c1d61ad5a8d9" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 957.605009] env[63028]: DEBUG nova.compute.manager [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: e7ba7069-fe9f-4e0f-acc6-c1d61ad5a8d9] Instance network_info: |[{"id": "298e34ad-d45e-40ed-aa57-0a93af59226f", "address": "fa:16:3e:63:03:28", "network": {"id": "02ae797e-2c46-44a6-a214-b8c07a5f0b5f", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-72321087-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b487a53457f64597a0dffc76fcdde6b5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "849fc06e-dfc2-470f-8490-034590682ea7", "external-id": "nsx-vlan-transportzone-567", "segmentation_id": 567, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap298e34ad-d4", "ovs_interfaceid": "298e34ad-d45e-40ed-aa57-0a93af59226f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 957.605608] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: e7ba7069-fe9f-4e0f-acc6-c1d61ad5a8d9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:63:03:28', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '849fc06e-dfc2-470f-8490-034590682ea7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '298e34ad-d45e-40ed-aa57-0a93af59226f', 'vif_model': 'vmxnet3'}] {{(pid=63028) 
build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 957.616498] env[63028]: DEBUG oslo.service.loopingcall [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 957.617473] env[63028]: DEBUG oslo_concurrency.lockutils [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Lock "bb2b405e-6207-4718-9485-0271d26c160f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 33.769s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 957.617700] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e7ba7069-fe9f-4e0f-acc6-c1d61ad5a8d9] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 957.617968] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a6a8719c-6a5e-463b-ad2e-3077c690566b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.651119] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: a97224e8-d69b-4c62-ab96-7cef037ef39b] Reconfiguring VM instance instance-00000053 to attach disk [datastore1] a97224e8-d69b-4c62-ab96-7cef037ef39b/a97224e8-d69b-4c62-ab96-7cef037ef39b.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 957.651956] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-14a06b2a-31e6-4a87-81d3-a656a0a92aa5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.667700] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 957.667700] env[63028]: value = "task-2735906" [ 957.667700] env[63028]: _type = "Task" [ 957.667700] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.673212] env[63028]: DEBUG oslo_vmware.api [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Waiting for the task: (returnval){ [ 957.673212] env[63028]: value = "task-2735907" [ 957.673212] env[63028]: _type = "Task" [ 957.673212] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.681832] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735906, 'name': CreateVM_Task} progress is 6%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.685327] env[63028]: DEBUG oslo_vmware.api [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Task: {'id': task-2735907, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.788447] env[63028]: DEBUG oslo_vmware.rw_handles [None req-6e8c6f51-983a-4bea-9d02-9ada98e80f7c tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52cf1dba-0769-c600-43cc-83e0eb55a832/disk-0.vmdk. {{(pid=63028) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 957.789377] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34b6902d-d462-4666-a0b7-84161d60b047 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.796149] env[63028]: DEBUG oslo_vmware.rw_handles [None req-6e8c6f51-983a-4bea-9d02-9ada98e80f7c tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52cf1dba-0769-c600-43cc-83e0eb55a832/disk-0.vmdk is in state: ready. {{(pid=63028) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 957.796344] env[63028]: ERROR oslo_vmware.rw_handles [None req-6e8c6f51-983a-4bea-9d02-9ada98e80f7c tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52cf1dba-0769-c600-43cc-83e0eb55a832/disk-0.vmdk due to incomplete transfer. [ 957.796578] env[63028]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-0c4b96c9-d743-4b50-b1e4-6e5cf17e0275 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.806248] env[63028]: DEBUG oslo_vmware.rw_handles [None req-6e8c6f51-983a-4bea-9d02-9ada98e80f7c tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52cf1dba-0769-c600-43cc-83e0eb55a832/disk-0.vmdk. 
{{(pid=63028) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 957.806460] env[63028]: DEBUG nova.virt.vmwareapi.images [None req-6e8c6f51-983a-4bea-9d02-9ada98e80f7c tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Uploaded image 398e3010-a42d-4c4b-8604-11f5a3e99cff to the Glance image server {{(pid=63028) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 957.808828] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e8c6f51-983a-4bea-9d02-9ada98e80f7c tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Destroying the VM {{(pid=63028) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 957.809106] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-06312310-f44f-4191-b7e6-78523e7fb2f2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.815371] env[63028]: DEBUG oslo_vmware.api [None req-6e8c6f51-983a-4bea-9d02-9ada98e80f7c tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Waiting for the task: (returnval){ [ 957.815371] env[63028]: value = "task-2735908" [ 957.815371] env[63028]: _type = "Task" [ 957.815371] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.823408] env[63028]: DEBUG oslo_vmware.api [None req-6e8c6f51-983a-4bea-9d02-9ada98e80f7c tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Task: {'id': task-2735908, 'name': Destroy_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.825455] env[63028]: WARNING nova.compute.manager [None req-1ed24b34-6e66-4139-ac39-4e53ae41d1cf tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: 672695c2-06f3-4790-a459-4b575baf29d3] Image not found during snapshot: nova.exception.ImageNotFound: Image 905bc147-e100-43e5-8cff-6b621d1497a9 could not be found. 
[ 957.979383] env[63028]: DEBUG nova.scheduler.client.report [None req-2a87602d-c3ea-493c-a1b4-1b103c3ef74c tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 958.025471] env[63028]: DEBUG oslo_concurrency.lockutils [None req-580ecdb3-2bd1-4068-afc1-9df515addc7f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Releasing lock "refresh_cache-ed872f21-c2c4-4597-8c9e-9f8d2202b707" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 958.025654] env[63028]: DEBUG nova.objects.instance [None req-580ecdb3-2bd1-4068-afc1-9df515addc7f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Lazy-loading 'migration_context' on Instance uuid ed872f21-c2c4-4597-8c9e-9f8d2202b707 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 958.182032] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735906, 'name': CreateVM_Task, 'duration_secs': 0.483256} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.185084] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e7ba7069-fe9f-4e0f-acc6-c1d61ad5a8d9] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 958.186935] env[63028]: DEBUG oslo_concurrency.lockutils [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 958.187212] env[63028]: DEBUG oslo_concurrency.lockutils [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 958.187547] env[63028]: DEBUG oslo_concurrency.lockutils [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 958.188312] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-181f4403-b9c9-49ab-b750-d715a033f08e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.195052] env[63028]: DEBUG oslo_vmware.api [None 
req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Task: {'id': task-2735907, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.198760] env[63028]: DEBUG oslo_vmware.api [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Waiting for the task: (returnval){ [ 958.198760] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5271f877-d1eb-3d82-a9b1-c63b15eacb37" [ 958.198760] env[63028]: _type = "Task" [ 958.198760] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.208587] env[63028]: DEBUG oslo_vmware.api [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5271f877-d1eb-3d82-a9b1-c63b15eacb37, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.326523] env[63028]: DEBUG oslo_vmware.api [None req-6e8c6f51-983a-4bea-9d02-9ada98e80f7c tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Task: {'id': task-2735908, 'name': Destroy_Task} progress is 33%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.486424] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2a87602d-c3ea-493c-a1b4-1b103c3ef74c tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.529s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 958.487050] env[63028]: DEBUG nova.compute.manager [None req-2a87602d-c3ea-493c-a1b4-1b103c3ef74c tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19] Start building networks asynchronously for instance. 
{{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 958.489944] env[63028]: DEBUG oslo_concurrency.lockutils [None req-43ba495d-bfef-4eb6-9fde-f165b7e47fca tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.111s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 958.491354] env[63028]: INFO nova.compute.claims [None req-43ba495d-bfef-4eb6-9fde-f165b7e47fca tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] [instance: 455578fa-7468-40dc-8c0a-37ac35e5c0a0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 958.528858] env[63028]: DEBUG nova.objects.base [None req-580ecdb3-2bd1-4068-afc1-9df515addc7f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=63028) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 958.532992] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b91bb544-43e9-4ed1-8b47-306598cb8b2c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.556956] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ffa21988-72a5-4b76-832c-a64d28b6fb6a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.563056] env[63028]: DEBUG oslo_vmware.api [None req-580ecdb3-2bd1-4068-afc1-9df515addc7f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Waiting for the task: (returnval){ [ 958.563056] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52e8450e-c6f7-f857-4b69-be18ef31565a" [ 958.563056] env[63028]: _type = "Task" [ 958.563056] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.571769] env[63028]: DEBUG oslo_vmware.api [None req-580ecdb3-2bd1-4068-afc1-9df515addc7f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52e8450e-c6f7-f857-4b69-be18ef31565a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.691567] env[63028]: DEBUG oslo_vmware.api [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Task: {'id': task-2735907, 'name': ReconfigVM_Task, 'duration_secs': 0.636307} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.693450] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: a97224e8-d69b-4c62-ab96-7cef037ef39b] Reconfigured VM instance instance-00000053 to attach disk [datastore1] a97224e8-d69b-4c62-ab96-7cef037ef39b/a97224e8-d69b-4c62-ab96-7cef037ef39b.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 958.694474] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a7ca6833-8844-4370-97c3-32483c3a9dbf {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.708074] env[63028]: DEBUG oslo_vmware.api [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Waiting for the task: (returnval){ [ 958.708074] env[63028]: value = "task-2735909" [ 958.708074] env[63028]: _type = "Task" [ 958.708074] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.715478] env[63028]: DEBUG oslo_vmware.api [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5271f877-d1eb-3d82-a9b1-c63b15eacb37, 'name': SearchDatastore_Task, 'duration_secs': 0.017223} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.716304] env[63028]: DEBUG oslo_concurrency.lockutils [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 958.716583] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: e7ba7069-fe9f-4e0f-acc6-c1d61ad5a8d9] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 958.716838] env[63028]: DEBUG oslo_concurrency.lockutils [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 958.716990] env[63028]: DEBUG oslo_concurrency.lockutils [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 958.717192] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 958.717467] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-508858bc-ea8d-4d6e-99bb-b561da6962f0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.730991] env[63028]: DEBUG oslo_vmware.api [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Task: {'id': task-2735909, 'name': Rename_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.741882] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 958.742069] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 958.745361] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6bb74aeb-06b1-4c09-9a8a-4d6e2b464dc2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.748659] env[63028]: DEBUG oslo_vmware.api [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Waiting for the task: (returnval){ [ 958.748659] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5238be62-fa5f-fea5-eb1d-aa8bc7188b99" [ 958.748659] env[63028]: _type = "Task" [ 958.748659] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.758162] env[63028]: DEBUG oslo_vmware.api [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5238be62-fa5f-fea5-eb1d-aa8bc7188b99, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.826747] env[63028]: DEBUG oslo_vmware.api [None req-6e8c6f51-983a-4bea-9d02-9ada98e80f7c tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Task: {'id': task-2735908, 'name': Destroy_Task, 'duration_secs': 0.753647} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.826895] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-6e8c6f51-983a-4bea-9d02-9ada98e80f7c tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Destroyed the VM [ 958.827211] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-6e8c6f51-983a-4bea-9d02-9ada98e80f7c tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Deleting Snapshot of the VM instance {{(pid=63028) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 958.827395] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-0c3d9542-c850-4b18-96e9-fa921ee494dc {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.834159] env[63028]: DEBUG oslo_vmware.api [None req-6e8c6f51-983a-4bea-9d02-9ada98e80f7c tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Waiting for the task: (returnval){ [ 958.834159] env[63028]: value = "task-2735910" [ 958.834159] env[63028]: _type = "Task" [ 958.834159] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.842414] env[63028]: DEBUG oslo_vmware.api [None req-6e8c6f51-983a-4bea-9d02-9ada98e80f7c tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Task: {'id': task-2735910, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.996527] env[63028]: DEBUG nova.compute.utils [None req-2a87602d-c3ea-493c-a1b4-1b103c3ef74c tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 958.999939] env[63028]: DEBUG nova.compute.manager [None req-2a87602d-c3ea-493c-a1b4-1b103c3ef74c tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19] Allocating IP information in the background. 
{{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 959.000292] env[63028]: DEBUG nova.network.neutron [None req-2a87602d-c3ea-493c-a1b4-1b103c3ef74c tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 959.039039] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c2daee28-2f1e-4c7b-96d5-697df33b41b5 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Acquiring lock "672695c2-06f3-4790-a459-4b575baf29d3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 959.039314] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c2daee28-2f1e-4c7b-96d5-697df33b41b5 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Lock "672695c2-06f3-4790-a459-4b575baf29d3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 959.039529] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c2daee28-2f1e-4c7b-96d5-697df33b41b5 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Acquiring lock "672695c2-06f3-4790-a459-4b575baf29d3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 959.039708] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c2daee28-2f1e-4c7b-96d5-697df33b41b5 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Lock "672695c2-06f3-4790-a459-4b575baf29d3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 959.039881] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c2daee28-2f1e-4c7b-96d5-697df33b41b5 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Lock "672695c2-06f3-4790-a459-4b575baf29d3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 959.042732] env[63028]: DEBUG nova.policy [None req-2a87602d-c3ea-493c-a1b4-1b103c3ef74c tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '43ed2fb3f1a944fdac8ee7778f171cd0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8efc6d89903c454eb39136a76e0adef5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 959.044659] env[63028]: INFO nova.compute.manager [None req-c2daee28-2f1e-4c7b-96d5-697df33b41b5 tempest-ImagesTestJSON-852617634 
tempest-ImagesTestJSON-852617634-project-member] [instance: 672695c2-06f3-4790-a459-4b575baf29d3] Terminating instance [ 959.078342] env[63028]: DEBUG oslo_vmware.api [None req-580ecdb3-2bd1-4068-afc1-9df515addc7f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52e8450e-c6f7-f857-4b69-be18ef31565a, 'name': SearchDatastore_Task, 'duration_secs': 0.027714} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.078342] env[63028]: DEBUG oslo_concurrency.lockutils [None req-580ecdb3-2bd1-4068-afc1-9df515addc7f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 959.218937] env[63028]: DEBUG oslo_vmware.api [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Task: {'id': task-2735909, 'name': Rename_Task, 'duration_secs': 0.253817} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.219425] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: a97224e8-d69b-4c62-ab96-7cef037ef39b] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 959.219605] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ef4d41d4-c9e6-49a0-b45a-4165aa05621a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.226058] env[63028]: DEBUG oslo_vmware.api [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Waiting for the task: (returnval){ [ 959.226058] env[63028]: value = "task-2735911" [ 959.226058] env[63028]: _type = "Task" [ 959.226058] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.236258] env[63028]: DEBUG oslo_vmware.api [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Task: {'id': task-2735911, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.259542] env[63028]: DEBUG oslo_vmware.api [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5238be62-fa5f-fea5-eb1d-aa8bc7188b99, 'name': SearchDatastore_Task, 'duration_secs': 0.030914} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.260467] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-366fff98-f5ba-495c-b47d-1b058a2cbe2c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.266445] env[63028]: DEBUG oslo_vmware.api [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Waiting for the task: (returnval){ [ 959.266445] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52e7858f-9655-85c5-6d96-e9c4ba1101fa" [ 959.266445] env[63028]: _type = "Task" [ 959.266445] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.276864] env[63028]: DEBUG oslo_vmware.api [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52e7858f-9655-85c5-6d96-e9c4ba1101fa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.347473] env[63028]: DEBUG oslo_vmware.api [None req-6e8c6f51-983a-4bea-9d02-9ada98e80f7c tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Task: {'id': task-2735910, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.405705] env[63028]: DEBUG nova.network.neutron [None req-2a87602d-c3ea-493c-a1b4-1b103c3ef74c tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19] Successfully created port: db82c13e-74f6-431e-9184-2375c4a0bbbc {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 959.503764] env[63028]: DEBUG nova.compute.manager [None req-2a87602d-c3ea-493c-a1b4-1b103c3ef74c tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19] Start building block device mappings for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 959.549456] env[63028]: DEBUG nova.compute.manager [None req-c2daee28-2f1e-4c7b-96d5-697df33b41b5 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: 672695c2-06f3-4790-a459-4b575baf29d3] Start destroying the instance on the hypervisor. 
{{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 959.549456] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c2daee28-2f1e-4c7b-96d5-697df33b41b5 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: 672695c2-06f3-4790-a459-4b575baf29d3] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 959.550344] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13760cd7-639f-4a31-9ec3-96fa29e02ebc {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.563472] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2daee28-2f1e-4c7b-96d5-697df33b41b5 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: 672695c2-06f3-4790-a459-4b575baf29d3] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 959.563814] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-47f6a764-e8e9-4a53-8cdd-633ab6cc2f05 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.571956] env[63028]: DEBUG oslo_vmware.api [None req-c2daee28-2f1e-4c7b-96d5-697df33b41b5 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Waiting for the task: (returnval){ [ 959.571956] env[63028]: value = "task-2735912" [ 959.571956] env[63028]: _type = "Task" [ 959.571956] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.581477] env[63028]: DEBUG oslo_vmware.api [None req-c2daee28-2f1e-4c7b-96d5-697df33b41b5 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735912, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.739326] env[63028]: DEBUG oslo_vmware.api [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Task: {'id': task-2735911, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.787451] env[63028]: DEBUG oslo_vmware.api [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52e7858f-9655-85c5-6d96-e9c4ba1101fa, 'name': SearchDatastore_Task, 'duration_secs': 0.010518} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.787451] env[63028]: DEBUG oslo_concurrency.lockutils [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 959.787838] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] e7ba7069-fe9f-4e0f-acc6-c1d61ad5a8d9/e7ba7069-fe9f-4e0f-acc6-c1d61ad5a8d9.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 959.788929] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0b76e8ff-cbcc-475c-917b-d9cfaf8d6b7a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.799021] env[63028]: DEBUG oslo_vmware.api [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Waiting for the task: (returnval){ [ 959.799021] env[63028]: value = "task-2735913" [ 959.799021] env[63028]: _type = "Task" [ 959.799021] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.811259] env[63028]: DEBUG oslo_vmware.api [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Task: {'id': task-2735913, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.851664] env[63028]: DEBUG oslo_vmware.api [None req-6e8c6f51-983a-4bea-9d02-9ada98e80f7c tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Task: {'id': task-2735910, 'name': RemoveSnapshot_Task, 'duration_secs': 0.556289} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.852841] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-6e8c6f51-983a-4bea-9d02-9ada98e80f7c tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Deleted Snapshot of the VM instance {{(pid=63028) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 959.852841] env[63028]: DEBUG nova.compute.manager [None req-6e8c6f51-983a-4bea-9d02-9ada98e80f7c tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 959.853237] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bcf6af4-bbf0-4adc-9c08-1b2872419574 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.018983] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91e7695b-45c8-4e28-8193-5decca0525f5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.034814] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37d6d404-2103-43f5-bac6-4dc9b55a9259 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.073569] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7422bc36-dc1a-4138-af61-8ad50174d348 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.094163] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4084c03c-64f1-4b5a-beb4-09f2b125184b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.098457] env[63028]: DEBUG oslo_vmware.api [None req-c2daee28-2f1e-4c7b-96d5-697df33b41b5 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735912, 'name': PowerOffVM_Task, 'duration_secs': 0.26554} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.098783] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2daee28-2f1e-4c7b-96d5-697df33b41b5 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: 672695c2-06f3-4790-a459-4b575baf29d3] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 960.098977] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c2daee28-2f1e-4c7b-96d5-697df33b41b5 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: 672695c2-06f3-4790-a459-4b575baf29d3] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 960.099632] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-06d35afe-a339-4c4b-93b9-8d32aa8f2d86 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.114691] env[63028]: DEBUG nova.compute.provider_tree [None req-43ba495d-bfef-4eb6-9fde-f165b7e47fca tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 960.211029] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c2daee28-2f1e-4c7b-96d5-697df33b41b5 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: 672695c2-06f3-4790-a459-4b575baf29d3] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 960.211029] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c2daee28-2f1e-4c7b-96d5-697df33b41b5 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: 672695c2-06f3-4790-a459-4b575baf29d3] Deleting contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 960.211214] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-c2daee28-2f1e-4c7b-96d5-697df33b41b5 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Deleting the datastore file [datastore2] 672695c2-06f3-4790-a459-4b575baf29d3 {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 960.215022] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-75cedf0d-7c29-41b5-a0a1-45f8cae39585 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.219934] env[63028]: DEBUG oslo_vmware.api [None req-c2daee28-2f1e-4c7b-96d5-697df33b41b5 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Waiting for the task: (returnval){ [ 960.219934] env[63028]: value = "task-2735915" [ 960.219934] env[63028]: _type = "Task" [ 960.219934] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.229617] env[63028]: DEBUG oslo_vmware.api [None req-c2daee28-2f1e-4c7b-96d5-697df33b41b5 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735915, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.238684] env[63028]: DEBUG oslo_vmware.api [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Task: {'id': task-2735911, 'name': PowerOnVM_Task, 'duration_secs': 0.514139} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.239053] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: a97224e8-d69b-4c62-ab96-7cef037ef39b] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 960.239322] env[63028]: INFO nova.compute.manager [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: a97224e8-d69b-4c62-ab96-7cef037ef39b] Took 8.94 seconds to spawn the instance on the hypervisor. [ 960.239459] env[63028]: DEBUG nova.compute.manager [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: a97224e8-d69b-4c62-ab96-7cef037ef39b] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 960.240425] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4484167-d73c-4940-95d2-0c50a1af6a0e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.307790] env[63028]: DEBUG oslo_vmware.api [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Task: {'id': task-2735913, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.369689] env[63028]: INFO nova.compute.manager [None req-6e8c6f51-983a-4bea-9d02-9ada98e80f7c tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Shelve offloading [ 960.528251] env[63028]: DEBUG nova.compute.manager [None req-2a87602d-c3ea-493c-a1b4-1b103c3ef74c tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19] Start spawning the instance on the hypervisor. 
{{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 960.557991] env[63028]: DEBUG nova.virt.hardware [None req-2a87602d-c3ea-493c-a1b4-1b103c3ef74c tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 960.558306] env[63028]: DEBUG nova.virt.hardware [None req-2a87602d-c3ea-493c-a1b4-1b103c3ef74c tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 960.558409] env[63028]: DEBUG nova.virt.hardware [None req-2a87602d-c3ea-493c-a1b4-1b103c3ef74c tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 960.558592] env[63028]: DEBUG nova.virt.hardware [None req-2a87602d-c3ea-493c-a1b4-1b103c3ef74c tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 960.558741] env[63028]: DEBUG nova.virt.hardware [None req-2a87602d-c3ea-493c-a1b4-1b103c3ef74c tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 960.558892] env[63028]: DEBUG nova.virt.hardware [None req-2a87602d-c3ea-493c-a1b4-1b103c3ef74c tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 960.559384] env[63028]: DEBUG nova.virt.hardware [None req-2a87602d-c3ea-493c-a1b4-1b103c3ef74c tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 960.559587] env[63028]: DEBUG nova.virt.hardware [None req-2a87602d-c3ea-493c-a1b4-1b103c3ef74c tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 960.559775] env[63028]: DEBUG nova.virt.hardware [None 
req-2a87602d-c3ea-493c-a1b4-1b103c3ef74c tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 960.559940] env[63028]: DEBUG nova.virt.hardware [None req-2a87602d-c3ea-493c-a1b4-1b103c3ef74c tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 960.560130] env[63028]: DEBUG nova.virt.hardware [None req-2a87602d-c3ea-493c-a1b4-1b103c3ef74c tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 960.561412] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-794fc3e4-7a11-45f9-91a7-df7dff3d0001 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.571488] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a269565-73d5-4655-ab0c-934bbc6cc334 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.618653] env[63028]: DEBUG nova.scheduler.client.report [None req-43ba495d-bfef-4eb6-9fde-f165b7e47fca tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 960.729678] env[63028]: DEBUG oslo_vmware.api [None req-c2daee28-2f1e-4c7b-96d5-697df33b41b5 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735915, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.765018] env[63028]: INFO nova.compute.manager [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: a97224e8-d69b-4c62-ab96-7cef037ef39b] Took 35.81 seconds to build instance. [ 960.808559] env[63028]: DEBUG oslo_vmware.api [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Task: {'id': task-2735913, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.710342} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.808828] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] e7ba7069-fe9f-4e0f-acc6-c1d61ad5a8d9/e7ba7069-fe9f-4e0f-acc6-c1d61ad5a8d9.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 960.809053] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: e7ba7069-fe9f-4e0f-acc6-c1d61ad5a8d9] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 960.809321] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-df171241-0178-4adf-9a0c-0ba0fb35da9c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.816330] env[63028]: DEBUG oslo_vmware.api [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Waiting for the task: (returnval){ [ 960.816330] env[63028]: value = "task-2735916" [ 960.816330] env[63028]: _type = "Task" [ 960.816330] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.825180] env[63028]: DEBUG oslo_vmware.api [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Task: {'id': task-2735916, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.874302] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e8c6f51-983a-4bea-9d02-9ada98e80f7c tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 960.874968] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5f88a3a0-e2ea-4449-a365-5a81c9b5e430 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.878517] env[63028]: DEBUG nova.compute.manager [req-2e85e9ed-7ecd-4d27-879d-8da0569f13bd req-b9190d14-e7b4-485c-88d3-656b1da1c120 service nova] [instance: ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19] Received event network-vif-plugged-db82c13e-74f6-431e-9184-2375c4a0bbbc {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 960.878735] env[63028]: DEBUG oslo_concurrency.lockutils [req-2e85e9ed-7ecd-4d27-879d-8da0569f13bd req-b9190d14-e7b4-485c-88d3-656b1da1c120 service nova] Acquiring lock "ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 960.878950] env[63028]: DEBUG oslo_concurrency.lockutils [req-2e85e9ed-7ecd-4d27-879d-8da0569f13bd req-b9190d14-e7b4-485c-88d3-656b1da1c120 service nova] Lock "ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 960.879159] env[63028]: DEBUG oslo_concurrency.lockutils [req-2e85e9ed-7ecd-4d27-879d-8da0569f13bd req-b9190d14-e7b4-485c-88d3-656b1da1c120 service nova] Lock "ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 960.879297] env[63028]: DEBUG nova.compute.manager [req-2e85e9ed-7ecd-4d27-879d-8da0569f13bd req-b9190d14-e7b4-485c-88d3-656b1da1c120 service nova] [instance: ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19] No waiting events found dispatching network-vif-plugged-db82c13e-74f6-431e-9184-2375c4a0bbbc {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 960.879462] env[63028]: WARNING nova.compute.manager [req-2e85e9ed-7ecd-4d27-879d-8da0569f13bd req-b9190d14-e7b4-485c-88d3-656b1da1c120 service nova] [instance: ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19] Received unexpected event network-vif-plugged-db82c13e-74f6-431e-9184-2375c4a0bbbc for instance with vm_state building and task_state spawning. [ 960.885325] env[63028]: DEBUG oslo_vmware.api [None req-6e8c6f51-983a-4bea-9d02-9ada98e80f7c tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Waiting for the task: (returnval){ [ 960.885325] env[63028]: value = "task-2735917" [ 960.885325] env[63028]: _type = "Task" [ 960.885325] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.895139] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e8c6f51-983a-4bea-9d02-9ada98e80f7c tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] VM already powered off {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 960.895386] env[63028]: DEBUG nova.compute.manager [None req-6e8c6f51-983a-4bea-9d02-9ada98e80f7c tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 960.896183] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e2c6d83-b16d-4efc-be25-767f4969eeb7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.903755] env[63028]: DEBUG oslo_concurrency.lockutils [None req-6e8c6f51-983a-4bea-9d02-9ada98e80f7c tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Acquiring lock "refresh_cache-63524cd8-81de-419f-bb07-0326f3cb393f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 960.903928] env[63028]: DEBUG oslo_concurrency.lockutils [None req-6e8c6f51-983a-4bea-9d02-9ada98e80f7c tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Acquired lock "refresh_cache-63524cd8-81de-419f-bb07-0326f3cb393f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 960.904141] env[63028]: DEBUG nova.network.neutron [None req-6e8c6f51-983a-4bea-9d02-9ada98e80f7c tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 961.016986] env[63028]: DEBUG nova.network.neutron [None req-2a87602d-c3ea-493c-a1b4-1b103c3ef74c tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19] Successfully updated port: db82c13e-74f6-431e-9184-2375c4a0bbbc {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 961.124040] env[63028]: DEBUG oslo_concurrency.lockutils [None req-43ba495d-bfef-4eb6-9fde-f165b7e47fca tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.634s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 961.124717] env[63028]: DEBUG nova.compute.manager [None req-43ba495d-bfef-4eb6-9fde-f165b7e47fca tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] [instance: 455578fa-7468-40dc-8c0a-37ac35e5c0a0] Start building networks asynchronously for instance. 
{{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 961.127295] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c8f32af8-7ee0-464d-801d-994c5e7837ce tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.260s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 961.127568] env[63028]: DEBUG nova.objects.instance [None req-c8f32af8-7ee0-464d-801d-994c5e7837ce tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Lazy-loading 'resources' on Instance uuid b9d9fe4e-438c-4f68-b011-9eb9e10a381c {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 961.230419] env[63028]: DEBUG oslo_vmware.api [None req-c2daee28-2f1e-4c7b-96d5-697df33b41b5 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Task: {'id': task-2735915, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.601365} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.231181] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-c2daee28-2f1e-4c7b-96d5-697df33b41b5 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 961.231181] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c2daee28-2f1e-4c7b-96d5-697df33b41b5 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: 672695c2-06f3-4790-a459-4b575baf29d3] Deleted contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 961.231181] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c2daee28-2f1e-4c7b-96d5-697df33b41b5 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: 672695c2-06f3-4790-a459-4b575baf29d3] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 961.231724] env[63028]: INFO nova.compute.manager [None req-c2daee28-2f1e-4c7b-96d5-697df33b41b5 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] [instance: 672695c2-06f3-4790-a459-4b575baf29d3] Took 1.68 seconds to destroy the instance on the hypervisor. [ 961.231724] env[63028]: DEBUG oslo.service.loopingcall [None req-c2daee28-2f1e-4c7b-96d5-697df33b41b5 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 961.231907] env[63028]: DEBUG nova.compute.manager [-] [instance: 672695c2-06f3-4790-a459-4b575baf29d3] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 961.232046] env[63028]: DEBUG nova.network.neutron [-] [instance: 672695c2-06f3-4790-a459-4b575baf29d3] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 961.264665] env[63028]: DEBUG oslo_concurrency.lockutils [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Lock "a97224e8-d69b-4c62-ab96-7cef037ef39b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 37.337s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 961.326419] env[63028]: DEBUG oslo_vmware.api [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Task: {'id': task-2735916, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068921} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.326419] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: e7ba7069-fe9f-4e0f-acc6-c1d61ad5a8d9] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 961.327201] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b07892f-80c6-47fa-94fd-9e03d20dbaa3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.352017] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: e7ba7069-fe9f-4e0f-acc6-c1d61ad5a8d9] Reconfiguring VM instance instance-00000054 to attach disk [datastore2] e7ba7069-fe9f-4e0f-acc6-c1d61ad5a8d9/e7ba7069-fe9f-4e0f-acc6-c1d61ad5a8d9.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 961.352873] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d59b2ef8-166d-418e-a577-62134095d547 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.373070] env[63028]: DEBUG oslo_vmware.api [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Waiting for the task: (returnval){ [ 961.373070] env[63028]: value = "task-2735918" [ 961.373070] env[63028]: _type = "Task" [ 961.373070] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.381094] env[63028]: DEBUG oslo_vmware.api [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Task: {'id': task-2735918, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.521023] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2a87602d-c3ea-493c-a1b4-1b103c3ef74c tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Acquiring lock "refresh_cache-ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 961.521023] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2a87602d-c3ea-493c-a1b4-1b103c3ef74c tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Acquired lock "refresh_cache-ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 961.521023] env[63028]: DEBUG nova.network.neutron [None req-2a87602d-c3ea-493c-a1b4-1b103c3ef74c tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 961.635381] env[63028]: DEBUG nova.compute.utils [None req-43ba495d-bfef-4eb6-9fde-f165b7e47fca tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 961.640723] env[63028]: DEBUG nova.compute.manager [None req-43ba495d-bfef-4eb6-9fde-f165b7e47fca tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] [instance: 455578fa-7468-40dc-8c0a-37ac35e5c0a0] Allocating IP information in the background. 
{{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 961.640915] env[63028]: DEBUG nova.network.neutron [None req-43ba495d-bfef-4eb6-9fde-f165b7e47fca tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] [instance: 455578fa-7468-40dc-8c0a-37ac35e5c0a0] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 961.687408] env[63028]: DEBUG nova.policy [None req-43ba495d-bfef-4eb6-9fde-f165b7e47fca tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ff02cd727ce84a6badd667ef5aed4a64', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0cc09f51b98c489ba0a3f14161e4a686', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 961.831941] env[63028]: DEBUG nova.network.neutron [None req-6e8c6f51-983a-4bea-9d02-9ada98e80f7c tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Updating instance_info_cache with network_info: [{"id": "296dfd9e-84e1-4ea8-bd17-28920a6a048b", "address": "fa:16:3e:bc:87:07", "network": {"id": "37013470-5bda-41a6-826c-03ac0d423c85", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1214578902-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "98d3fdfda1694b2f9f5985831ea77a21", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8868dc2-7767-49c0-a2ed-e611fcbf8414", "external-id": "nsx-vlan-transportzone-158", "segmentation_id": 158, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap296dfd9e-84", "ovs_interfaceid": "296dfd9e-84e1-4ea8-bd17-28920a6a048b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 961.896448] env[63028]: DEBUG oslo_vmware.api [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Task: {'id': task-2735918, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.077575] env[63028]: DEBUG nova.network.neutron [None req-2a87602d-c3ea-493c-a1b4-1b103c3ef74c tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19] Instance cache missing network info. 
{{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 962.100660] env[63028]: DEBUG nova.network.neutron [None req-43ba495d-bfef-4eb6-9fde-f165b7e47fca tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] [instance: 455578fa-7468-40dc-8c0a-37ac35e5c0a0] Successfully created port: 3cae8ebc-a19e-401f-aa80-28da2e6bcd42 {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 962.142205] env[63028]: DEBUG nova.compute.manager [None req-43ba495d-bfef-4eb6-9fde-f165b7e47fca tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] [instance: 455578fa-7468-40dc-8c0a-37ac35e5c0a0] Start building block device mappings for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 962.162333] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aedfcefe-e9ce-466e-9aaa-e9032445b27d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.173771] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7ae7884-91b4-4fc7-8967-e408f4bbaac4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.209267] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74078546-bdc1-431f-864a-9b21246eed50 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.222416] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-901d95e6-9afd-46ce-af20-52396aa0887c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.240168] env[63028]: DEBUG nova.compute.provider_tree [None req-c8f32af8-7ee0-464d-801d-994c5e7837ce tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 962.241575] env[63028]: DEBUG nova.network.neutron [-] [instance: 672695c2-06f3-4790-a459-4b575baf29d3] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 962.323767] env[63028]: DEBUG nova.network.neutron [None req-2a87602d-c3ea-493c-a1b4-1b103c3ef74c tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19] Updating instance_info_cache with network_info: [{"id": "db82c13e-74f6-431e-9184-2375c4a0bbbc", "address": "fa:16:3e:1d:fe:e0", "network": {"id": "9c32515c-66bf-4a5f-9011-975fe2f6b264", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1101331936-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": 
"8efc6d89903c454eb39136a76e0adef5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92fe29b3-0907-453d-aabb-5559c4bd7c0f", "external-id": "nsx-vlan-transportzone-482", "segmentation_id": 482, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdb82c13e-74", "ovs_interfaceid": "db82c13e-74f6-431e-9184-2375c4a0bbbc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 962.337574] env[63028]: DEBUG oslo_concurrency.lockutils [None req-6e8c6f51-983a-4bea-9d02-9ada98e80f7c tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Releasing lock "refresh_cache-63524cd8-81de-419f-bb07-0326f3cb393f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 962.385180] env[63028]: DEBUG oslo_vmware.api [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Task: {'id': task-2735918, 'name': ReconfigVM_Task, 'duration_secs': 0.723986} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.385502] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: e7ba7069-fe9f-4e0f-acc6-c1d61ad5a8d9] Reconfigured VM instance instance-00000054 to attach disk [datastore2] e7ba7069-fe9f-4e0f-acc6-c1d61ad5a8d9/e7ba7069-fe9f-4e0f-acc6-c1d61ad5a8d9.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 962.387989] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2280a38a-f352-4a84-8520-cf58e4891373 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.396352] env[63028]: DEBUG oslo_vmware.api [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Waiting for the task: (returnval){ [ 962.396352] env[63028]: value = "task-2735919" [ 962.396352] env[63028]: _type = "Task" [ 962.396352] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.399539] env[63028]: DEBUG oslo_concurrency.lockutils [None req-459fd01e-3304-4c20-a3b6-e4be4925e19b tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Acquiring lock "85aafadb-81d6-4687-aed1-fbe829e5f95f" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 962.399767] env[63028]: DEBUG oslo_concurrency.lockutils [None req-459fd01e-3304-4c20-a3b6-e4be4925e19b tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Lock "85aafadb-81d6-4687-aed1-fbe829e5f95f" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 962.399946] env[63028]: INFO nova.compute.manager [None req-459fd01e-3304-4c20-a3b6-e4be4925e19b tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Shelving [ 962.409905] env[63028]: DEBUG oslo_vmware.api [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Task: {'id': task-2735919, 'name': Rename_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.574730] env[63028]: DEBUG oslo_concurrency.lockutils [None req-46e72be6-8f6a-47bc-8465-8e79a24584ee tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Acquiring lock "46dc76bc-854f-46ad-9db5-21cf6f40fb21" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 962.574730] env[63028]: DEBUG oslo_concurrency.lockutils [None req-46e72be6-8f6a-47bc-8465-8e79a24584ee tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Lock "46dc76bc-854f-46ad-9db5-21cf6f40fb21" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 962.677906] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-6e8c6f51-983a-4bea-9d02-9ada98e80f7c tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 962.678847] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0a553b4-55ef-4c13-9141-5b6f4d5e5c36 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.686932] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-6e8c6f51-983a-4bea-9d02-9ada98e80f7c tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 962.687821] 
env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b641addc-5251-4880-ab2e-f6347ead48de {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.744596] env[63028]: DEBUG nova.scheduler.client.report [None req-c8f32af8-7ee0-464d-801d-994c5e7837ce tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 962.748812] env[63028]: INFO nova.compute.manager [-] [instance: 672695c2-06f3-4790-a459-4b575baf29d3] Took 1.52 seconds to deallocate network for instance. [ 962.759009] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-6e8c6f51-983a-4bea-9d02-9ada98e80f7c tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 962.759009] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-6e8c6f51-983a-4bea-9d02-9ada98e80f7c tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Deleting contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 962.759009] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-6e8c6f51-983a-4bea-9d02-9ada98e80f7c tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Deleting the datastore file [datastore2] 63524cd8-81de-419f-bb07-0326f3cb393f {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 962.760244] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6f3b06b6-4cf2-482d-b737-50b71cac71dd {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.765731] env[63028]: DEBUG oslo_vmware.api [None req-6e8c6f51-983a-4bea-9d02-9ada98e80f7c tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Waiting for the task: (returnval){ [ 962.765731] env[63028]: value = "task-2735921" [ 962.765731] env[63028]: _type = "Task" [ 962.765731] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.774194] env[63028]: DEBUG oslo_vmware.api [None req-6e8c6f51-983a-4bea-9d02-9ada98e80f7c tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Task: {'id': task-2735921, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.827262] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2a87602d-c3ea-493c-a1b4-1b103c3ef74c tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Releasing lock "refresh_cache-ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 962.827605] env[63028]: DEBUG nova.compute.manager [None req-2a87602d-c3ea-493c-a1b4-1b103c3ef74c tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19] Instance network_info: |[{"id": "db82c13e-74f6-431e-9184-2375c4a0bbbc", "address": "fa:16:3e:1d:fe:e0", "network": {"id": "9c32515c-66bf-4a5f-9011-975fe2f6b264", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1101331936-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8efc6d89903c454eb39136a76e0adef5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92fe29b3-0907-453d-aabb-5559c4bd7c0f", "external-id": "nsx-vlan-transportzone-482", "segmentation_id": 482, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdb82c13e-74", "ovs_interfaceid": "db82c13e-74f6-431e-9184-2375c4a0bbbc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 962.828039] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-2a87602d-c3ea-493c-a1b4-1b103c3ef74c tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1d:fe:e0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '92fe29b3-0907-453d-aabb-5559c4bd7c0f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'db82c13e-74f6-431e-9184-2375c4a0bbbc', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 962.836385] env[63028]: DEBUG oslo.service.loopingcall [None req-2a87602d-c3ea-493c-a1b4-1b103c3ef74c tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 962.836995] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 962.837780] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-160aa36c-6f31-4ebf-b902-915bfdc2eba0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.860212] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 962.860212] env[63028]: value = "task-2735922" [ 962.860212] env[63028]: _type = "Task" [ 962.860212] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.868990] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735922, 'name': CreateVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.908189] env[63028]: DEBUG oslo_vmware.api [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Task: {'id': task-2735919, 'name': Rename_Task, 'duration_secs': 0.149262} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.911088] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: e7ba7069-fe9f-4e0f-acc6-c1d61ad5a8d9] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 962.912335] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-92ecb0d7-e370-4535-bb6b-070607b27947 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.919177] env[63028]: DEBUG oslo_vmware.api [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Waiting for the task: (returnval){ [ 962.919177] env[63028]: value = "task-2735923" [ 962.919177] env[63028]: _type = "Task" [ 962.919177] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.930703] env[63028]: DEBUG oslo_vmware.api [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Task: {'id': task-2735923, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.949896] env[63028]: DEBUG nova.compute.manager [req-8b58d10a-abb2-4e1c-928c-ca06d75a0de4 req-84170d96-5cbf-4c1c-bf65-109a00f6d68d service nova] [instance: ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19] Received event network-changed-db82c13e-74f6-431e-9184-2375c4a0bbbc {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 962.950051] env[63028]: DEBUG nova.compute.manager [req-8b58d10a-abb2-4e1c-928c-ca06d75a0de4 req-84170d96-5cbf-4c1c-bf65-109a00f6d68d service nova] [instance: ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19] Refreshing instance network info cache due to event network-changed-db82c13e-74f6-431e-9184-2375c4a0bbbc. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 962.950285] env[63028]: DEBUG oslo_concurrency.lockutils [req-8b58d10a-abb2-4e1c-928c-ca06d75a0de4 req-84170d96-5cbf-4c1c-bf65-109a00f6d68d service nova] Acquiring lock "refresh_cache-ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 962.950471] env[63028]: DEBUG oslo_concurrency.lockutils [req-8b58d10a-abb2-4e1c-928c-ca06d75a0de4 req-84170d96-5cbf-4c1c-bf65-109a00f6d68d service nova] Acquired lock "refresh_cache-ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 962.950674] env[63028]: DEBUG nova.network.neutron [req-8b58d10a-abb2-4e1c-928c-ca06d75a0de4 req-84170d96-5cbf-4c1c-bf65-109a00f6d68d service nova] [instance: ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19] Refreshing network info cache for port db82c13e-74f6-431e-9184-2375c4a0bbbc {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 963.080942] env[63028]: DEBUG nova.compute.utils [None req-46e72be6-8f6a-47bc-8465-8e79a24584ee tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 963.152278] env[63028]: DEBUG nova.compute.manager [None req-43ba495d-bfef-4eb6-9fde-f165b7e47fca tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] [instance: 455578fa-7468-40dc-8c0a-37ac35e5c0a0] Start spawning the instance on the hypervisor. 
{{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 963.184187] env[63028]: DEBUG nova.virt.hardware [None req-43ba495d-bfef-4eb6-9fde-f165b7e47fca tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 963.184646] env[63028]: DEBUG nova.virt.hardware [None req-43ba495d-bfef-4eb6-9fde-f165b7e47fca tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 963.184938] env[63028]: DEBUG nova.virt.hardware [None req-43ba495d-bfef-4eb6-9fde-f165b7e47fca tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 963.185295] env[63028]: DEBUG nova.virt.hardware [None req-43ba495d-bfef-4eb6-9fde-f165b7e47fca tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 963.185603] env[63028]: DEBUG nova.virt.hardware [None req-43ba495d-bfef-4eb6-9fde-f165b7e47fca tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 963.185884] env[63028]: DEBUG nova.virt.hardware [None req-43ba495d-bfef-4eb6-9fde-f165b7e47fca tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 963.186324] env[63028]: DEBUG nova.virt.hardware [None req-43ba495d-bfef-4eb6-9fde-f165b7e47fca tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 963.186557] env[63028]: DEBUG nova.virt.hardware [None req-43ba495d-bfef-4eb6-9fde-f165b7e47fca 
tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 963.186775] env[63028]: DEBUG nova.virt.hardware [None req-43ba495d-bfef-4eb6-9fde-f165b7e47fca tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 963.187021] env[63028]: DEBUG nova.virt.hardware [None req-43ba495d-bfef-4eb6-9fde-f165b7e47fca tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 963.187265] env[63028]: DEBUG nova.virt.hardware [None req-43ba495d-bfef-4eb6-9fde-f165b7e47fca tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 963.188211] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0426ce85-fc37-47df-8435-e0b82e0b49bf {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.196032] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2037038a-dbcc-4b0b-9215-16dbaacf6f5b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.250256] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c8f32af8-7ee0-464d-801d-994c5e7837ce tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.123s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 963.252697] env[63028]: DEBUG oslo_concurrency.lockutils [None req-697ec9df-f4ed-4ed1-9652-15047a7296b9 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.980s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 963.252932] env[63028]: DEBUG nova.objects.instance [None req-697ec9df-f4ed-4ed1-9652-15047a7296b9 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Lazy-loading 'resources' on Instance uuid f3277886-4498-45c6-be68-e71d8293dc00 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 963.256307] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c2daee28-2f1e-4c7b-96d5-697df33b41b5 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 963.273300] env[63028]: INFO 
nova.scheduler.client.report [None req-c8f32af8-7ee0-464d-801d-994c5e7837ce tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Deleted allocations for instance b9d9fe4e-438c-4f68-b011-9eb9e10a381c [ 963.277261] env[63028]: DEBUG oslo_vmware.api [None req-6e8c6f51-983a-4bea-9d02-9ada98e80f7c tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Task: {'id': task-2735921, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.246625} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.279545] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-6e8c6f51-983a-4bea-9d02-9ada98e80f7c tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 963.279775] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-6e8c6f51-983a-4bea-9d02-9ada98e80f7c tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Deleted contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 963.279986] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-6e8c6f51-983a-4bea-9d02-9ada98e80f7c tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 963.301130] env[63028]: INFO nova.scheduler.client.report [None req-6e8c6f51-983a-4bea-9d02-9ada98e80f7c tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Deleted allocations for instance 63524cd8-81de-419f-bb07-0326f3cb393f [ 963.369950] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735922, 'name': CreateVM_Task, 'duration_secs': 0.348715} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.370138] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 963.370857] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2a87602d-c3ea-493c-a1b4-1b103c3ef74c tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 963.371041] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2a87602d-c3ea-493c-a1b4-1b103c3ef74c tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 963.371364] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2a87602d-c3ea-493c-a1b4-1b103c3ef74c tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 963.371620] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3da932e7-8452-4662-82d9-4e1360648cb8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.376340] env[63028]: DEBUG oslo_vmware.api [None req-2a87602d-c3ea-493c-a1b4-1b103c3ef74c tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Waiting for the task: (returnval){ [ 963.376340] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5237fab3-74f4-26cc-16ca-08a5f9375812" [ 963.376340] env[63028]: _type = "Task" [ 963.376340] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.383759] env[63028]: DEBUG oslo_vmware.api [None req-2a87602d-c3ea-493c-a1b4-1b103c3ef74c tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5237fab3-74f4-26cc-16ca-08a5f9375812, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.414988] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-459fd01e-3304-4c20-a3b6-e4be4925e19b tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 963.415355] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-58c3c7e7-9a23-4d46-98ac-f0222599f9c0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.424259] env[63028]: DEBUG oslo_vmware.api [None req-459fd01e-3304-4c20-a3b6-e4be4925e19b tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Waiting for the task: (returnval){ [ 963.424259] env[63028]: value = "task-2735924" [ 963.424259] env[63028]: _type = "Task" [ 963.424259] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.431576] env[63028]: DEBUG oslo_vmware.api [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Task: {'id': task-2735923, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.436720] env[63028]: DEBUG oslo_vmware.api [None req-459fd01e-3304-4c20-a3b6-e4be4925e19b tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2735924, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.581321] env[63028]: DEBUG oslo_concurrency.lockutils [None req-46e72be6-8f6a-47bc-8465-8e79a24584ee tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Lock "46dc76bc-854f-46ad-9db5-21cf6f40fb21" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.008s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 963.709420] env[63028]: DEBUG nova.network.neutron [None req-43ba495d-bfef-4eb6-9fde-f165b7e47fca tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] [instance: 455578fa-7468-40dc-8c0a-37ac35e5c0a0] Successfully updated port: 3cae8ebc-a19e-401f-aa80-28da2e6bcd42 {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 963.732205] env[63028]: DEBUG nova.network.neutron [req-8b58d10a-abb2-4e1c-928c-ca06d75a0de4 req-84170d96-5cbf-4c1c-bf65-109a00f6d68d service nova] [instance: ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19] Updated VIF entry in instance network info cache for port db82c13e-74f6-431e-9184-2375c4a0bbbc. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 963.732607] env[63028]: DEBUG nova.network.neutron [req-8b58d10a-abb2-4e1c-928c-ca06d75a0de4 req-84170d96-5cbf-4c1c-bf65-109a00f6d68d service nova] [instance: ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19] Updating instance_info_cache with network_info: [{"id": "db82c13e-74f6-431e-9184-2375c4a0bbbc", "address": "fa:16:3e:1d:fe:e0", "network": {"id": "9c32515c-66bf-4a5f-9011-975fe2f6b264", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1101331936-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8efc6d89903c454eb39136a76e0adef5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92fe29b3-0907-453d-aabb-5559c4bd7c0f", "external-id": "nsx-vlan-transportzone-482", "segmentation_id": 482, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdb82c13e-74", "ovs_interfaceid": "db82c13e-74f6-431e-9184-2375c4a0bbbc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 963.786380] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c8f32af8-7ee0-464d-801d-994c5e7837ce tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Lock "b9d9fe4e-438c-4f68-b011-9eb9e10a381c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.612s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 963.808114] env[63028]: DEBUG oslo_concurrency.lockutils [None req-6e8c6f51-983a-4bea-9d02-9ada98e80f7c tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 963.890633] env[63028]: DEBUG oslo_vmware.api [None req-2a87602d-c3ea-493c-a1b4-1b103c3ef74c tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5237fab3-74f4-26cc-16ca-08a5f9375812, 'name': SearchDatastore_Task, 'duration_secs': 0.020147} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.891115] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2a87602d-c3ea-493c-a1b4-1b103c3ef74c tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 963.891335] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-2a87602d-c3ea-493c-a1b4-1b103c3ef74c tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 963.891791] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2a87602d-c3ea-493c-a1b4-1b103c3ef74c tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 963.891791] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2a87602d-c3ea-493c-a1b4-1b103c3ef74c tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 963.891928] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-2a87602d-c3ea-493c-a1b4-1b103c3ef74c tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 963.892163] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a20fafca-cae0-48f0-84f2-b40e8e8941e3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.899980] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-2a87602d-c3ea-493c-a1b4-1b103c3ef74c tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 963.900182] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-2a87602d-c3ea-493c-a1b4-1b103c3ef74c tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 963.900941] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7a3c2859-6e95-42ca-bcea-adbb60d51c64 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.905973] env[63028]: DEBUG oslo_vmware.api [None req-2a87602d-c3ea-493c-a1b4-1b103c3ef74c tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Waiting for the task: (returnval){ [ 963.905973] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]523ebece-8c04-fc3c-7634-35b61211b987" [ 963.905973] env[63028]: _type = "Task" [ 963.905973] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.916213] env[63028]: DEBUG oslo_vmware.api [None req-2a87602d-c3ea-493c-a1b4-1b103c3ef74c tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]523ebece-8c04-fc3c-7634-35b61211b987, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.933849] env[63028]: DEBUG oslo_vmware.api [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Task: {'id': task-2735923, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.941640] env[63028]: DEBUG oslo_vmware.api [None req-459fd01e-3304-4c20-a3b6-e4be4925e19b tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2735924, 'name': PowerOffVM_Task, 'duration_secs': 0.315084} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.941911] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-459fd01e-3304-4c20-a3b6-e4be4925e19b tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 963.942769] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1ce4497-8d27-4eab-8ae4-a12b5e7eb44b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.969018] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9630f15-e034-4352-a62e-f1f38aa220a0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.059750] env[63028]: DEBUG oslo_concurrency.lockutils [None req-512f154c-941d-4be2-85d8-fd73b2fb3370 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] Acquiring lock "3fb46d02-7914-4d08-b63b-f3447ba1b81a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 964.059750] env[63028]: DEBUG oslo_concurrency.lockutils [None req-512f154c-941d-4be2-85d8-fd73b2fb3370 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] Lock "3fb46d02-7914-4d08-b63b-f3447ba1b81a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 964.144622] env[63028]: DEBUG nova.compute.manager [req-973ba466-eba0-4bb6-941b-270e8adf424e req-259bed74-d8bf-448c-922f-8947f92e23e7 service nova] [instance: 455578fa-7468-40dc-8c0a-37ac35e5c0a0] Received event network-vif-plugged-3cae8ebc-a19e-401f-aa80-28da2e6bcd42 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 964.144861] env[63028]: DEBUG oslo_concurrency.lockutils [req-973ba466-eba0-4bb6-941b-270e8adf424e req-259bed74-d8bf-448c-922f-8947f92e23e7 service nova] Acquiring lock "455578fa-7468-40dc-8c0a-37ac35e5c0a0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 964.145092] env[63028]: DEBUG oslo_concurrency.lockutils [req-973ba466-eba0-4bb6-941b-270e8adf424e req-259bed74-d8bf-448c-922f-8947f92e23e7 service nova] Lock "455578fa-7468-40dc-8c0a-37ac35e5c0a0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 964.145319] env[63028]: DEBUG oslo_concurrency.lockutils [req-973ba466-eba0-4bb6-941b-270e8adf424e req-259bed74-d8bf-448c-922f-8947f92e23e7 service nova] Lock "455578fa-7468-40dc-8c0a-37ac35e5c0a0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 964.145445] env[63028]: 
DEBUG nova.compute.manager [req-973ba466-eba0-4bb6-941b-270e8adf424e req-259bed74-d8bf-448c-922f-8947f92e23e7 service nova] [instance: 455578fa-7468-40dc-8c0a-37ac35e5c0a0] No waiting events found dispatching network-vif-plugged-3cae8ebc-a19e-401f-aa80-28da2e6bcd42 {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 964.145616] env[63028]: WARNING nova.compute.manager [req-973ba466-eba0-4bb6-941b-270e8adf424e req-259bed74-d8bf-448c-922f-8947f92e23e7 service nova] [instance: 455578fa-7468-40dc-8c0a-37ac35e5c0a0] Received unexpected event network-vif-plugged-3cae8ebc-a19e-401f-aa80-28da2e6bcd42 for instance with vm_state building and task_state spawning. [ 964.164329] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4801bea6-8e1b-4f08-978a-2edb7715450d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.172674] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c5f862f-9bf8-4ceb-8bd0-4d09f03e4fa3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.203854] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-358152ae-8891-4e06-a940-4d5673223648 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.211942] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fe9fa7d-2b9b-4e0e-baa6-c3744f5cd93d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.218704] env[63028]: DEBUG oslo_concurrency.lockutils [None req-43ba495d-bfef-4eb6-9fde-f165b7e47fca tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] Acquiring lock "refresh_cache-455578fa-7468-40dc-8c0a-37ac35e5c0a0" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 964.218704] env[63028]: DEBUG oslo_concurrency.lockutils [None req-43ba495d-bfef-4eb6-9fde-f165b7e47fca tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] Acquired lock "refresh_cache-455578fa-7468-40dc-8c0a-37ac35e5c0a0" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 964.218704] env[63028]: DEBUG nova.network.neutron [None req-43ba495d-bfef-4eb6-9fde-f165b7e47fca tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] [instance: 455578fa-7468-40dc-8c0a-37ac35e5c0a0] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 964.230564] env[63028]: DEBUG nova.compute.provider_tree [None req-697ec9df-f4ed-4ed1-9652-15047a7296b9 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 964.236780] env[63028]: DEBUG oslo_concurrency.lockutils [req-8b58d10a-abb2-4e1c-928c-ca06d75a0de4 req-84170d96-5cbf-4c1c-bf65-109a00f6d68d service nova] Releasing lock 
"refresh_cache-ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 964.237824] env[63028]: DEBUG nova.compute.manager [req-8b58d10a-abb2-4e1c-928c-ca06d75a0de4 req-84170d96-5cbf-4c1c-bf65-109a00f6d68d service nova] [instance: 672695c2-06f3-4790-a459-4b575baf29d3] Received event network-vif-deleted-562720f0-e1d1-414a-a602-d4ae400ade6f {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 964.237824] env[63028]: DEBUG nova.compute.manager [req-8b58d10a-abb2-4e1c-928c-ca06d75a0de4 req-84170d96-5cbf-4c1c-bf65-109a00f6d68d service nova] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Received event network-vif-unplugged-296dfd9e-84e1-4ea8-bd17-28920a6a048b {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 964.237824] env[63028]: DEBUG oslo_concurrency.lockutils [req-8b58d10a-abb2-4e1c-928c-ca06d75a0de4 req-84170d96-5cbf-4c1c-bf65-109a00f6d68d service nova] Acquiring lock "63524cd8-81de-419f-bb07-0326f3cb393f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 964.237824] env[63028]: DEBUG oslo_concurrency.lockutils [req-8b58d10a-abb2-4e1c-928c-ca06d75a0de4 req-84170d96-5cbf-4c1c-bf65-109a00f6d68d service nova] Lock "63524cd8-81de-419f-bb07-0326f3cb393f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 964.237824] env[63028]: DEBUG oslo_concurrency.lockutils [req-8b58d10a-abb2-4e1c-928c-ca06d75a0de4 req-84170d96-5cbf-4c1c-bf65-109a00f6d68d service nova] Lock "63524cd8-81de-419f-bb07-0326f3cb393f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 964.238314] env[63028]: DEBUG nova.compute.manager [req-8b58d10a-abb2-4e1c-928c-ca06d75a0de4 req-84170d96-5cbf-4c1c-bf65-109a00f6d68d service nova] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] No waiting events found dispatching network-vif-unplugged-296dfd9e-84e1-4ea8-bd17-28920a6a048b {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 964.238314] env[63028]: WARNING nova.compute.manager [req-8b58d10a-abb2-4e1c-928c-ca06d75a0de4 req-84170d96-5cbf-4c1c-bf65-109a00f6d68d service nova] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Received unexpected event network-vif-unplugged-296dfd9e-84e1-4ea8-bd17-28920a6a048b for instance with vm_state shelved and task_state shelving_offloading. [ 964.238314] env[63028]: DEBUG nova.compute.manager [req-8b58d10a-abb2-4e1c-928c-ca06d75a0de4 req-84170d96-5cbf-4c1c-bf65-109a00f6d68d service nova] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Received event network-changed-296dfd9e-84e1-4ea8-bd17-28920a6a048b {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 964.238475] env[63028]: DEBUG nova.compute.manager [req-8b58d10a-abb2-4e1c-928c-ca06d75a0de4 req-84170d96-5cbf-4c1c-bf65-109a00f6d68d service nova] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Refreshing instance network info cache due to event network-changed-296dfd9e-84e1-4ea8-bd17-28920a6a048b. 
{{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 964.238881] env[63028]: DEBUG oslo_concurrency.lockutils [req-8b58d10a-abb2-4e1c-928c-ca06d75a0de4 req-84170d96-5cbf-4c1c-bf65-109a00f6d68d service nova] Acquiring lock "refresh_cache-63524cd8-81de-419f-bb07-0326f3cb393f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 964.238881] env[63028]: DEBUG oslo_concurrency.lockutils [req-8b58d10a-abb2-4e1c-928c-ca06d75a0de4 req-84170d96-5cbf-4c1c-bf65-109a00f6d68d service nova] Acquired lock "refresh_cache-63524cd8-81de-419f-bb07-0326f3cb393f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 964.238881] env[63028]: DEBUG nova.network.neutron [req-8b58d10a-abb2-4e1c-928c-ca06d75a0de4 req-84170d96-5cbf-4c1c-bf65-109a00f6d68d service nova] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Refreshing network info cache for port 296dfd9e-84e1-4ea8-bd17-28920a6a048b {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 964.418913] env[63028]: DEBUG oslo_vmware.api [None req-2a87602d-c3ea-493c-a1b4-1b103c3ef74c tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]523ebece-8c04-fc3c-7634-35b61211b987, 'name': SearchDatastore_Task, 'duration_secs': 0.010251} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 964.419806] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-038df819-de69-427f-9a66-0557331b595c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.428273] env[63028]: DEBUG oslo_vmware.api [None req-2a87602d-c3ea-493c-a1b4-1b103c3ef74c tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Waiting for the task: (returnval){ [ 964.428273] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5232e4ed-3747-94bd-6f32-75ef126bac4c" [ 964.428273] env[63028]: _type = "Task" [ 964.428273] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 964.432046] env[63028]: DEBUG oslo_vmware.api [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Task: {'id': task-2735923, 'name': PowerOnVM_Task, 'duration_secs': 1.142297} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 964.434536] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: e7ba7069-fe9f-4e0f-acc6-c1d61ad5a8d9] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 964.434753] env[63028]: INFO nova.compute.manager [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: e7ba7069-fe9f-4e0f-acc6-c1d61ad5a8d9] Took 10.60 seconds to spawn the instance on the hypervisor. 
[ 964.434933] env[63028]: DEBUG nova.compute.manager [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: e7ba7069-fe9f-4e0f-acc6-c1d61ad5a8d9] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 964.435703] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8731bf1-a2f6-49f3-94ec-ae1fe5b59845 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.447210] env[63028]: DEBUG oslo_vmware.api [None req-2a87602d-c3ea-493c-a1b4-1b103c3ef74c tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5232e4ed-3747-94bd-6f32-75ef126bac4c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.481842] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-459fd01e-3304-4c20-a3b6-e4be4925e19b tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Creating Snapshot of the VM instance {{(pid=63028) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 964.482641] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-9cc5bd0e-b4b0-445f-acf6-41932ac89e43 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.490209] env[63028]: DEBUG oslo_vmware.api [None req-459fd01e-3304-4c20-a3b6-e4be4925e19b tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Waiting for the task: (returnval){ [ 964.490209] env[63028]: value = "task-2735925" [ 964.490209] env[63028]: _type = "Task" [ 964.490209] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 964.498221] env[63028]: DEBUG oslo_vmware.api [None req-459fd01e-3304-4c20-a3b6-e4be4925e19b tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2735925, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.561644] env[63028]: DEBUG nova.compute.manager [None req-512f154c-941d-4be2-85d8-fd73b2fb3370 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] [instance: 3fb46d02-7914-4d08-b63b-f3447ba1b81a] Starting instance... 
{{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 964.653830] env[63028]: DEBUG oslo_concurrency.lockutils [None req-46e72be6-8f6a-47bc-8465-8e79a24584ee tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Acquiring lock "46dc76bc-854f-46ad-9db5-21cf6f40fb21" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 964.654071] env[63028]: DEBUG oslo_concurrency.lockutils [None req-46e72be6-8f6a-47bc-8465-8e79a24584ee tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Lock "46dc76bc-854f-46ad-9db5-21cf6f40fb21" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 964.654367] env[63028]: INFO nova.compute.manager [None req-46e72be6-8f6a-47bc-8465-8e79a24584ee tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Attaching volume 15326c33-7e0b-41be-bf2e-5b82153cea0d to /dev/sdb [ 964.686512] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb099fb3-359b-40ba-84b5-8878c183e044 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.694215] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ceb6fbc-c7ed-4fd3-8d04-3d8abb3fda5d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.708558] env[63028]: DEBUG nova.virt.block_device [None req-46e72be6-8f6a-47bc-8465-8e79a24584ee tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Updating existing volume attachment record: a6f07986-e80d-4155-8b55-bf216a59d484 {{(pid=63028) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 964.733603] env[63028]: DEBUG nova.scheduler.client.report [None req-697ec9df-f4ed-4ed1-9652-15047a7296b9 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 964.772597] env[63028]: DEBUG nova.network.neutron [None req-43ba495d-bfef-4eb6-9fde-f165b7e47fca tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] [instance: 455578fa-7468-40dc-8c0a-37ac35e5c0a0] Instance cache missing network info. 
{{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 964.887669] env[63028]: DEBUG oslo_concurrency.lockutils [None req-6c9f5b6d-d6b4-4ed0-994c-55d12d9c1a85 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Acquiring lock "ba57ed92-aaef-460c-bd45-d0cbe09e4615" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 964.888524] env[63028]: DEBUG oslo_concurrency.lockutils [None req-6c9f5b6d-d6b4-4ed0-994c-55d12d9c1a85 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Lock "ba57ed92-aaef-460c-bd45-d0cbe09e4615" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 964.888876] env[63028]: DEBUG oslo_concurrency.lockutils [None req-6c9f5b6d-d6b4-4ed0-994c-55d12d9c1a85 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Acquiring lock "ba57ed92-aaef-460c-bd45-d0cbe09e4615-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 964.889232] env[63028]: DEBUG oslo_concurrency.lockutils [None req-6c9f5b6d-d6b4-4ed0-994c-55d12d9c1a85 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Lock "ba57ed92-aaef-460c-bd45-d0cbe09e4615-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 964.889470] env[63028]: DEBUG oslo_concurrency.lockutils [None req-6c9f5b6d-d6b4-4ed0-994c-55d12d9c1a85 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Lock "ba57ed92-aaef-460c-bd45-d0cbe09e4615-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 964.892802] env[63028]: INFO nova.compute.manager [None req-6c9f5b6d-d6b4-4ed0-994c-55d12d9c1a85 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: ba57ed92-aaef-460c-bd45-d0cbe09e4615] Terminating instance [ 964.945180] env[63028]: DEBUG oslo_vmware.api [None req-2a87602d-c3ea-493c-a1b4-1b103c3ef74c tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5232e4ed-3747-94bd-6f32-75ef126bac4c, 'name': SearchDatastore_Task, 'duration_secs': 0.021997} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 964.946137] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2a87602d-c3ea-493c-a1b4-1b103c3ef74c tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 964.946429] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a87602d-c3ea-493c-a1b4-1b103c3ef74c tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19/ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 964.946725] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-15a23c8b-5578-4e4f-9722-d8cae537dffb {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.960511] env[63028]: DEBUG oslo_vmware.api [None req-2a87602d-c3ea-493c-a1b4-1b103c3ef74c tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Waiting for the task: (returnval){ [ 964.960511] env[63028]: value = "task-2735927" [ 964.960511] env[63028]: _type = "Task" [ 964.960511] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 964.960981] env[63028]: INFO nova.compute.manager [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: e7ba7069-fe9f-4e0f-acc6-c1d61ad5a8d9] Took 39.96 seconds to build instance. [ 964.970204] env[63028]: DEBUG oslo_vmware.api [None req-2a87602d-c3ea-493c-a1b4-1b103c3ef74c tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2735927, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.000380] env[63028]: DEBUG oslo_vmware.api [None req-459fd01e-3304-4c20-a3b6-e4be4925e19b tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2735925, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.018107] env[63028]: DEBUG nova.network.neutron [None req-43ba495d-bfef-4eb6-9fde-f165b7e47fca tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] [instance: 455578fa-7468-40dc-8c0a-37ac35e5c0a0] Updating instance_info_cache with network_info: [{"id": "3cae8ebc-a19e-401f-aa80-28da2e6bcd42", "address": "fa:16:3e:da:e5:5c", "network": {"id": "12d9a704-bca6-4132-add3-8d8bb4a11dbd", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-831034052-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0cc09f51b98c489ba0a3f14161e4a686", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7041d198-66a3-40de-bf7d-cfc036e6ed69", "external-id": "nsx-vlan-transportzone-278", "segmentation_id": 278, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3cae8ebc-a1", "ovs_interfaceid": "3cae8ebc-a19e-401f-aa80-28da2e6bcd42", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 965.085015] env[63028]: DEBUG oslo_concurrency.lockutils [None req-512f154c-941d-4be2-85d8-fd73b2fb3370 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 965.096751] env[63028]: DEBUG nova.network.neutron [req-8b58d10a-abb2-4e1c-928c-ca06d75a0de4 req-84170d96-5cbf-4c1c-bf65-109a00f6d68d service nova] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Updated VIF entry in instance network info cache for port 296dfd9e-84e1-4ea8-bd17-28920a6a048b. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 965.097423] env[63028]: DEBUG nova.network.neutron [req-8b58d10a-abb2-4e1c-928c-ca06d75a0de4 req-84170d96-5cbf-4c1c-bf65-109a00f6d68d service nova] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Updating instance_info_cache with network_info: [{"id": "296dfd9e-84e1-4ea8-bd17-28920a6a048b", "address": "fa:16:3e:bc:87:07", "network": {"id": "37013470-5bda-41a6-826c-03ac0d423c85", "bridge": null, "label": "tempest-ServersNegativeTestJSON-1214578902-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "98d3fdfda1694b2f9f5985831ea77a21", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap296dfd9e-84", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 965.239229] env[63028]: DEBUG oslo_concurrency.lockutils [None req-697ec9df-f4ed-4ed1-9652-15047a7296b9 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.986s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 965.241986] env[63028]: DEBUG oslo_concurrency.lockutils [None req-303769e7-95d0-483b-bc6d-c7843cb1c5e3 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.443s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 965.243670] env[63028]: INFO nova.compute.claims [None req-303769e7-95d0-483b-bc6d-c7843cb1c5e3 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] [instance: 56d6982d-9f76-4952-8c8b-f64b3c8d02fe] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 965.265805] env[63028]: INFO nova.scheduler.client.report [None req-697ec9df-f4ed-4ed1-9652-15047a7296b9 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Deleted allocations for instance f3277886-4498-45c6-be68-e71d8293dc00 [ 965.403570] env[63028]: DEBUG nova.compute.manager [None req-6c9f5b6d-d6b4-4ed0-994c-55d12d9c1a85 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: ba57ed92-aaef-460c-bd45-d0cbe09e4615] Start destroying the instance on the hypervisor. 
{{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 965.403890] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-6c9f5b6d-d6b4-4ed0-994c-55d12d9c1a85 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: ba57ed92-aaef-460c-bd45-d0cbe09e4615] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 965.405063] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-335e1ee7-5a04-4bd1-a1af-6fb47f0f05a6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.414999] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c9f5b6d-d6b4-4ed0-994c-55d12d9c1a85 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: ba57ed92-aaef-460c-bd45-d0cbe09e4615] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 965.415361] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-42751952-805c-4a1d-a4b6-c1353b7f3d5d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.422793] env[63028]: DEBUG oslo_vmware.api [None req-6c9f5b6d-d6b4-4ed0-994c-55d12d9c1a85 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Waiting for the task: (returnval){ [ 965.422793] env[63028]: value = "task-2735930" [ 965.422793] env[63028]: _type = "Task" [ 965.422793] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.431641] env[63028]: DEBUG oslo_vmware.api [None req-6c9f5b6d-d6b4-4ed0-994c-55d12d9c1a85 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': task-2735930, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.467521] env[63028]: DEBUG oslo_concurrency.lockutils [None req-cd473ce4-2436-4b43-a6af-0ece4c944755 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Lock "e7ba7069-fe9f-4e0f-acc6-c1d61ad5a8d9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 41.490s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 965.473157] env[63028]: DEBUG oslo_vmware.api [None req-2a87602d-c3ea-493c-a1b4-1b103c3ef74c tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2735927, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.502292] env[63028]: DEBUG oslo_vmware.api [None req-459fd01e-3304-4c20-a3b6-e4be4925e19b tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2735925, 'name': CreateSnapshot_Task, 'duration_secs': 0.803689} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.502624] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-459fd01e-3304-4c20-a3b6-e4be4925e19b tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Created Snapshot of the VM instance {{(pid=63028) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 965.503498] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06b29e3e-6343-4160-8b4b-5b2d11d6d7df {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.520891] env[63028]: DEBUG oslo_concurrency.lockutils [None req-43ba495d-bfef-4eb6-9fde-f165b7e47fca tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] Releasing lock "refresh_cache-455578fa-7468-40dc-8c0a-37ac35e5c0a0" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 965.522047] env[63028]: DEBUG nova.compute.manager [None req-43ba495d-bfef-4eb6-9fde-f165b7e47fca tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] [instance: 455578fa-7468-40dc-8c0a-37ac35e5c0a0] Instance network_info: |[{"id": "3cae8ebc-a19e-401f-aa80-28da2e6bcd42", "address": "fa:16:3e:da:e5:5c", "network": {"id": "12d9a704-bca6-4132-add3-8d8bb4a11dbd", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-831034052-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0cc09f51b98c489ba0a3f14161e4a686", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7041d198-66a3-40de-bf7d-cfc036e6ed69", "external-id": "nsx-vlan-transportzone-278", "segmentation_id": 278, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3cae8ebc-a1", "ovs_interfaceid": "3cae8ebc-a19e-401f-aa80-28da2e6bcd42", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 965.522047] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-43ba495d-bfef-4eb6-9fde-f165b7e47fca tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] [instance: 455578fa-7468-40dc-8c0a-37ac35e5c0a0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:da:e5:5c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7041d198-66a3-40de-bf7d-cfc036e6ed69', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3cae8ebc-a19e-401f-aa80-28da2e6bcd42', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 965.530058] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-43ba495d-bfef-4eb6-9fde-f165b7e47fca 
tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] Creating folder: Project (0cc09f51b98c489ba0a3f14161e4a686). Parent ref: group-v550570. {{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 965.530754] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-06420670-5fec-484c-84f3-5cf6837702a0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.541648] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-43ba495d-bfef-4eb6-9fde-f165b7e47fca tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] Created folder: Project (0cc09f51b98c489ba0a3f14161e4a686) in parent group-v550570. [ 965.541914] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-43ba495d-bfef-4eb6-9fde-f165b7e47fca tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] Creating folder: Instances. Parent ref: group-v550818. {{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 965.542214] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ab9ede56-b2ec-44db-83db-a4a6d9de1261 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.551526] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-43ba495d-bfef-4eb6-9fde-f165b7e47fca tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] Created folder: Instances in parent group-v550818. [ 965.551844] env[63028]: DEBUG oslo.service.loopingcall [None req-43ba495d-bfef-4eb6-9fde-f165b7e47fca tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 965.552891] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 455578fa-7468-40dc-8c0a-37ac35e5c0a0] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 965.553181] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-81f07e4c-2e10-4419-a505-156dafcf0fcf {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.575071] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 965.575071] env[63028]: value = "task-2735933" [ 965.575071] env[63028]: _type = "Task" [ 965.575071] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.589828] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735933, 'name': CreateVM_Task} progress is 5%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.601847] env[63028]: DEBUG oslo_concurrency.lockutils [req-8b58d10a-abb2-4e1c-928c-ca06d75a0de4 req-84170d96-5cbf-4c1c-bf65-109a00f6d68d service nova] Releasing lock "refresh_cache-63524cd8-81de-419f-bb07-0326f3cb393f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 965.776171] env[63028]: DEBUG oslo_concurrency.lockutils [None req-697ec9df-f4ed-4ed1-9652-15047a7296b9 tempest-MigrationsAdminTest-1750269171 tempest-MigrationsAdminTest-1750269171-project-member] Lock "f3277886-4498-45c6-be68-e71d8293dc00" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.063s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 965.932876] env[63028]: DEBUG oslo_vmware.api [None req-6c9f5b6d-d6b4-4ed0-994c-55d12d9c1a85 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': task-2735930, 'name': PowerOffVM_Task, 'duration_secs': 0.376247} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.933637] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c9f5b6d-d6b4-4ed0-994c-55d12d9c1a85 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: ba57ed92-aaef-460c-bd45-d0cbe09e4615] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 965.934027] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-6c9f5b6d-d6b4-4ed0-994c-55d12d9c1a85 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: ba57ed92-aaef-460c-bd45-d0cbe09e4615] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 965.934503] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-289ce531-f798-4ef2-8ee5-27881aa3f157 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.974618] env[63028]: DEBUG oslo_vmware.api [None req-2a87602d-c3ea-493c-a1b4-1b103c3ef74c tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2735927, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.549935} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.974894] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a87602d-c3ea-493c-a1b4-1b103c3ef74c tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19/ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 965.975154] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-2a87602d-c3ea-493c-a1b4-1b103c3ef74c tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 965.975427] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7a8cf9d9-4ceb-4aa8-978f-d396e147d414 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.980420] env[63028]: DEBUG oslo_concurrency.lockutils [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Acquiring lock "63524cd8-81de-419f-bb07-0326f3cb393f" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 965.982305] env[63028]: DEBUG oslo_vmware.api [None req-2a87602d-c3ea-493c-a1b4-1b103c3ef74c tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Waiting for the task: (returnval){ [ 965.982305] env[63028]: value = "task-2735935" [ 965.982305] env[63028]: _type = "Task" [ 965.982305] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.995600] env[63028]: DEBUG oslo_vmware.api [None req-2a87602d-c3ea-493c-a1b4-1b103c3ef74c tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2735935, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.008048] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-6c9f5b6d-d6b4-4ed0-994c-55d12d9c1a85 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: ba57ed92-aaef-460c-bd45-d0cbe09e4615] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 966.008331] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-6c9f5b6d-d6b4-4ed0-994c-55d12d9c1a85 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: ba57ed92-aaef-460c-bd45-d0cbe09e4615] Deleting contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 966.008508] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c9f5b6d-d6b4-4ed0-994c-55d12d9c1a85 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Deleting the datastore file [datastore2] ba57ed92-aaef-460c-bd45-d0cbe09e4615 {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 966.008766] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-55c144cd-9070-4145-81ac-7e59f998a7e6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.015407] env[63028]: DEBUG oslo_vmware.api [None req-6c9f5b6d-d6b4-4ed0-994c-55d12d9c1a85 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Waiting for the task: (returnval){ [ 966.015407] env[63028]: value = "task-2735936" [ 966.015407] env[63028]: _type = "Task" [ 966.015407] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.026087] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-459fd01e-3304-4c20-a3b6-e4be4925e19b tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Creating linked-clone VM from snapshot {{(pid=63028) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 966.026429] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-ec113334-f5dd-4a23-879c-9070e70cb92b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.035854] env[63028]: DEBUG oslo_vmware.api [None req-6c9f5b6d-d6b4-4ed0-994c-55d12d9c1a85 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': task-2735936, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.037134] env[63028]: DEBUG oslo_vmware.api [None req-459fd01e-3304-4c20-a3b6-e4be4925e19b tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Waiting for the task: (returnval){ [ 966.037134] env[63028]: value = "task-2735937" [ 966.037134] env[63028]: _type = "Task" [ 966.037134] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.046529] env[63028]: DEBUG oslo_vmware.api [None req-459fd01e-3304-4c20-a3b6-e4be4925e19b tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2735937, 'name': CloneVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.088034] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735933, 'name': CreateVM_Task, 'duration_secs': 0.412894} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.088034] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 455578fa-7468-40dc-8c0a-37ac35e5c0a0] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 966.088034] env[63028]: DEBUG oslo_concurrency.lockutils [None req-43ba495d-bfef-4eb6-9fde-f165b7e47fca tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 966.088034] env[63028]: DEBUG oslo_concurrency.lockutils [None req-43ba495d-bfef-4eb6-9fde-f165b7e47fca tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 966.088034] env[63028]: DEBUG oslo_concurrency.lockutils [None req-43ba495d-bfef-4eb6-9fde-f165b7e47fca tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 966.088317] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0424cd7c-e506-461e-a674-1e018cf3a968 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.093067] env[63028]: DEBUG oslo_vmware.api [None req-43ba495d-bfef-4eb6-9fde-f165b7e47fca tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] Waiting for the task: (returnval){ [ 966.093067] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]521cb99f-5a03-4460-3d11-47aba92f92bc" [ 966.093067] env[63028]: _type = "Task" [ 966.093067] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.101734] env[63028]: DEBUG oslo_vmware.api [None req-43ba495d-bfef-4eb6-9fde-f165b7e47fca tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]521cb99f-5a03-4460-3d11-47aba92f92bc, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.227906] env[63028]: DEBUG nova.compute.manager [req-e83ed4c0-cbd7-4868-aa49-75f51ed119aa req-596fe8bd-2e90-4f0d-99d9-6331713708e9 service nova] [instance: 455578fa-7468-40dc-8c0a-37ac35e5c0a0] Received event network-changed-3cae8ebc-a19e-401f-aa80-28da2e6bcd42 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 966.228086] env[63028]: DEBUG nova.compute.manager [req-e83ed4c0-cbd7-4868-aa49-75f51ed119aa req-596fe8bd-2e90-4f0d-99d9-6331713708e9 service nova] [instance: 455578fa-7468-40dc-8c0a-37ac35e5c0a0] Refreshing instance network info cache due to event network-changed-3cae8ebc-a19e-401f-aa80-28da2e6bcd42. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 966.228324] env[63028]: DEBUG oslo_concurrency.lockutils [req-e83ed4c0-cbd7-4868-aa49-75f51ed119aa req-596fe8bd-2e90-4f0d-99d9-6331713708e9 service nova] Acquiring lock "refresh_cache-455578fa-7468-40dc-8c0a-37ac35e5c0a0" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 966.228465] env[63028]: DEBUG oslo_concurrency.lockutils [req-e83ed4c0-cbd7-4868-aa49-75f51ed119aa req-596fe8bd-2e90-4f0d-99d9-6331713708e9 service nova] Acquired lock "refresh_cache-455578fa-7468-40dc-8c0a-37ac35e5c0a0" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 966.228659] env[63028]: DEBUG nova.network.neutron [req-e83ed4c0-cbd7-4868-aa49-75f51ed119aa req-596fe8bd-2e90-4f0d-99d9-6331713708e9 service nova] [instance: 455578fa-7468-40dc-8c0a-37ac35e5c0a0] Refreshing network info cache for port 3cae8ebc-a19e-401f-aa80-28da2e6bcd42 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 966.240797] env[63028]: DEBUG oslo_concurrency.lockutils [None req-217f2845-2515-4130-b1d0-de641f56c44e tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Acquiring lock "bb2b405e-6207-4718-9485-0271d26c160f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 966.240797] env[63028]: DEBUG oslo_concurrency.lockutils [None req-217f2845-2515-4130-b1d0-de641f56c44e tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Lock "bb2b405e-6207-4718-9485-0271d26c160f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 966.240797] env[63028]: DEBUG oslo_concurrency.lockutils [None req-217f2845-2515-4130-b1d0-de641f56c44e tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Acquiring lock "bb2b405e-6207-4718-9485-0271d26c160f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 966.240969] env[63028]: DEBUG oslo_concurrency.lockutils [None req-217f2845-2515-4130-b1d0-de641f56c44e tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Lock "bb2b405e-6207-4718-9485-0271d26c160f-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 966.241144] env[63028]: DEBUG oslo_concurrency.lockutils [None req-217f2845-2515-4130-b1d0-de641f56c44e tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Lock "bb2b405e-6207-4718-9485-0271d26c160f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 966.243819] env[63028]: INFO nova.compute.manager [None req-217f2845-2515-4130-b1d0-de641f56c44e tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: bb2b405e-6207-4718-9485-0271d26c160f] Terminating instance [ 966.494330] env[63028]: DEBUG oslo_vmware.api [None req-2a87602d-c3ea-493c-a1b4-1b103c3ef74c tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2735935, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067629} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.494330] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-2a87602d-c3ea-493c-a1b4-1b103c3ef74c tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 966.495111] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed73ee93-ac0e-407b-b95a-3b7fe1ac3c1d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.520604] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a87602d-c3ea-493c-a1b4-1b103c3ef74c tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19] Reconfiguring VM instance instance-00000055 to attach disk [datastore2] ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19/ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 966.523317] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7cfb98df-24bb-47d9-aaa5-74ef5728a7ac {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.546017] env[63028]: DEBUG oslo_vmware.api [None req-2a87602d-c3ea-493c-a1b4-1b103c3ef74c tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Waiting for the task: (returnval){ [ 966.546017] env[63028]: value = "task-2735938" [ 966.546017] env[63028]: _type = "Task" [ 966.546017] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.557076] env[63028]: DEBUG oslo_vmware.api [None req-459fd01e-3304-4c20-a3b6-e4be4925e19b tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2735937, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.557411] env[63028]: DEBUG oslo_vmware.api [None req-6c9f5b6d-d6b4-4ed0-994c-55d12d9c1a85 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Task: {'id': task-2735936, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.341282} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.557976] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c9f5b6d-d6b4-4ed0-994c-55d12d9c1a85 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 966.558226] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-6c9f5b6d-d6b4-4ed0-994c-55d12d9c1a85 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: ba57ed92-aaef-460c-bd45-d0cbe09e4615] Deleted contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 966.558444] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-6c9f5b6d-d6b4-4ed0-994c-55d12d9c1a85 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: ba57ed92-aaef-460c-bd45-d0cbe09e4615] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 966.558633] env[63028]: INFO nova.compute.manager [None req-6c9f5b6d-d6b4-4ed0-994c-55d12d9c1a85 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] [instance: ba57ed92-aaef-460c-bd45-d0cbe09e4615] Took 1.15 seconds to destroy the instance on the hypervisor. [ 966.558869] env[63028]: DEBUG oslo.service.loopingcall [None req-6c9f5b6d-d6b4-4ed0-994c-55d12d9c1a85 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 966.559106] env[63028]: DEBUG nova.compute.manager [-] [instance: ba57ed92-aaef-460c-bd45-d0cbe09e4615] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 966.559217] env[63028]: DEBUG nova.network.neutron [-] [instance: ba57ed92-aaef-460c-bd45-d0cbe09e4615] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 966.563580] env[63028]: DEBUG oslo_vmware.api [None req-2a87602d-c3ea-493c-a1b4-1b103c3ef74c tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2735938, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.604459] env[63028]: DEBUG oslo_vmware.api [None req-43ba495d-bfef-4eb6-9fde-f165b7e47fca tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]521cb99f-5a03-4460-3d11-47aba92f92bc, 'name': SearchDatastore_Task, 'duration_secs': 0.031694} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.607526] env[63028]: DEBUG oslo_concurrency.lockutils [None req-43ba495d-bfef-4eb6-9fde-f165b7e47fca tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 966.607797] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-43ba495d-bfef-4eb6-9fde-f165b7e47fca tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] [instance: 455578fa-7468-40dc-8c0a-37ac35e5c0a0] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 966.608125] env[63028]: DEBUG oslo_concurrency.lockutils [None req-43ba495d-bfef-4eb6-9fde-f165b7e47fca tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 966.608226] env[63028]: DEBUG oslo_concurrency.lockutils [None req-43ba495d-bfef-4eb6-9fde-f165b7e47fca tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 966.608497] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-43ba495d-bfef-4eb6-9fde-f165b7e47fca tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 966.609032] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fc3a3f2f-a5aa-45a4-b33f-2c9f70b76ebc {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.623447] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-43ba495d-bfef-4eb6-9fde-f165b7e47fca tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 966.624394] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-43ba495d-bfef-4eb6-9fde-f165b7e47fca tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 966.624626] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0b7e8383-29bc-499f-b921-d8813d01f141 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.629847] env[63028]: DEBUG oslo_vmware.api [None req-43ba495d-bfef-4eb6-9fde-f165b7e47fca tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] Waiting for the task: (returnval){ [ 966.629847] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52bf5e0a-5db8-1496-2545-93cf40b8bac8" [ 966.629847] env[63028]: _type = "Task" [ 966.629847] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.640203] env[63028]: DEBUG oslo_vmware.api [None req-43ba495d-bfef-4eb6-9fde-f165b7e47fca tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52bf5e0a-5db8-1496-2545-93cf40b8bac8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.727451] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abda29c3-9d3a-4384-ab03-477c0d93f1c0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.737825] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb9b55d9-a964-486f-9e75-4dc490a3655b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.773045] env[63028]: DEBUG nova.compute.manager [None req-217f2845-2515-4130-b1d0-de641f56c44e tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: bb2b405e-6207-4718-9485-0271d26c160f] Start destroying the instance on the hypervisor. 
{{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 966.773250] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-217f2845-2515-4130-b1d0-de641f56c44e tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: bb2b405e-6207-4718-9485-0271d26c160f] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 966.774269] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0837f157-e91a-459b-97d5-780973a759cb {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.777833] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f91fbc55-fbcf-4ded-b8a7-a9fa16672b15 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.788026] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-217f2845-2515-4130-b1d0-de641f56c44e tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: bb2b405e-6207-4718-9485-0271d26c160f] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 966.788236] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1a5f00bc-7a1f-46ff-b3ad-20765a43828d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.791090] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38127150-6809-4a32-a24b-f306b93f7fc0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.808453] env[63028]: DEBUG nova.compute.provider_tree [None req-303769e7-95d0-483b-bc6d-c7843cb1c5e3 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 966.814371] env[63028]: DEBUG oslo_vmware.api [None req-217f2845-2515-4130-b1d0-de641f56c44e tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Waiting for the task: (returnval){ [ 966.814371] env[63028]: value = "task-2735939" [ 966.814371] env[63028]: _type = "Task" [ 966.814371] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.823266] env[63028]: DEBUG oslo_vmware.api [None req-217f2845-2515-4130-b1d0-de641f56c44e tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Task: {'id': task-2735939, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.051732] env[63028]: DEBUG oslo_vmware.api [None req-459fd01e-3304-4c20-a3b6-e4be4925e19b tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2735937, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.059550] env[63028]: DEBUG oslo_vmware.api [None req-2a87602d-c3ea-493c-a1b4-1b103c3ef74c tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2735938, 'name': ReconfigVM_Task, 'duration_secs': 0.265501} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.059856] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a87602d-c3ea-493c-a1b4-1b103c3ef74c tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19] Reconfigured VM instance instance-00000055 to attach disk [datastore2] ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19/ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 967.060542] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2b0e625f-66c6-40a5-94c9-97a5c7b5f49a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.066315] env[63028]: DEBUG oslo_vmware.api [None req-2a87602d-c3ea-493c-a1b4-1b103c3ef74c tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Waiting for the task: (returnval){ [ 967.066315] env[63028]: value = "task-2735940" [ 967.066315] env[63028]: _type = "Task" [ 967.066315] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.076576] env[63028]: DEBUG oslo_vmware.api [None req-2a87602d-c3ea-493c-a1b4-1b103c3ef74c tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2735940, 'name': Rename_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.141075] env[63028]: DEBUG oslo_vmware.api [None req-43ba495d-bfef-4eb6-9fde-f165b7e47fca tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52bf5e0a-5db8-1496-2545-93cf40b8bac8, 'name': SearchDatastore_Task, 'duration_secs': 0.012615} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.141959] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9a5e426a-5e59-443b-be69-73b05f2ad02a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.147996] env[63028]: DEBUG oslo_vmware.api [None req-43ba495d-bfef-4eb6-9fde-f165b7e47fca tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] Waiting for the task: (returnval){ [ 967.147996] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52c3c933-6080-9a90-a378-adee04049c16" [ 967.147996] env[63028]: _type = "Task" [ 967.147996] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.156394] env[63028]: DEBUG oslo_vmware.api [None req-43ba495d-bfef-4eb6-9fde-f165b7e47fca tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52c3c933-6080-9a90-a378-adee04049c16, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.316236] env[63028]: DEBUG nova.scheduler.client.report [None req-303769e7-95d0-483b-bc6d-c7843cb1c5e3 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 967.336553] env[63028]: DEBUG oslo_vmware.api [None req-217f2845-2515-4130-b1d0-de641f56c44e tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Task: {'id': task-2735939, 'name': PowerOffVM_Task, 'duration_secs': 0.195707} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.336553] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-217f2845-2515-4130-b1d0-de641f56c44e tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: bb2b405e-6207-4718-9485-0271d26c160f] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 967.336672] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-217f2845-2515-4130-b1d0-de641f56c44e tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: bb2b405e-6207-4718-9485-0271d26c160f] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 967.337008] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b66719dd-c9bc-495b-ae01-38bb37aab435 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.345135] env[63028]: DEBUG nova.network.neutron [-] [instance: ba57ed92-aaef-460c-bd45-d0cbe09e4615] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 967.356857] env[63028]: DEBUG nova.network.neutron [req-e83ed4c0-cbd7-4868-aa49-75f51ed119aa req-596fe8bd-2e90-4f0d-99d9-6331713708e9 service nova] [instance: 455578fa-7468-40dc-8c0a-37ac35e5c0a0] Updated VIF entry in instance network info cache for port 3cae8ebc-a19e-401f-aa80-28da2e6bcd42. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 967.356857] env[63028]: DEBUG nova.network.neutron [req-e83ed4c0-cbd7-4868-aa49-75f51ed119aa req-596fe8bd-2e90-4f0d-99d9-6331713708e9 service nova] [instance: 455578fa-7468-40dc-8c0a-37ac35e5c0a0] Updating instance_info_cache with network_info: [{"id": "3cae8ebc-a19e-401f-aa80-28da2e6bcd42", "address": "fa:16:3e:da:e5:5c", "network": {"id": "12d9a704-bca6-4132-add3-8d8bb4a11dbd", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-831034052-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0cc09f51b98c489ba0a3f14161e4a686", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7041d198-66a3-40de-bf7d-cfc036e6ed69", "external-id": "nsx-vlan-transportzone-278", "segmentation_id": 278, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3cae8ebc-a1", "ovs_interfaceid": "3cae8ebc-a19e-401f-aa80-28da2e6bcd42", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 967.402858] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-217f2845-2515-4130-b1d0-de641f56c44e tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: bb2b405e-6207-4718-9485-0271d26c160f] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 967.403108] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-217f2845-2515-4130-b1d0-de641f56c44e tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: bb2b405e-6207-4718-9485-0271d26c160f] Deleting contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 967.403332] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-217f2845-2515-4130-b1d0-de641f56c44e tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Deleting the datastore file [datastore1] bb2b405e-6207-4718-9485-0271d26c160f {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 967.403622] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8f061929-765c-4a28-b4c3-032c41fab3e6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.411150] env[63028]: DEBUG oslo_vmware.api [None req-217f2845-2515-4130-b1d0-de641f56c44e tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Waiting for the task: (returnval){ [ 967.411150] env[63028]: value = "task-2735943" [ 967.411150] env[63028]: _type = "Task" [ 967.411150] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.419713] env[63028]: DEBUG oslo_vmware.api [None req-217f2845-2515-4130-b1d0-de641f56c44e tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Task: {'id': task-2735943, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.552863] env[63028]: DEBUG oslo_vmware.api [None req-459fd01e-3304-4c20-a3b6-e4be4925e19b tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2735937, 'name': CloneVM_Task} progress is 95%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.576954] env[63028]: DEBUG oslo_vmware.api [None req-2a87602d-c3ea-493c-a1b4-1b103c3ef74c tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2735940, 'name': Rename_Task, 'duration_secs': 0.144145} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.577306] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a87602d-c3ea-493c-a1b4-1b103c3ef74c tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 967.577570] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8b2a28e8-5d8e-49e9-93af-fb1597ef9896 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.583529] env[63028]: DEBUG oslo_vmware.api [None req-2a87602d-c3ea-493c-a1b4-1b103c3ef74c tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Waiting for the task: (returnval){ [ 967.583529] env[63028]: value = "task-2735944" [ 967.583529] env[63028]: _type = "Task" [ 967.583529] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.591981] env[63028]: DEBUG oslo_vmware.api [None req-2a87602d-c3ea-493c-a1b4-1b103c3ef74c tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2735944, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.659830] env[63028]: DEBUG oslo_vmware.api [None req-43ba495d-bfef-4eb6-9fde-f165b7e47fca tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52c3c933-6080-9a90-a378-adee04049c16, 'name': SearchDatastore_Task, 'duration_secs': 0.014428} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.660111] env[63028]: DEBUG oslo_concurrency.lockutils [None req-43ba495d-bfef-4eb6-9fde-f165b7e47fca tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 967.660364] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-43ba495d-bfef-4eb6-9fde-f165b7e47fca tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] 455578fa-7468-40dc-8c0a-37ac35e5c0a0/455578fa-7468-40dc-8c0a-37ac35e5c0a0.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 967.660623] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-040a3106-80db-4e13-9302-9ebb1bbb478a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.668452] env[63028]: DEBUG oslo_vmware.api [None req-43ba495d-bfef-4eb6-9fde-f165b7e47fca tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] Waiting for the task: (returnval){ [ 967.668452] env[63028]: value = "task-2735945" [ 967.668452] env[63028]: _type = "Task" [ 967.668452] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.676729] env[63028]: DEBUG oslo_vmware.api [None req-43ba495d-bfef-4eb6-9fde-f165b7e47fca tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] Task: {'id': task-2735945, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.829018] env[63028]: DEBUG oslo_concurrency.lockutils [None req-303769e7-95d0-483b-bc6d-c7843cb1c5e3 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.584s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 967.829018] env[63028]: DEBUG nova.compute.manager [None req-303769e7-95d0-483b-bc6d-c7843cb1c5e3 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] [instance: 56d6982d-9f76-4952-8c8b-f64b3c8d02fe] Start building networks asynchronously for instance. 
{{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 967.830333] env[63028]: DEBUG oslo_concurrency.lockutils [None req-0276dd8a-7d26-4e27-a04b-31fa55ca6c10 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.915s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 967.830415] env[63028]: DEBUG nova.objects.instance [None req-0276dd8a-7d26-4e27-a04b-31fa55ca6c10 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] Lazy-loading 'resources' on Instance uuid 8f621e7b-0c76-4f70-830d-09d28a2e0736 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 967.850874] env[63028]: INFO nova.compute.manager [-] [instance: ba57ed92-aaef-460c-bd45-d0cbe09e4615] Took 1.29 seconds to deallocate network for instance. [ 967.858361] env[63028]: DEBUG oslo_concurrency.lockutils [req-e83ed4c0-cbd7-4868-aa49-75f51ed119aa req-596fe8bd-2e90-4f0d-99d9-6331713708e9 service nova] Releasing lock "refresh_cache-455578fa-7468-40dc-8c0a-37ac35e5c0a0" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 967.921339] env[63028]: DEBUG oslo_vmware.api [None req-217f2845-2515-4130-b1d0-de641f56c44e tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Task: {'id': task-2735943, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.416885} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.921601] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-217f2845-2515-4130-b1d0-de641f56c44e tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 967.921793] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-217f2845-2515-4130-b1d0-de641f56c44e tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: bb2b405e-6207-4718-9485-0271d26c160f] Deleted contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 967.921968] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-217f2845-2515-4130-b1d0-de641f56c44e tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: bb2b405e-6207-4718-9485-0271d26c160f] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 967.922156] env[63028]: INFO nova.compute.manager [None req-217f2845-2515-4130-b1d0-de641f56c44e tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: bb2b405e-6207-4718-9485-0271d26c160f] Took 1.15 seconds to destroy the instance on the hypervisor. [ 967.922397] env[63028]: DEBUG oslo.service.loopingcall [None req-217f2845-2515-4130-b1d0-de641f56c44e tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 967.922581] env[63028]: DEBUG nova.compute.manager [-] [instance: bb2b405e-6207-4718-9485-0271d26c160f] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 967.922675] env[63028]: DEBUG nova.network.neutron [-] [instance: bb2b405e-6207-4718-9485-0271d26c160f] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 968.054593] env[63028]: DEBUG oslo_vmware.api [None req-459fd01e-3304-4c20-a3b6-e4be4925e19b tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2735937, 'name': CloneVM_Task, 'duration_secs': 1.816647} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.054983] env[63028]: INFO nova.virt.vmwareapi.vmops [None req-459fd01e-3304-4c20-a3b6-e4be4925e19b tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Created linked-clone VM from snapshot [ 968.056063] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1edf914d-7424-46e1-9dcc-a7db6e1c9630 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.065282] env[63028]: DEBUG nova.virt.vmwareapi.images [None req-459fd01e-3304-4c20-a3b6-e4be4925e19b tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Uploading image 89f02435-fa22-4275-ab99-73170c1e53cf {{(pid=63028) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 968.097329] env[63028]: DEBUG oslo_vmware.api [None req-2a87602d-c3ea-493c-a1b4-1b103c3ef74c tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2735944, 'name': PowerOnVM_Task, 'duration_secs': 0.506693} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.097620] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a87602d-c3ea-493c-a1b4-1b103c3ef74c tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 968.097824] env[63028]: INFO nova.compute.manager [None req-2a87602d-c3ea-493c-a1b4-1b103c3ef74c tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19] Took 7.57 seconds to spawn the instance on the hypervisor. 
[ 968.097964] env[63028]: DEBUG nova.compute.manager [None req-2a87602d-c3ea-493c-a1b4-1b103c3ef74c tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 968.098840] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8aad9c04-58ee-4cb8-89a2-c517a7e951fe {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.125015] env[63028]: DEBUG oslo_vmware.rw_handles [None req-459fd01e-3304-4c20-a3b6-e4be4925e19b tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 968.125015] env[63028]: value = "vm-550821" [ 968.125015] env[63028]: _type = "VirtualMachine" [ 968.125015] env[63028]: }. {{(pid=63028) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 968.125015] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-a64d5716-34df-436b-8e04-337acc0d4ea1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.136393] env[63028]: DEBUG oslo_vmware.rw_handles [None req-459fd01e-3304-4c20-a3b6-e4be4925e19b tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Lease: (returnval){ [ 968.136393] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52a403c4-f7d7-6f9d-e878-c604561af9e4" [ 968.136393] env[63028]: _type = "HttpNfcLease" [ 968.136393] env[63028]: } obtained for exporting VM: (result){ [ 968.136393] env[63028]: value = "vm-550821" [ 968.136393] env[63028]: _type = "VirtualMachine" [ 968.136393] env[63028]: }. {{(pid=63028) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 968.137578] env[63028]: DEBUG oslo_vmware.api [None req-459fd01e-3304-4c20-a3b6-e4be4925e19b tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Waiting for the lease: (returnval){ [ 968.137578] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52a403c4-f7d7-6f9d-e878-c604561af9e4" [ 968.137578] env[63028]: _type = "HttpNfcLease" [ 968.137578] env[63028]: } to be ready. {{(pid=63028) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 968.145016] env[63028]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 968.145016] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52a403c4-f7d7-6f9d-e878-c604561af9e4" [ 968.145016] env[63028]: _type = "HttpNfcLease" [ 968.145016] env[63028]: } is initializing. {{(pid=63028) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 968.179281] env[63028]: DEBUG oslo_vmware.api [None req-43ba495d-bfef-4eb6-9fde-f165b7e47fca tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] Task: {'id': task-2735945, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.302522] env[63028]: DEBUG nova.compute.manager [req-bfbde586-1a60-4865-9084-3edd82a7d1c4 req-a566b213-3cb8-448f-bd71-32d917b75874 service nova] [instance: ba57ed92-aaef-460c-bd45-d0cbe09e4615] Received event network-vif-deleted-66421979-b107-4dd5-9bc4-40bdefa3a5d0 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 968.334368] env[63028]: DEBUG nova.compute.utils [None req-303769e7-95d0-483b-bc6d-c7843cb1c5e3 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 968.339561] env[63028]: DEBUG nova.compute.manager [None req-303769e7-95d0-483b-bc6d-c7843cb1c5e3 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] [instance: 56d6982d-9f76-4952-8c8b-f64b3c8d02fe] Allocating IP information in the background. {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 968.339561] env[63028]: DEBUG nova.network.neutron [None req-303769e7-95d0-483b-bc6d-c7843cb1c5e3 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] [instance: 56d6982d-9f76-4952-8c8b-f64b3c8d02fe] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 968.360037] env[63028]: DEBUG oslo_concurrency.lockutils [None req-6c9f5b6d-d6b4-4ed0-994c-55d12d9c1a85 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 968.428293] env[63028]: DEBUG nova.policy [None req-303769e7-95d0-483b-bc6d-c7843cb1c5e3 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bc25e0b534f448c8b7bc90f53fdbfba6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bae448aa28a84aa6863fffc24a5448fb', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 968.630073] env[63028]: INFO nova.compute.manager [None req-2a87602d-c3ea-493c-a1b4-1b103c3ef74c tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19] Took 38.61 seconds to build instance. 
[ 968.635595] env[63028]: DEBUG nova.compute.manager [req-4fda10ad-7153-4eea-8b77-a43d27249bd7 req-eeff043e-8d91-48b9-a36d-3b567c11b3f9 service nova] [instance: bb2b405e-6207-4718-9485-0271d26c160f] Received event network-vif-deleted-a9da993b-1d38-4109-b090-f639a239a687 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 968.637163] env[63028]: INFO nova.compute.manager [req-4fda10ad-7153-4eea-8b77-a43d27249bd7 req-eeff043e-8d91-48b9-a36d-3b567c11b3f9 service nova] [instance: bb2b405e-6207-4718-9485-0271d26c160f] Neutron deleted interface a9da993b-1d38-4109-b090-f639a239a687; detaching it from the instance and deleting it from the info cache [ 968.637163] env[63028]: DEBUG nova.network.neutron [req-4fda10ad-7153-4eea-8b77-a43d27249bd7 req-eeff043e-8d91-48b9-a36d-3b567c11b3f9 service nova] [instance: bb2b405e-6207-4718-9485-0271d26c160f] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 968.646504] env[63028]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 968.646504] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52a403c4-f7d7-6f9d-e878-c604561af9e4" [ 968.646504] env[63028]: _type = "HttpNfcLease" [ 968.646504] env[63028]: } is ready. {{(pid=63028) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 968.647090] env[63028]: DEBUG oslo_vmware.rw_handles [None req-459fd01e-3304-4c20-a3b6-e4be4925e19b tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 968.647090] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52a403c4-f7d7-6f9d-e878-c604561af9e4" [ 968.647090] env[63028]: _type = "HttpNfcLease" [ 968.647090] env[63028]: }. {{(pid=63028) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 968.648839] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9505e415-065b-4edf-a48e-9b903a216960 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.662583] env[63028]: DEBUG oslo_vmware.rw_handles [None req-459fd01e-3304-4c20-a3b6-e4be4925e19b tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/520a564c-51cc-5fbe-8830-de7c3b623560/disk-0.vmdk from lease info. {{(pid=63028) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 968.662771] env[63028]: DEBUG oslo_vmware.rw_handles [None req-459fd01e-3304-4c20-a3b6-e4be4925e19b tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/520a564c-51cc-5fbe-8830-de7c3b623560/disk-0.vmdk for reading. {{(pid=63028) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 968.739655] env[63028]: DEBUG oslo_vmware.api [None req-43ba495d-bfef-4eb6-9fde-f165b7e47fca tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] Task: {'id': task-2735945, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.642869} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.739655] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-43ba495d-bfef-4eb6-9fde-f165b7e47fca tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] 455578fa-7468-40dc-8c0a-37ac35e5c0a0/455578fa-7468-40dc-8c0a-37ac35e5c0a0.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 968.739655] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-43ba495d-bfef-4eb6-9fde-f165b7e47fca tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] [instance: 455578fa-7468-40dc-8c0a-37ac35e5c0a0] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 968.739655] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d50bcccc-8a42-4cb1-963e-faf389195943 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.750459] env[63028]: DEBUG oslo_vmware.api [None req-43ba495d-bfef-4eb6-9fde-f165b7e47fca tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] Waiting for the task: (returnval){ [ 968.750459] env[63028]: value = "task-2735947" [ 968.750459] env[63028]: _type = "Task" [ 968.750459] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.759895] env[63028]: DEBUG oslo_vmware.api [None req-43ba495d-bfef-4eb6-9fde-f165b7e47fca tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] Task: {'id': task-2735947, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.797262] env[63028]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-c6be82c7-0061-4533-988b-c3e02394da98 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.838389] env[63028]: DEBUG nova.compute.manager [None req-303769e7-95d0-483b-bc6d-c7843cb1c5e3 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] [instance: 56d6982d-9f76-4952-8c8b-f64b3c8d02fe] Start building block device mappings for instance. 
{{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 968.865285] env[63028]: DEBUG nova.network.neutron [None req-303769e7-95d0-483b-bc6d-c7843cb1c5e3 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] [instance: 56d6982d-9f76-4952-8c8b-f64b3c8d02fe] Successfully created port: de1f8509-63e1-41ae-ad48-03c4e8b74871 {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 968.872122] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c14e295d-a6d1-403c-b9a6-d0f98893d4de {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.882474] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d162b980-26db-4824-a20d-8838271e15e7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.919373] env[63028]: DEBUG nova.network.neutron [-] [instance: bb2b405e-6207-4718-9485-0271d26c160f] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 968.922306] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b5f28de-4fe7-42fe-b85b-fd15eb8bae5b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.932139] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d93ba80-a218-48ca-a174-aa0321f8b108 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.951525] env[63028]: DEBUG nova.compute.provider_tree [None req-0276dd8a-7d26-4e27-a04b-31fa55ca6c10 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 969.138530] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2a87602d-c3ea-493c-a1b4-1b103c3ef74c tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Lock "ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 40.131s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 969.139080] env[63028]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f3e3011e-1dff-48cc-9f99-cf59b7d0dd23 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.148699] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-215d7fe6-225d-4b32-901e-26f3692107c0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.186879] env[63028]: DEBUG nova.compute.manager [req-4fda10ad-7153-4eea-8b77-a43d27249bd7 req-eeff043e-8d91-48b9-a36d-3b567c11b3f9 service nova] [instance: bb2b405e-6207-4718-9485-0271d26c160f] Detach interface failed, port_id=a9da993b-1d38-4109-b090-f639a239a687, reason: Instance bb2b405e-6207-4718-9485-0271d26c160f could not be found. 
{{(pid=63028) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 969.268778] env[63028]: DEBUG oslo_vmware.api [None req-43ba495d-bfef-4eb6-9fde-f165b7e47fca tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] Task: {'id': task-2735947, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078612} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.269231] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-43ba495d-bfef-4eb6-9fde-f165b7e47fca tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] [instance: 455578fa-7468-40dc-8c0a-37ac35e5c0a0] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 969.270370] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba4cf7f7-3e3d-4532-a0b5-32423dabef76 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.296256] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-43ba495d-bfef-4eb6-9fde-f165b7e47fca tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] [instance: 455578fa-7468-40dc-8c0a-37ac35e5c0a0] Reconfiguring VM instance instance-00000056 to attach disk [datastore2] 455578fa-7468-40dc-8c0a-37ac35e5c0a0/455578fa-7468-40dc-8c0a-37ac35e5c0a0.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 969.296578] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-36407317-865e-43e3-bc53-6ebfa0d9518d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.321340] env[63028]: DEBUG oslo_vmware.api [None req-43ba495d-bfef-4eb6-9fde-f165b7e47fca tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] Waiting for the task: (returnval){ [ 969.321340] env[63028]: value = "task-2735948" [ 969.321340] env[63028]: _type = "Task" [ 969.321340] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.331709] env[63028]: DEBUG oslo_vmware.api [None req-43ba495d-bfef-4eb6-9fde-f165b7e47fca tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] Task: {'id': task-2735948, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.423947] env[63028]: INFO nova.compute.manager [-] [instance: bb2b405e-6207-4718-9485-0271d26c160f] Took 1.50 seconds to deallocate network for instance. 
[ 969.456718] env[63028]: DEBUG nova.scheduler.client.report [None req-0276dd8a-7d26-4e27-a04b-31fa55ca6c10 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 969.603980] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8914f4fd-23d9-41ba-bde1-faf65d9122a3 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] Acquiring lock "7e914e49-0d70-4024-940b-ad2a15e9dff7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 969.604444] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8914f4fd-23d9-41ba-bde1-faf65d9122a3 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] Lock "7e914e49-0d70-4024-940b-ad2a15e9dff7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 969.604784] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8914f4fd-23d9-41ba-bde1-faf65d9122a3 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] Acquiring lock "7e914e49-0d70-4024-940b-ad2a15e9dff7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 969.605174] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8914f4fd-23d9-41ba-bde1-faf65d9122a3 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] Lock "7e914e49-0d70-4024-940b-ad2a15e9dff7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 969.605827] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8914f4fd-23d9-41ba-bde1-faf65d9122a3 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] Lock "7e914e49-0d70-4024-940b-ad2a15e9dff7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 969.608076] env[63028]: INFO nova.compute.manager [None req-8914f4fd-23d9-41ba-bde1-faf65d9122a3 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] [instance: 7e914e49-0d70-4024-940b-ad2a15e9dff7] Terminating instance [ 969.764208] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-46e72be6-8f6a-47bc-8465-8e79a24584ee tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Volume attach. 
Driver type: vmdk {{(pid=63028) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 969.764576] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-46e72be6-8f6a-47bc-8465-8e79a24584ee tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-550816', 'volume_id': '15326c33-7e0b-41be-bf2e-5b82153cea0d', 'name': 'volume-15326c33-7e0b-41be-bf2e-5b82153cea0d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '46dc76bc-854f-46ad-9db5-21cf6f40fb21', 'attached_at': '', 'detached_at': '', 'volume_id': '15326c33-7e0b-41be-bf2e-5b82153cea0d', 'serial': '15326c33-7e0b-41be-bf2e-5b82153cea0d'} {{(pid=63028) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 969.767706] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00b2f908-70a9-4b65-863c-a55f90ec5b73 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.791819] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0bf0b99-fa4d-492e-ab3c-b72f351823cf {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.824195] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-46e72be6-8f6a-47bc-8465-8e79a24584ee tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Reconfiguring VM instance instance-0000004e to attach disk [datastore2] volume-15326c33-7e0b-41be-bf2e-5b82153cea0d/volume-15326c33-7e0b-41be-bf2e-5b82153cea0d.vmdk or device None with type thin {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 969.824824] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-abc9994f-aeb0-4393-9444-869502333403 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.853335] env[63028]: DEBUG oslo_vmware.api [None req-43ba495d-bfef-4eb6-9fde-f165b7e47fca tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] Task: {'id': task-2735948, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.857515] env[63028]: DEBUG nova.compute.manager [None req-303769e7-95d0-483b-bc6d-c7843cb1c5e3 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] [instance: 56d6982d-9f76-4952-8c8b-f64b3c8d02fe] Start spawning the instance on the hypervisor. {{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 969.863223] env[63028]: DEBUG oslo_vmware.api [None req-46e72be6-8f6a-47bc-8465-8e79a24584ee tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Waiting for the task: (returnval){ [ 969.863223] env[63028]: value = "task-2735949" [ 969.863223] env[63028]: _type = "Task" [ 969.863223] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.873342] env[63028]: DEBUG oslo_vmware.api [None req-46e72be6-8f6a-47bc-8465-8e79a24584ee tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2735949, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.909536] env[63028]: DEBUG oslo_concurrency.lockutils [None req-029bbebf-b712-4d44-b93d-a9ad42e63e3e tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Acquiring lock "ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 969.909536] env[63028]: DEBUG oslo_concurrency.lockutils [None req-029bbebf-b712-4d44-b93d-a9ad42e63e3e tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Lock "ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 969.910342] env[63028]: INFO nova.compute.manager [None req-029bbebf-b712-4d44-b93d-a9ad42e63e3e tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19] Shelving [ 969.914090] env[63028]: DEBUG nova.virt.hardware [None req-303769e7-95d0-483b-bc6d-c7843cb1c5e3 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 969.914511] env[63028]: DEBUG nova.virt.hardware [None req-303769e7-95d0-483b-bc6d-c7843cb1c5e3 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 969.914676] env[63028]: DEBUG nova.virt.hardware [None req-303769e7-95d0-483b-bc6d-c7843cb1c5e3 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 969.914751] env[63028]: DEBUG nova.virt.hardware [None req-303769e7-95d0-483b-bc6d-c7843cb1c5e3 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Flavor pref 0:0:0 {{(pid=63028) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 969.914888] env[63028]: DEBUG nova.virt.hardware [None req-303769e7-95d0-483b-bc6d-c7843cb1c5e3 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 969.915053] env[63028]: DEBUG nova.virt.hardware [None req-303769e7-95d0-483b-bc6d-c7843cb1c5e3 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 969.915537] env[63028]: DEBUG nova.virt.hardware [None req-303769e7-95d0-483b-bc6d-c7843cb1c5e3 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 969.915537] env[63028]: DEBUG nova.virt.hardware [None req-303769e7-95d0-483b-bc6d-c7843cb1c5e3 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 969.915825] env[63028]: DEBUG nova.virt.hardware [None req-303769e7-95d0-483b-bc6d-c7843cb1c5e3 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 969.916181] env[63028]: DEBUG nova.virt.hardware [None req-303769e7-95d0-483b-bc6d-c7843cb1c5e3 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 969.919454] env[63028]: DEBUG nova.virt.hardware [None req-303769e7-95d0-483b-bc6d-c7843cb1c5e3 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 969.920430] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7dca313-5fd8-4e90-9f57-5b556ca31afa {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.933328] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8a3be7a-9dd3-4074-9581-16e9501541f9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.940598] env[63028]: DEBUG oslo_concurrency.lockutils [None req-217f2845-2515-4130-b1d0-de641f56c44e tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 969.967018] env[63028]: DEBUG oslo_concurrency.lockutils [None 
req-0276dd8a-7d26-4e27-a04b-31fa55ca6c10 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.137s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 969.969786] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5a58b678-dd27-4716-b720-edf6a180012d tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.024s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 969.971547] env[63028]: INFO nova.compute.claims [None req-5a58b678-dd27-4716-b720-edf6a180012d tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: c386c117-e255-4c3b-9a37-011e517277de] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 969.999909] env[63028]: INFO nova.scheduler.client.report [None req-0276dd8a-7d26-4e27-a04b-31fa55ca6c10 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] Deleted allocations for instance 8f621e7b-0c76-4f70-830d-09d28a2e0736 [ 970.112481] env[63028]: DEBUG nova.compute.manager [None req-8914f4fd-23d9-41ba-bde1-faf65d9122a3 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] [instance: 7e914e49-0d70-4024-940b-ad2a15e9dff7] Start destroying the instance on the hypervisor. {{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 970.112766] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-8914f4fd-23d9-41ba-bde1-faf65d9122a3 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] [instance: 7e914e49-0d70-4024-940b-ad2a15e9dff7] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 970.114117] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88f938db-46be-424f-a9c7-a7bc1c5790aa {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.124038] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-8914f4fd-23d9-41ba-bde1-faf65d9122a3 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] [instance: 7e914e49-0d70-4024-940b-ad2a15e9dff7] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 970.125311] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2c5fe6ca-6d09-4196-bda2-f9bb5cc17410 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.131521] env[63028]: DEBUG oslo_vmware.api [None req-8914f4fd-23d9-41ba-bde1-faf65d9122a3 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] Waiting for the task: (returnval){ [ 970.131521] env[63028]: value = "task-2735950" [ 970.131521] env[63028]: _type = "Task" [ 970.131521] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.147082] env[63028]: DEBUG oslo_vmware.api [None req-8914f4fd-23d9-41ba-bde1-faf65d9122a3 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] Task: {'id': task-2735950, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.339894] env[63028]: DEBUG oslo_vmware.api [None req-43ba495d-bfef-4eb6-9fde-f165b7e47fca tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] Task: {'id': task-2735948, 'name': ReconfigVM_Task, 'duration_secs': 0.639555} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.340541] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-43ba495d-bfef-4eb6-9fde-f165b7e47fca tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] [instance: 455578fa-7468-40dc-8c0a-37ac35e5c0a0] Reconfigured VM instance instance-00000056 to attach disk [datastore2] 455578fa-7468-40dc-8c0a-37ac35e5c0a0/455578fa-7468-40dc-8c0a-37ac35e5c0a0.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 970.343219] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-66c6127c-ff7f-49fa-bf37-36e35aabb73c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.352506] env[63028]: DEBUG oslo_vmware.api [None req-43ba495d-bfef-4eb6-9fde-f165b7e47fca tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] Waiting for the task: (returnval){ [ 970.352506] env[63028]: value = "task-2735951" [ 970.352506] env[63028]: _type = "Task" [ 970.352506] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.368776] env[63028]: DEBUG oslo_vmware.api [None req-43ba495d-bfef-4eb6-9fde-f165b7e47fca tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] Task: {'id': task-2735951, 'name': Rename_Task} progress is 10%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.383881] env[63028]: DEBUG oslo_vmware.api [None req-46e72be6-8f6a-47bc-8465-8e79a24584ee tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2735949, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.514916] env[63028]: DEBUG oslo_concurrency.lockutils [None req-0276dd8a-7d26-4e27-a04b-31fa55ca6c10 tempest-ServerGroupTestJSON-753068178 tempest-ServerGroupTestJSON-753068178-project-member] Lock "8f621e7b-0c76-4f70-830d-09d28a2e0736" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.048s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 970.598485] env[63028]: DEBUG nova.compute.manager [req-41e3d5ce-cd91-4a3b-868c-38272dd7dd92 req-0b22a263-0748-4a20-b25f-2f9c8f623b99 service nova] [instance: 56d6982d-9f76-4952-8c8b-f64b3c8d02fe] Received event network-vif-plugged-de1f8509-63e1-41ae-ad48-03c4e8b74871 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 970.598834] env[63028]: DEBUG oslo_concurrency.lockutils [req-41e3d5ce-cd91-4a3b-868c-38272dd7dd92 req-0b22a263-0748-4a20-b25f-2f9c8f623b99 service nova] Acquiring lock "56d6982d-9f76-4952-8c8b-f64b3c8d02fe-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 970.602112] env[63028]: DEBUG oslo_concurrency.lockutils [req-41e3d5ce-cd91-4a3b-868c-38272dd7dd92 req-0b22a263-0748-4a20-b25f-2f9c8f623b99 service nova] Lock "56d6982d-9f76-4952-8c8b-f64b3c8d02fe-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.002s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 970.602112] env[63028]: DEBUG oslo_concurrency.lockutils [req-41e3d5ce-cd91-4a3b-868c-38272dd7dd92 req-0b22a263-0748-4a20-b25f-2f9c8f623b99 service nova] Lock "56d6982d-9f76-4952-8c8b-f64b3c8d02fe-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 970.602112] env[63028]: DEBUG nova.compute.manager [req-41e3d5ce-cd91-4a3b-868c-38272dd7dd92 req-0b22a263-0748-4a20-b25f-2f9c8f623b99 service nova] [instance: 56d6982d-9f76-4952-8c8b-f64b3c8d02fe] No waiting events found dispatching network-vif-plugged-de1f8509-63e1-41ae-ad48-03c4e8b74871 {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 970.602112] env[63028]: WARNING nova.compute.manager [req-41e3d5ce-cd91-4a3b-868c-38272dd7dd92 req-0b22a263-0748-4a20-b25f-2f9c8f623b99 service nova] [instance: 56d6982d-9f76-4952-8c8b-f64b3c8d02fe] Received unexpected event network-vif-plugged-de1f8509-63e1-41ae-ad48-03c4e8b74871 for instance with vm_state building and task_state spawning. [ 970.646971] env[63028]: DEBUG oslo_vmware.api [None req-8914f4fd-23d9-41ba-bde1-faf65d9122a3 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] Task: {'id': task-2735950, 'name': PowerOffVM_Task, 'duration_secs': 0.308798} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.647980] env[63028]: DEBUG nova.network.neutron [None req-303769e7-95d0-483b-bc6d-c7843cb1c5e3 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] [instance: 56d6982d-9f76-4952-8c8b-f64b3c8d02fe] Successfully updated port: de1f8509-63e1-41ae-ad48-03c4e8b74871 {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 970.649307] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-8914f4fd-23d9-41ba-bde1-faf65d9122a3 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] [instance: 7e914e49-0d70-4024-940b-ad2a15e9dff7] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 970.649901] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-8914f4fd-23d9-41ba-bde1-faf65d9122a3 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] [instance: 7e914e49-0d70-4024-940b-ad2a15e9dff7] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 970.650713] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a0ab2a33-00b8-4a30-86c7-91d4274e85e7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.714713] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-8914f4fd-23d9-41ba-bde1-faf65d9122a3 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] [instance: 7e914e49-0d70-4024-940b-ad2a15e9dff7] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 970.714713] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-8914f4fd-23d9-41ba-bde1-faf65d9122a3 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] [instance: 7e914e49-0d70-4024-940b-ad2a15e9dff7] Deleting contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 970.714894] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-8914f4fd-23d9-41ba-bde1-faf65d9122a3 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] Deleting the datastore file [datastore1] 7e914e49-0d70-4024-940b-ad2a15e9dff7 {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 970.715280] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dac18150-c5bc-42d5-810d-0a67e463b724 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.722549] env[63028]: DEBUG oslo_vmware.api [None req-8914f4fd-23d9-41ba-bde1-faf65d9122a3 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] Waiting for the task: (returnval){ [ 970.722549] env[63028]: value = "task-2735953" [ 970.722549] env[63028]: _type = "Task" [ 970.722549] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.736143] env[63028]: DEBUG oslo_vmware.api [None req-8914f4fd-23d9-41ba-bde1-faf65d9122a3 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] Task: {'id': task-2735953, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.870983] env[63028]: DEBUG oslo_vmware.api [None req-43ba495d-bfef-4eb6-9fde-f165b7e47fca tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] Task: {'id': task-2735951, 'name': Rename_Task, 'duration_secs': 0.220768} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.877199] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-43ba495d-bfef-4eb6-9fde-f165b7e47fca tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] [instance: 455578fa-7468-40dc-8c0a-37ac35e5c0a0] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 970.877756] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5e3f5921-24a3-4a78-8e5b-5ba7b1d76ed6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.888978] env[63028]: DEBUG oslo_vmware.api [None req-46e72be6-8f6a-47bc-8465-8e79a24584ee tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2735949, 'name': ReconfigVM_Task, 'duration_secs': 0.594134} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.890471] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-46e72be6-8f6a-47bc-8465-8e79a24584ee tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Reconfigured VM instance instance-0000004e to attach disk [datastore2] volume-15326c33-7e0b-41be-bf2e-5b82153cea0d/volume-15326c33-7e0b-41be-bf2e-5b82153cea0d.vmdk or device None with type thin {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 970.894922] env[63028]: DEBUG oslo_vmware.api [None req-43ba495d-bfef-4eb6-9fde-f165b7e47fca tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] Waiting for the task: (returnval){ [ 970.894922] env[63028]: value = "task-2735954" [ 970.894922] env[63028]: _type = "Task" [ 970.894922] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.894922] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-45838f03-69f9-489b-a193-a5b9f10af30d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.915259] env[63028]: DEBUG oslo_vmware.api [None req-43ba495d-bfef-4eb6-9fde-f165b7e47fca tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] Task: {'id': task-2735954, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.918475] env[63028]: DEBUG oslo_vmware.api [None req-46e72be6-8f6a-47bc-8465-8e79a24584ee tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Waiting for the task: (returnval){ [ 970.918475] env[63028]: value = "task-2735955" [ 970.918475] env[63028]: _type = "Task" [ 970.918475] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.928095] env[63028]: DEBUG oslo_vmware.api [None req-46e72be6-8f6a-47bc-8465-8e79a24584ee tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2735955, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.936980] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-029bbebf-b712-4d44-b93d-a9ad42e63e3e tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 970.937770] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c2b6e7de-afba-4daa-aa6f-fcfd9df4da65 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.947484] env[63028]: DEBUG oslo_vmware.api [None req-029bbebf-b712-4d44-b93d-a9ad42e63e3e tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Waiting for the task: (returnval){ [ 970.947484] env[63028]: value = "task-2735956" [ 970.947484] env[63028]: _type = "Task" [ 970.947484] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.957266] env[63028]: DEBUG oslo_vmware.api [None req-029bbebf-b712-4d44-b93d-a9ad42e63e3e tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2735956, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.151694] env[63028]: DEBUG oslo_concurrency.lockutils [None req-303769e7-95d0-483b-bc6d-c7843cb1c5e3 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Acquiring lock "refresh_cache-56d6982d-9f76-4952-8c8b-f64b3c8d02fe" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 971.151856] env[63028]: DEBUG oslo_concurrency.lockutils [None req-303769e7-95d0-483b-bc6d-c7843cb1c5e3 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Acquired lock "refresh_cache-56d6982d-9f76-4952-8c8b-f64b3c8d02fe" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 971.152026] env[63028]: DEBUG nova.network.neutron [None req-303769e7-95d0-483b-bc6d-c7843cb1c5e3 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] [instance: 56d6982d-9f76-4952-8c8b-f64b3c8d02fe] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 971.240651] env[63028]: DEBUG oslo_vmware.api [None req-8914f4fd-23d9-41ba-bde1-faf65d9122a3 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] Task: {'id': task-2735953, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.226519} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.243584] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-8914f4fd-23d9-41ba-bde1-faf65d9122a3 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 971.243798] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-8914f4fd-23d9-41ba-bde1-faf65d9122a3 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] [instance: 7e914e49-0d70-4024-940b-ad2a15e9dff7] Deleted contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 971.244119] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-8914f4fd-23d9-41ba-bde1-faf65d9122a3 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] [instance: 7e914e49-0d70-4024-940b-ad2a15e9dff7] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 971.244185] env[63028]: INFO nova.compute.manager [None req-8914f4fd-23d9-41ba-bde1-faf65d9122a3 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] [instance: 7e914e49-0d70-4024-940b-ad2a15e9dff7] Took 1.13 seconds to destroy the instance on the hypervisor. [ 971.244443] env[63028]: DEBUG oslo.service.loopingcall [None req-8914f4fd-23d9-41ba-bde1-faf65d9122a3 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 971.244925] env[63028]: DEBUG nova.compute.manager [-] [instance: 7e914e49-0d70-4024-940b-ad2a15e9dff7] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 971.245034] env[63028]: DEBUG nova.network.neutron [-] [instance: 7e914e49-0d70-4024-940b-ad2a15e9dff7] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 971.429760] env[63028]: DEBUG oslo_vmware.api [None req-43ba495d-bfef-4eb6-9fde-f165b7e47fca tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] Task: {'id': task-2735954, 'name': PowerOnVM_Task} progress is 78%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.437134] env[63028]: DEBUG oslo_vmware.api [None req-46e72be6-8f6a-47bc-8465-8e79a24584ee tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2735955, 'name': ReconfigVM_Task, 'duration_secs': 0.232178} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.439754] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-46e72be6-8f6a-47bc-8465-8e79a24584ee tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-550816', 'volume_id': '15326c33-7e0b-41be-bf2e-5b82153cea0d', 'name': 'volume-15326c33-7e0b-41be-bf2e-5b82153cea0d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '46dc76bc-854f-46ad-9db5-21cf6f40fb21', 'attached_at': '', 'detached_at': '', 'volume_id': '15326c33-7e0b-41be-bf2e-5b82153cea0d', 'serial': '15326c33-7e0b-41be-bf2e-5b82153cea0d'} {{(pid=63028) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 971.457138] env[63028]: DEBUG oslo_vmware.api [None req-029bbebf-b712-4d44-b93d-a9ad42e63e3e tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2735956, 'name': PowerOffVM_Task, 'duration_secs': 0.319205} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.459973] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-029bbebf-b712-4d44-b93d-a9ad42e63e3e tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 971.461674] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48d237ea-4221-4efd-bdf9-ce70d1d7f359 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.485578] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70c58a76-82d6-4469-8c39-199fab2394ed {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.515353] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bfad357-1265-4506-b7d7-eed4e0fc9ede {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.523244] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8c373f3-4878-4e1a-9fad-d10e78fb93eb {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.558309] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-481fd865-d697-4a36-97eb-f3aac4a0946c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.567221] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d14f6402-e2d2-4f4d-89a6-eb263c17d7cc {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.585698] env[63028]: DEBUG nova.compute.provider_tree [None req-5a58b678-dd27-4716-b720-edf6a180012d tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 971.698311] env[63028]: DEBUG nova.network.neutron [None req-303769e7-95d0-483b-bc6d-c7843cb1c5e3 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] [instance: 56d6982d-9f76-4952-8c8b-f64b3c8d02fe] Instance cache missing network info. {{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 971.922633] env[63028]: DEBUG oslo_vmware.api [None req-43ba495d-bfef-4eb6-9fde-f165b7e47fca tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] Task: {'id': task-2735954, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.000321] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-029bbebf-b712-4d44-b93d-a9ad42e63e3e tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19] Creating Snapshot of the VM instance {{(pid=63028) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 972.000321] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-b76974cf-8809-4c42-b4d3-d3674a49c3f2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.012141] env[63028]: DEBUG oslo_vmware.api [None req-029bbebf-b712-4d44-b93d-a9ad42e63e3e tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Waiting for the task: (returnval){ [ 972.012141] env[63028]: value = "task-2735957" [ 972.012141] env[63028]: _type = "Task" [ 972.012141] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.019273] env[63028]: DEBUG oslo_vmware.api [None req-029bbebf-b712-4d44-b93d-a9ad42e63e3e tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2735957, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.037556] env[63028]: DEBUG nova.network.neutron [None req-303769e7-95d0-483b-bc6d-c7843cb1c5e3 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] [instance: 56d6982d-9f76-4952-8c8b-f64b3c8d02fe] Updating instance_info_cache with network_info: [{"id": "de1f8509-63e1-41ae-ad48-03c4e8b74871", "address": "fa:16:3e:4e:c3:80", "network": {"id": "8fd5dd35-416e-45e2-aea5-8c2c22752ef7", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1819490475-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bae448aa28a84aa6863fffc24a5448fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "489b2441-7132-4942-8b61-49cf0ad4400e", "external-id": "nsx-vlan-transportzone-971", "segmentation_id": 971, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapde1f8509-63", "ovs_interfaceid": "de1f8509-63e1-41ae-ad48-03c4e8b74871", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 972.088829] env[63028]: DEBUG nova.scheduler.client.report [None req-5a58b678-dd27-4716-b720-edf6a180012d tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 
'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 972.386163] env[63028]: DEBUG nova.network.neutron [-] [instance: 7e914e49-0d70-4024-940b-ad2a15e9dff7] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 972.423099] env[63028]: DEBUG oslo_vmware.api [None req-43ba495d-bfef-4eb6-9fde-f165b7e47fca tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] Task: {'id': task-2735954, 'name': PowerOnVM_Task, 'duration_secs': 1.159847} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.424067] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-43ba495d-bfef-4eb6-9fde-f165b7e47fca tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] [instance: 455578fa-7468-40dc-8c0a-37ac35e5c0a0] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 972.424067] env[63028]: INFO nova.compute.manager [None req-43ba495d-bfef-4eb6-9fde-f165b7e47fca tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] [instance: 455578fa-7468-40dc-8c0a-37ac35e5c0a0] Took 9.27 seconds to spawn the instance on the hypervisor. [ 972.424564] env[63028]: DEBUG nova.compute.manager [None req-43ba495d-bfef-4eb6-9fde-f165b7e47fca tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] [instance: 455578fa-7468-40dc-8c0a-37ac35e5c0a0] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 972.426026] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38de188c-b968-4fe2-8ec0-91cc292c102d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.522030] env[63028]: DEBUG oslo_vmware.api [None req-029bbebf-b712-4d44-b93d-a9ad42e63e3e tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2735957, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.546018] env[63028]: DEBUG oslo_concurrency.lockutils [None req-303769e7-95d0-483b-bc6d-c7843cb1c5e3 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Releasing lock "refresh_cache-56d6982d-9f76-4952-8c8b-f64b3c8d02fe" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 972.546018] env[63028]: DEBUG nova.compute.manager [None req-303769e7-95d0-483b-bc6d-c7843cb1c5e3 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] [instance: 56d6982d-9f76-4952-8c8b-f64b3c8d02fe] Instance network_info: |[{"id": "de1f8509-63e1-41ae-ad48-03c4e8b74871", "address": "fa:16:3e:4e:c3:80", "network": {"id": "8fd5dd35-416e-45e2-aea5-8c2c22752ef7", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1819490475-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bae448aa28a84aa6863fffc24a5448fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "489b2441-7132-4942-8b61-49cf0ad4400e", "external-id": "nsx-vlan-transportzone-971", "segmentation_id": 971, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapde1f8509-63", "ovs_interfaceid": "de1f8509-63e1-41ae-ad48-03c4e8b74871", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 972.546018] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-303769e7-95d0-483b-bc6d-c7843cb1c5e3 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] [instance: 56d6982d-9f76-4952-8c8b-f64b3c8d02fe] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4e:c3:80', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '489b2441-7132-4942-8b61-49cf0ad4400e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'de1f8509-63e1-41ae-ad48-03c4e8b74871', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 972.553814] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-303769e7-95d0-483b-bc6d-c7843cb1c5e3 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Creating folder: Project (bae448aa28a84aa6863fffc24a5448fb). Parent ref: group-v550570. 
{{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 972.557397] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-90c8c2f5-68c5-4970-9af2-c44fd197c4ea {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.563350] env[63028]: DEBUG nova.objects.instance [None req-46e72be6-8f6a-47bc-8465-8e79a24584ee tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Lazy-loading 'flavor' on Instance uuid 46dc76bc-854f-46ad-9db5-21cf6f40fb21 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 972.570224] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-303769e7-95d0-483b-bc6d-c7843cb1c5e3 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Created folder: Project (bae448aa28a84aa6863fffc24a5448fb) in parent group-v550570. [ 972.570224] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-303769e7-95d0-483b-bc6d-c7843cb1c5e3 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Creating folder: Instances. Parent ref: group-v550822. {{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 972.570224] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a226b16b-2500-46fa-b596-5467a9263449 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.578164] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-303769e7-95d0-483b-bc6d-c7843cb1c5e3 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Created folder: Instances in parent group-v550822. [ 972.578539] env[63028]: DEBUG oslo.service.loopingcall [None req-303769e7-95d0-483b-bc6d-c7843cb1c5e3 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 972.579018] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 56d6982d-9f76-4952-8c8b-f64b3c8d02fe] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 972.579340] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-95114200-7815-46d0-b2fd-6401f15b8338 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.606868] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5a58b678-dd27-4716-b720-edf6a180012d tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.634s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 972.606868] env[63028]: DEBUG nova.compute.manager [None req-5a58b678-dd27-4716-b720-edf6a180012d tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: c386c117-e255-4c3b-9a37-011e517277de] Start building networks asynchronously for instance. 
{{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 972.608038] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a617f197-03a9-4ac7-af76-ff750a350c76 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.064s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 972.610533] env[63028]: INFO nova.compute.claims [None req-a617f197-03a9-4ac7-af76-ff750a350c76 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: c0693e4c-30b2-4eda-be1e-f6186d78038b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 972.622272] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 972.622272] env[63028]: value = "task-2735960" [ 972.622272] env[63028]: _type = "Task" [ 972.622272] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.632211] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735960, 'name': CreateVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.639453] env[63028]: DEBUG nova.compute.manager [req-0bc5838a-484b-4ab1-9557-627960365f4a req-591bcefa-4c6e-4ffd-b990-57253922dd0f service nova] [instance: 56d6982d-9f76-4952-8c8b-f64b3c8d02fe] Received event network-changed-de1f8509-63e1-41ae-ad48-03c4e8b74871 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 972.639453] env[63028]: DEBUG nova.compute.manager [req-0bc5838a-484b-4ab1-9557-627960365f4a req-591bcefa-4c6e-4ffd-b990-57253922dd0f service nova] [instance: 56d6982d-9f76-4952-8c8b-f64b3c8d02fe] Refreshing instance network info cache due to event network-changed-de1f8509-63e1-41ae-ad48-03c4e8b74871. 
{{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 972.639641] env[63028]: DEBUG oslo_concurrency.lockutils [req-0bc5838a-484b-4ab1-9557-627960365f4a req-591bcefa-4c6e-4ffd-b990-57253922dd0f service nova] Acquiring lock "refresh_cache-56d6982d-9f76-4952-8c8b-f64b3c8d02fe" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 972.640020] env[63028]: DEBUG oslo_concurrency.lockutils [req-0bc5838a-484b-4ab1-9557-627960365f4a req-591bcefa-4c6e-4ffd-b990-57253922dd0f service nova] Acquired lock "refresh_cache-56d6982d-9f76-4952-8c8b-f64b3c8d02fe" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 972.640203] env[63028]: DEBUG nova.network.neutron [req-0bc5838a-484b-4ab1-9557-627960365f4a req-591bcefa-4c6e-4ffd-b990-57253922dd0f service nova] [instance: 56d6982d-9f76-4952-8c8b-f64b3c8d02fe] Refreshing network info cache for port de1f8509-63e1-41ae-ad48-03c4e8b74871 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 972.669698] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5304d7dd-e594-479a-82a5-046691037310 tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] Acquiring lock "53b0cf02-f1c8-4219-a58c-f7b5ffbb1ae7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 972.670057] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5304d7dd-e594-479a-82a5-046691037310 tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] Lock "53b0cf02-f1c8-4219-a58c-f7b5ffbb1ae7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 972.888935] env[63028]: INFO nova.compute.manager [-] [instance: 7e914e49-0d70-4024-940b-ad2a15e9dff7] Took 1.64 seconds to deallocate network for instance. [ 972.948952] env[63028]: INFO nova.compute.manager [None req-43ba495d-bfef-4eb6-9fde-f165b7e47fca tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] [instance: 455578fa-7468-40dc-8c0a-37ac35e5c0a0] Took 42.60 seconds to build instance. [ 973.022034] env[63028]: DEBUG oslo_vmware.api [None req-029bbebf-b712-4d44-b93d-a9ad42e63e3e tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2735957, 'name': CreateSnapshot_Task, 'duration_secs': 0.61458} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.022340] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-029bbebf-b712-4d44-b93d-a9ad42e63e3e tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19] Created Snapshot of the VM instance {{(pid=63028) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 973.023310] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af335bb6-8226-4203-b52d-c03f5a0fdd5b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.070436] env[63028]: DEBUG oslo_concurrency.lockutils [None req-46e72be6-8f6a-47bc-8465-8e79a24584ee tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Lock "46dc76bc-854f-46ad-9db5-21cf6f40fb21" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.416s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 973.119352] env[63028]: DEBUG nova.compute.utils [None req-5a58b678-dd27-4716-b720-edf6a180012d tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 973.124650] env[63028]: DEBUG nova.compute.manager [None req-5a58b678-dd27-4716-b720-edf6a180012d tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: c386c117-e255-4c3b-9a37-011e517277de] Allocating IP information in the background. {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 973.124854] env[63028]: DEBUG nova.network.neutron [None req-5a58b678-dd27-4716-b720-edf6a180012d tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: c386c117-e255-4c3b-9a37-011e517277de] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 973.145371] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735960, 'name': CreateVM_Task, 'duration_secs': 0.356841} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.146360] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 56d6982d-9f76-4952-8c8b-f64b3c8d02fe] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 973.147522] env[63028]: DEBUG oslo_concurrency.lockutils [None req-303769e7-95d0-483b-bc6d-c7843cb1c5e3 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 973.147767] env[63028]: DEBUG oslo_concurrency.lockutils [None req-303769e7-95d0-483b-bc6d-c7843cb1c5e3 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 973.148516] env[63028]: DEBUG oslo_concurrency.lockutils [None req-303769e7-95d0-483b-bc6d-c7843cb1c5e3 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 973.148981] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bf89ff1f-3c92-46e9-a91f-259b9a145979 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.157525] env[63028]: DEBUG oslo_vmware.api [None req-303769e7-95d0-483b-bc6d-c7843cb1c5e3 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Waiting for the task: (returnval){ [ 973.157525] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]528f0158-0f31-d05e-a4da-a752499169e7" [ 973.157525] env[63028]: _type = "Task" [ 973.157525] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.168452] env[63028]: DEBUG oslo_vmware.api [None req-303769e7-95d0-483b-bc6d-c7843cb1c5e3 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]528f0158-0f31-d05e-a4da-a752499169e7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.174403] env[63028]: DEBUG nova.compute.manager [None req-5304d7dd-e594-479a-82a5-046691037310 tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] [instance: 53b0cf02-f1c8-4219-a58c-f7b5ffbb1ae7] Starting instance... 
{{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 973.202827] env[63028]: DEBUG nova.policy [None req-5a58b678-dd27-4716-b720-edf6a180012d tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3c022ca18b0a41ce9d790fa25f6ebf8c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ea26842446ec4691a6456a6659188704', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 973.399142] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8914f4fd-23d9-41ba-bde1-faf65d9122a3 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 973.468540] env[63028]: DEBUG oslo_concurrency.lockutils [None req-43ba495d-bfef-4eb6-9fde-f165b7e47fca tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] Lock "455578fa-7468-40dc-8c0a-37ac35e5c0a0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 44.134s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 973.542774] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-029bbebf-b712-4d44-b93d-a9ad42e63e3e tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19] Creating linked-clone VM from snapshot {{(pid=63028) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 973.543259] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-3ab5fdff-0677-444a-b2ef-74daace8b7a7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.552819] env[63028]: DEBUG oslo_vmware.api [None req-029bbebf-b712-4d44-b93d-a9ad42e63e3e tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Waiting for the task: (returnval){ [ 973.552819] env[63028]: value = "task-2735961" [ 973.552819] env[63028]: _type = "Task" [ 973.552819] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.563989] env[63028]: DEBUG oslo_vmware.api [None req-029bbebf-b712-4d44-b93d-a9ad42e63e3e tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2735961, 'name': CloneVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.574289] env[63028]: DEBUG nova.network.neutron [None req-5a58b678-dd27-4716-b720-edf6a180012d tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: c386c117-e255-4c3b-9a37-011e517277de] Successfully created port: 0e62de6a-f8ad-4958-81a2-9ff79a6bea03 {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 973.624096] env[63028]: DEBUG nova.compute.manager [None req-5a58b678-dd27-4716-b720-edf6a180012d tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: c386c117-e255-4c3b-9a37-011e517277de] Start building block device mappings for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 973.674079] env[63028]: DEBUG oslo_vmware.api [None req-303769e7-95d0-483b-bc6d-c7843cb1c5e3 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]528f0158-0f31-d05e-a4da-a752499169e7, 'name': SearchDatastore_Task, 'duration_secs': 0.029694} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.680250] env[63028]: DEBUG oslo_concurrency.lockutils [None req-303769e7-95d0-483b-bc6d-c7843cb1c5e3 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 973.680686] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-303769e7-95d0-483b-bc6d-c7843cb1c5e3 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] [instance: 56d6982d-9f76-4952-8c8b-f64b3c8d02fe] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 973.681250] env[63028]: DEBUG oslo_concurrency.lockutils [None req-303769e7-95d0-483b-bc6d-c7843cb1c5e3 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 973.681506] env[63028]: DEBUG oslo_concurrency.lockutils [None req-303769e7-95d0-483b-bc6d-c7843cb1c5e3 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 973.681921] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-303769e7-95d0-483b-bc6d-c7843cb1c5e3 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 973.689360] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9ebf1580-c708-4ba5-824d-62f1703db496 {{(pid=63028) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.706522] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-303769e7-95d0-483b-bc6d-c7843cb1c5e3 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 973.706727] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-303769e7-95d0-483b-bc6d-c7843cb1c5e3 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 973.707739] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-46fb695f-b6fb-4ab7-8e56-c762e00b495a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.712170] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5304d7dd-e594-479a-82a5-046691037310 tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 973.720893] env[63028]: DEBUG oslo_vmware.api [None req-303769e7-95d0-483b-bc6d-c7843cb1c5e3 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Waiting for the task: (returnval){ [ 973.720893] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52a29c27-ee03-72bd-5af8-d0baabbde374" [ 973.720893] env[63028]: _type = "Task" [ 973.720893] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.729974] env[63028]: DEBUG oslo_vmware.api [None req-303769e7-95d0-483b-bc6d-c7843cb1c5e3 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52a29c27-ee03-72bd-5af8-d0baabbde374, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.859074] env[63028]: DEBUG nova.network.neutron [req-0bc5838a-484b-4ab1-9557-627960365f4a req-591bcefa-4c6e-4ffd-b990-57253922dd0f service nova] [instance: 56d6982d-9f76-4952-8c8b-f64b3c8d02fe] Updated VIF entry in instance network info cache for port de1f8509-63e1-41ae-ad48-03c4e8b74871. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 973.860088] env[63028]: DEBUG nova.network.neutron [req-0bc5838a-484b-4ab1-9557-627960365f4a req-591bcefa-4c6e-4ffd-b990-57253922dd0f service nova] [instance: 56d6982d-9f76-4952-8c8b-f64b3c8d02fe] Updating instance_info_cache with network_info: [{"id": "de1f8509-63e1-41ae-ad48-03c4e8b74871", "address": "fa:16:3e:4e:c3:80", "network": {"id": "8fd5dd35-416e-45e2-aea5-8c2c22752ef7", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1819490475-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bae448aa28a84aa6863fffc24a5448fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "489b2441-7132-4942-8b61-49cf0ad4400e", "external-id": "nsx-vlan-transportzone-971", "segmentation_id": 971, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapde1f8509-63", "ovs_interfaceid": "de1f8509-63e1-41ae-ad48-03c4e8b74871", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 974.063825] env[63028]: DEBUG oslo_vmware.api [None req-029bbebf-b712-4d44-b93d-a9ad42e63e3e tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2735961, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.134025] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9bae4d7-c2f5-4ba4-bc5b-98d3f4fa650e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.142288] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2d8e084-61ed-4ea5-b9c2-df89ede34ddc {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.177448] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df60449d-7ecd-4d9b-8529-45699cc59ad4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.185933] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37857e0b-9224-4b28-9327-538abbb77287 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.202677] env[63028]: DEBUG nova.compute.provider_tree [None req-a617f197-03a9-4ac7-af76-ff750a350c76 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 974.209825] env[63028]: INFO nova.compute.manager [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Rebuilding instance [ 974.235034] env[63028]: DEBUG oslo_vmware.api [None req-303769e7-95d0-483b-bc6d-c7843cb1c5e3 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52a29c27-ee03-72bd-5af8-d0baabbde374, 'name': SearchDatastore_Task, 'duration_secs': 0.018457} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.239294] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b46c320b-f212-4c48-9758-3db493805187 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.243862] env[63028]: DEBUG oslo_vmware.api [None req-303769e7-95d0-483b-bc6d-c7843cb1c5e3 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Waiting for the task: (returnval){ [ 974.243862] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52771835-623d-a6ef-eb9f-02e997516bf8" [ 974.243862] env[63028]: _type = "Task" [ 974.243862] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.256056] env[63028]: DEBUG oslo_vmware.api [None req-303769e7-95d0-483b-bc6d-c7843cb1c5e3 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52771835-623d-a6ef-eb9f-02e997516bf8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.265790] env[63028]: DEBUG nova.compute.manager [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 974.266671] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdf26bbc-eb89-452c-8b56-30de87943cc7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.362855] env[63028]: DEBUG oslo_concurrency.lockutils [req-0bc5838a-484b-4ab1-9557-627960365f4a req-591bcefa-4c6e-4ffd-b990-57253922dd0f service nova] Releasing lock "refresh_cache-56d6982d-9f76-4952-8c8b-f64b3c8d02fe" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 974.363067] env[63028]: DEBUG nova.compute.manager [req-0bc5838a-484b-4ab1-9557-627960365f4a req-591bcefa-4c6e-4ffd-b990-57253922dd0f service nova] [instance: 7e914e49-0d70-4024-940b-ad2a15e9dff7] Received event network-vif-deleted-4bf1e126-9048-489d-9c61-18ebe4c2af31 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 974.565521] env[63028]: DEBUG oslo_vmware.api [None req-029bbebf-b712-4d44-b93d-a9ad42e63e3e tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2735961, 'name': CloneVM_Task} progress is 94%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.641052] env[63028]: DEBUG nova.compute.manager [None req-5a58b678-dd27-4716-b720-edf6a180012d tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: c386c117-e255-4c3b-9a37-011e517277de] Start spawning the instance on the hypervisor. 
{{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 974.663590] env[63028]: DEBUG nova.virt.hardware [None req-5a58b678-dd27-4716-b720-edf6a180012d tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 974.665121] env[63028]: DEBUG nova.virt.hardware [None req-5a58b678-dd27-4716-b720-edf6a180012d tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 974.665121] env[63028]: DEBUG nova.virt.hardware [None req-5a58b678-dd27-4716-b720-edf6a180012d tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 974.665121] env[63028]: DEBUG nova.virt.hardware [None req-5a58b678-dd27-4716-b720-edf6a180012d tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 974.665309] env[63028]: DEBUG nova.virt.hardware [None req-5a58b678-dd27-4716-b720-edf6a180012d tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 974.665345] env[63028]: DEBUG nova.virt.hardware [None req-5a58b678-dd27-4716-b720-edf6a180012d tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 974.665584] env[63028]: DEBUG nova.virt.hardware [None req-5a58b678-dd27-4716-b720-edf6a180012d tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 974.665765] env[63028]: DEBUG nova.virt.hardware [None req-5a58b678-dd27-4716-b720-edf6a180012d tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 974.665944] env[63028]: DEBUG nova.virt.hardware [None req-5a58b678-dd27-4716-b720-edf6a180012d tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] 
Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 974.666119] env[63028]: DEBUG nova.virt.hardware [None req-5a58b678-dd27-4716-b720-edf6a180012d tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 974.666298] env[63028]: DEBUG nova.virt.hardware [None req-5a58b678-dd27-4716-b720-edf6a180012d tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 974.667264] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32f67e81-54d5-42fc-af6d-d784dfe39848 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.676849] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cadfc279-151d-448b-ac2d-e46291b0f3bf {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.710755] env[63028]: DEBUG nova.scheduler.client.report [None req-a617f197-03a9-4ac7-af76-ff750a350c76 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 974.756766] env[63028]: DEBUG oslo_vmware.api [None req-303769e7-95d0-483b-bc6d-c7843cb1c5e3 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52771835-623d-a6ef-eb9f-02e997516bf8, 'name': SearchDatastore_Task, 'duration_secs': 0.021632} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.757073] env[63028]: DEBUG oslo_concurrency.lockutils [None req-303769e7-95d0-483b-bc6d-c7843cb1c5e3 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 974.757380] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-303769e7-95d0-483b-bc6d-c7843cb1c5e3 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] 56d6982d-9f76-4952-8c8b-f64b3c8d02fe/56d6982d-9f76-4952-8c8b-f64b3c8d02fe.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 974.758038] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6d94dc48-de62-4774-84ee-3b547c0a28b7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.764716] env[63028]: DEBUG oslo_vmware.api [None req-303769e7-95d0-483b-bc6d-c7843cb1c5e3 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Waiting for the task: (returnval){ [ 974.764716] env[63028]: value = "task-2735962" [ 974.764716] env[63028]: _type = "Task" [ 974.764716] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.774193] env[63028]: DEBUG oslo_vmware.api [None req-303769e7-95d0-483b-bc6d-c7843cb1c5e3 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Task: {'id': task-2735962, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.066637] env[63028]: DEBUG oslo_vmware.api [None req-029bbebf-b712-4d44-b93d-a9ad42e63e3e tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2735961, 'name': CloneVM_Task, 'duration_secs': 1.420193} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.067104] env[63028]: INFO nova.virt.vmwareapi.vmops [None req-029bbebf-b712-4d44-b93d-a9ad42e63e3e tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19] Created linked-clone VM from snapshot [ 975.067854] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed9156c4-5dee-497b-a9bb-9739f83a7759 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.077260] env[63028]: DEBUG nova.virt.vmwareapi.images [None req-029bbebf-b712-4d44-b93d-a9ad42e63e3e tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19] Uploading image 6d24fdbe-d9bb-49de-8413-30e0956a718a {{(pid=63028) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 975.107143] env[63028]: DEBUG oslo_vmware.rw_handles [None req-029bbebf-b712-4d44-b93d-a9ad42e63e3e tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 975.107143] env[63028]: value = "vm-550826" [ 975.107143] env[63028]: _type = "VirtualMachine" [ 975.107143] env[63028]: }. {{(pid=63028) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 975.107475] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-d6e872a0-4cfc-4e49-84a8-ffe3c9bcc1d7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.116238] env[63028]: DEBUG oslo_vmware.rw_handles [None req-029bbebf-b712-4d44-b93d-a9ad42e63e3e tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Lease: (returnval){ [ 975.116238] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52c3c3c0-1b34-5d20-8a19-ebcc4b660bde" [ 975.116238] env[63028]: _type = "HttpNfcLease" [ 975.116238] env[63028]: } obtained for exporting VM: (result){ [ 975.116238] env[63028]: value = "vm-550826" [ 975.116238] env[63028]: _type = "VirtualMachine" [ 975.116238] env[63028]: }. {{(pid=63028) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 975.116238] env[63028]: DEBUG oslo_vmware.api [None req-029bbebf-b712-4d44-b93d-a9ad42e63e3e tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Waiting for the lease: (returnval){ [ 975.116238] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52c3c3c0-1b34-5d20-8a19-ebcc4b660bde" [ 975.116238] env[63028]: _type = "HttpNfcLease" [ 975.116238] env[63028]: } to be ready. {{(pid=63028) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 975.126032] env[63028]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 975.126032] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52c3c3c0-1b34-5d20-8a19-ebcc4b660bde" [ 975.126032] env[63028]: _type = "HttpNfcLease" [ 975.126032] env[63028]: } is initializing. 
{{(pid=63028) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 975.215158] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a617f197-03a9-4ac7-af76-ff750a350c76 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.607s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 975.215663] env[63028]: DEBUG nova.compute.manager [None req-a617f197-03a9-4ac7-af76-ff750a350c76 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: c0693e4c-30b2-4eda-be1e-f6186d78038b] Start building networks asynchronously for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 975.218670] env[63028]: DEBUG oslo_concurrency.lockutils [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 21.220s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 975.278517] env[63028]: DEBUG oslo_vmware.api [None req-303769e7-95d0-483b-bc6d-c7843cb1c5e3 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Task: {'id': task-2735962, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.280184] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 975.280460] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a7ae862a-6ad0-4c56-84eb-6b2ffcefc4ba {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.287751] env[63028]: DEBUG oslo_vmware.api [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Waiting for the task: (returnval){ [ 975.287751] env[63028]: value = "task-2735964" [ 975.287751] env[63028]: _type = "Task" [ 975.287751] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.298605] env[63028]: DEBUG oslo_vmware.api [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2735964, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.442145] env[63028]: DEBUG nova.network.neutron [None req-5a58b678-dd27-4716-b720-edf6a180012d tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: c386c117-e255-4c3b-9a37-011e517277de] Successfully updated port: 0e62de6a-f8ad-4958-81a2-9ff79a6bea03 {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 975.608258] env[63028]: DEBUG nova.compute.manager [req-61d39860-edab-460b-ae1a-ec1c280e5ce2 req-5cceab66-1901-48ff-8b1e-529673d25fd1 service nova] [instance: 455578fa-7468-40dc-8c0a-37ac35e5c0a0] Received event network-changed-3cae8ebc-a19e-401f-aa80-28da2e6bcd42 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 975.608392] env[63028]: DEBUG nova.compute.manager [req-61d39860-edab-460b-ae1a-ec1c280e5ce2 req-5cceab66-1901-48ff-8b1e-529673d25fd1 service nova] [instance: 455578fa-7468-40dc-8c0a-37ac35e5c0a0] Refreshing instance network info cache due to event network-changed-3cae8ebc-a19e-401f-aa80-28da2e6bcd42. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 975.608586] env[63028]: DEBUG oslo_concurrency.lockutils [req-61d39860-edab-460b-ae1a-ec1c280e5ce2 req-5cceab66-1901-48ff-8b1e-529673d25fd1 service nova] Acquiring lock "refresh_cache-455578fa-7468-40dc-8c0a-37ac35e5c0a0" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 975.608729] env[63028]: DEBUG oslo_concurrency.lockutils [req-61d39860-edab-460b-ae1a-ec1c280e5ce2 req-5cceab66-1901-48ff-8b1e-529673d25fd1 service nova] Acquired lock "refresh_cache-455578fa-7468-40dc-8c0a-37ac35e5c0a0" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 975.608909] env[63028]: DEBUG nova.network.neutron [req-61d39860-edab-460b-ae1a-ec1c280e5ce2 req-5cceab66-1901-48ff-8b1e-529673d25fd1 service nova] [instance: 455578fa-7468-40dc-8c0a-37ac35e5c0a0] Refreshing network info cache for port 3cae8ebc-a19e-401f-aa80-28da2e6bcd42 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 975.624824] env[63028]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 975.624824] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52c3c3c0-1b34-5d20-8a19-ebcc4b660bde" [ 975.624824] env[63028]: _type = "HttpNfcLease" [ 975.624824] env[63028]: } is ready. {{(pid=63028) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 975.625372] env[63028]: DEBUG oslo_vmware.rw_handles [None req-029bbebf-b712-4d44-b93d-a9ad42e63e3e tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 975.625372] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52c3c3c0-1b34-5d20-8a19-ebcc4b660bde" [ 975.625372] env[63028]: _type = "HttpNfcLease" [ 975.625372] env[63028]: }. 
{{(pid=63028) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 975.626132] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78df9dfa-a523-444e-aae6-325f00e861b5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.633242] env[63028]: DEBUG oslo_vmware.rw_handles [None req-029bbebf-b712-4d44-b93d-a9ad42e63e3e tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52d888c5-5bc4-74ca-e5fe-a3c4c5c7bf40/disk-0.vmdk from lease info. {{(pid=63028) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 975.633439] env[63028]: DEBUG oslo_vmware.rw_handles [None req-029bbebf-b712-4d44-b93d-a9ad42e63e3e tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52d888c5-5bc4-74ca-e5fe-a3c4c5c7bf40/disk-0.vmdk for reading. {{(pid=63028) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 975.723542] env[63028]: DEBUG nova.compute.utils [None req-a617f197-03a9-4ac7-af76-ff750a350c76 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 975.737520] env[63028]: DEBUG nova.compute.manager [None req-a617f197-03a9-4ac7-af76-ff750a350c76 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: c0693e4c-30b2-4eda-be1e-f6186d78038b] Allocating IP information in the background. {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 975.737729] env[63028]: DEBUG nova.network.neutron [None req-a617f197-03a9-4ac7-af76-ff750a350c76 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: c0693e4c-30b2-4eda-be1e-f6186d78038b] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 975.742343] env[63028]: DEBUG nova.compute.manager [None req-a617f197-03a9-4ac7-af76-ff750a350c76 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: c0693e4c-30b2-4eda-be1e-f6186d78038b] Start building block device mappings for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 975.746979] env[63028]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-8a34d704-8bcd-46e9-ab5e-2148d45634fb {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.782604] env[63028]: DEBUG oslo_vmware.api [None req-303769e7-95d0-483b-bc6d-c7843cb1c5e3 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Task: {'id': task-2735962, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.651334} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.782750] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-303769e7-95d0-483b-bc6d-c7843cb1c5e3 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] 56d6982d-9f76-4952-8c8b-f64b3c8d02fe/56d6982d-9f76-4952-8c8b-f64b3c8d02fe.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 975.782899] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-303769e7-95d0-483b-bc6d-c7843cb1c5e3 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] [instance: 56d6982d-9f76-4952-8c8b-f64b3c8d02fe] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 975.783176] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-95c626f1-9679-473e-8e99-16bd9fa9027c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.792442] env[63028]: DEBUG oslo_vmware.api [None req-303769e7-95d0-483b-bc6d-c7843cb1c5e3 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Waiting for the task: (returnval){ [ 975.792442] env[63028]: value = "task-2735965" [ 975.792442] env[63028]: _type = "Task" [ 975.792442] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.798875] env[63028]: DEBUG oslo_vmware.api [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2735964, 'name': PowerOffVM_Task, 'duration_secs': 0.250513} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.799532] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 975.804397] env[63028]: DEBUG oslo_vmware.api [None req-303769e7-95d0-483b-bc6d-c7843cb1c5e3 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Task: {'id': task-2735965, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.805855] env[63028]: DEBUG nova.policy [None req-a617f197-03a9-4ac7-af76-ff750a350c76 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '54076b7d25474185b3f205437cb68be8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '68de7445caeb4381b9e68c685ccb5e0b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 975.869649] env[63028]: INFO nova.compute.manager [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Detaching volume 15326c33-7e0b-41be-bf2e-5b82153cea0d [ 975.905990] env[63028]: DEBUG oslo_concurrency.lockutils [None req-af179e16-8efe-49df-98d4-457d6f23482d tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Acquiring lock "interface-b77ba7d6-305e-4b60-a4b7-9353c12c3920-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 975.906365] env[63028]: DEBUG oslo_concurrency.lockutils [None req-af179e16-8efe-49df-98d4-457d6f23482d tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Lock "interface-b77ba7d6-305e-4b60-a4b7-9353c12c3920-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 975.906700] env[63028]: DEBUG nova.objects.instance [None req-af179e16-8efe-49df-98d4-457d6f23482d tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Lazy-loading 'flavor' on Instance uuid b77ba7d6-305e-4b60-a4b7-9353c12c3920 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 975.911376] env[63028]: INFO nova.virt.block_device [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Attempting to driver detach volume 15326c33-7e0b-41be-bf2e-5b82153cea0d from mountpoint /dev/sdb [ 975.911376] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Volume detach. 
Driver type: vmdk {{(pid=63028) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 975.911376] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-550816', 'volume_id': '15326c33-7e0b-41be-bf2e-5b82153cea0d', 'name': 'volume-15326c33-7e0b-41be-bf2e-5b82153cea0d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '46dc76bc-854f-46ad-9db5-21cf6f40fb21', 'attached_at': '', 'detached_at': '', 'volume_id': '15326c33-7e0b-41be-bf2e-5b82153cea0d', 'serial': '15326c33-7e0b-41be-bf2e-5b82153cea0d'} {{(pid=63028) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 975.911376] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e03e782-cab9-4355-95c6-aae70d39676e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.942274] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2efdfd7f-b085-44fe-bd8e-676c0767bd13 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.947367] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5a58b678-dd27-4716-b720-edf6a180012d tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Acquiring lock "refresh_cache-c386c117-e255-4c3b-9a37-011e517277de" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 975.947515] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5a58b678-dd27-4716-b720-edf6a180012d tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Acquired lock "refresh_cache-c386c117-e255-4c3b-9a37-011e517277de" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 975.947666] env[63028]: DEBUG nova.network.neutron [None req-5a58b678-dd27-4716-b720-edf6a180012d tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: c386c117-e255-4c3b-9a37-011e517277de] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 975.953485] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afde176b-0ecc-4a88-81d6-7f556501280f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.978869] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36bbcf42-7c3e-4b8b-9e38-62bae0e65629 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.996294] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] The volume has not been displaced from its original location: [datastore2] volume-15326c33-7e0b-41be-bf2e-5b82153cea0d/volume-15326c33-7e0b-41be-bf2e-5b82153cea0d.vmdk. 
No consolidation needed. {{(pid=63028) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 976.001659] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Reconfiguring VM instance instance-0000004e to detach disk 2001 {{(pid=63028) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 976.003259] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-27e932d1-1f6d-4ec5-93e2-b134cf40a1aa {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.027064] env[63028]: DEBUG oslo_vmware.api [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Waiting for the task: (returnval){ [ 976.027064] env[63028]: value = "task-2735966" [ 976.027064] env[63028]: _type = "Task" [ 976.027064] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.037989] env[63028]: DEBUG oslo_vmware.api [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2735966, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.233160] env[63028]: DEBUG nova.network.neutron [None req-a617f197-03a9-4ac7-af76-ff750a350c76 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: c0693e4c-30b2-4eda-be1e-f6186d78038b] Successfully created port: 7c42e931-e162-4201-8483-8606a86e0dff {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 976.246397] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Applying migration context for instance ed872f21-c2c4-4597-8c9e-9f8d2202b707 as it has an incoming, in-progress migration 8ac4afc4-5919-47ff-9cce-ace0d06a50ba. Migration status is confirming {{(pid=63028) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 976.249106] env[63028]: INFO nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: ed872f21-c2c4-4597-8c9e-9f8d2202b707] Updating resource usage from migration 8ac4afc4-5919-47ff-9cce-ace0d06a50ba [ 976.284579] env[63028]: WARNING nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance ba57ed92-aaef-460c-bd45-d0cbe09e4615 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 976.286363] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance d6137c80-0c09-4655-b264-472753b4fa9c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 976.286546] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance da88308f-ce62-40af-adae-e38aa506bdd9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 976.286755] env[63028]: WARNING nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance 7e914e49-0d70-4024-940b-ad2a15e9dff7 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 976.286927] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance 85aafadb-81d6-4687-aed1-fbe829e5f95f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 976.287657] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 976.287848] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance 79f4ef22-a589-4d5c-8832-5d5dcdd55561 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 976.288079] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance a50e1167-d8ed-4099-83c3-a5066ab0be1f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 976.288987] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance 1d008794-3c1a-46c6-b4eb-3d5441efdb22 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 976.289399] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance f0ca0d73-d428-4b8c-acac-a80b7b7dd793 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 976.289979] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance b77ba7d6-305e-4b60-a4b7-9353c12c3920 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 976.289979] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance 46dc76bc-854f-46ad-9db5-21cf6f40fb21 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 976.290189] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance 8bb61bfa-d44e-4e06-867a-445d9b3db660 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 976.290306] env[63028]: WARNING nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance 672695c2-06f3-4790-a459-4b575baf29d3 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 976.290831] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Migration 8ac4afc4-5919-47ff-9cce-ace0d06a50ba is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 976.290949] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance ed872f21-c2c4-4597-8c9e-9f8d2202b707 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 976.291114] env[63028]: WARNING nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance bb2b405e-6207-4718-9485-0271d26c160f is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 976.291245] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance a97224e8-d69b-4c62-ab96-7cef037ef39b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 976.291465] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance e7ba7069-fe9f-4e0f-acc6-c1d61ad5a8d9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 976.292154] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 976.292154] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance 455578fa-7468-40dc-8c0a-37ac35e5c0a0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 976.292154] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance 56d6982d-9f76-4952-8c8b-f64b3c8d02fe actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 976.292154] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance c386c117-e255-4c3b-9a37-011e517277de actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 976.292358] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance c0693e4c-30b2-4eda-be1e-f6186d78038b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 976.313490] env[63028]: DEBUG oslo_vmware.api [None req-303769e7-95d0-483b-bc6d-c7843cb1c5e3 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Task: {'id': task-2735965, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066292} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.313490] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-303769e7-95d0-483b-bc6d-c7843cb1c5e3 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] [instance: 56d6982d-9f76-4952-8c8b-f64b3c8d02fe] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 976.314287] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9196e934-29b9-43e2-a729-24b5f576e172 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.343813] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-303769e7-95d0-483b-bc6d-c7843cb1c5e3 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] [instance: 56d6982d-9f76-4952-8c8b-f64b3c8d02fe] Reconfiguring VM instance instance-00000057 to attach disk [datastore2] 56d6982d-9f76-4952-8c8b-f64b3c8d02fe/56d6982d-9f76-4952-8c8b-f64b3c8d02fe.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 976.346545] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b72bddbb-5ec6-4340-858f-6facc775e585 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.367155] env[63028]: DEBUG oslo_vmware.api [None req-303769e7-95d0-483b-bc6d-c7843cb1c5e3 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Waiting for the task: (returnval){ [ 976.367155] env[63028]: value = "task-2735967" [ 976.367155] env[63028]: _type = "Task" [ 976.367155] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.375975] env[63028]: DEBUG oslo_vmware.api [None req-303769e7-95d0-483b-bc6d-c7843cb1c5e3 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Task: {'id': task-2735967, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.394216] env[63028]: DEBUG nova.network.neutron [req-61d39860-edab-460b-ae1a-ec1c280e5ce2 req-5cceab66-1901-48ff-8b1e-529673d25fd1 service nova] [instance: 455578fa-7468-40dc-8c0a-37ac35e5c0a0] Updated VIF entry in instance network info cache for port 3cae8ebc-a19e-401f-aa80-28da2e6bcd42. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 976.394327] env[63028]: DEBUG nova.network.neutron [req-61d39860-edab-460b-ae1a-ec1c280e5ce2 req-5cceab66-1901-48ff-8b1e-529673d25fd1 service nova] [instance: 455578fa-7468-40dc-8c0a-37ac35e5c0a0] Updating instance_info_cache with network_info: [{"id": "3cae8ebc-a19e-401f-aa80-28da2e6bcd42", "address": "fa:16:3e:da:e5:5c", "network": {"id": "12d9a704-bca6-4132-add3-8d8bb4a11dbd", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-831034052-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.199", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0cc09f51b98c489ba0a3f14161e4a686", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7041d198-66a3-40de-bf7d-cfc036e6ed69", "external-id": "nsx-vlan-transportzone-278", "segmentation_id": 278, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3cae8ebc-a1", "ovs_interfaceid": "3cae8ebc-a19e-401f-aa80-28da2e6bcd42", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 976.410372] env[63028]: DEBUG nova.objects.instance [None req-af179e16-8efe-49df-98d4-457d6f23482d tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Lazy-loading 'pci_requests' on Instance uuid b77ba7d6-305e-4b60-a4b7-9353c12c3920 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 976.483825] env[63028]: DEBUG nova.network.neutron [None req-5a58b678-dd27-4716-b720-edf6a180012d tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: c386c117-e255-4c3b-9a37-011e517277de] Instance cache missing network info. {{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 976.539330] env[63028]: DEBUG oslo_vmware.api [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2735966, 'name': ReconfigVM_Task, 'duration_secs': 0.472084} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.539690] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Reconfigured VM instance instance-0000004e to detach disk 2001 {{(pid=63028) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 976.544498] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a231e200-7fe6-4f94-aa03-97ca1329bd81 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.560104] env[63028]: DEBUG oslo_vmware.api [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Waiting for the task: (returnval){ [ 976.560104] env[63028]: value = "task-2735968" [ 976.560104] env[63028]: _type = "Task" [ 976.560104] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.571587] env[63028]: DEBUG oslo_vmware.api [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2735968, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.650308] env[63028]: DEBUG nova.network.neutron [None req-5a58b678-dd27-4716-b720-edf6a180012d tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: c386c117-e255-4c3b-9a37-011e517277de] Updating instance_info_cache with network_info: [{"id": "0e62de6a-f8ad-4958-81a2-9ff79a6bea03", "address": "fa:16:3e:ba:4e:90", "network": {"id": "1fdce9cd-bf47-402a-8e16-4c01a25ad481", "bridge": "br-int", "label": "tempest-ServersTestJSON-1363423183-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ea26842446ec4691a6456a6659188704", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "706c9762-1cf8-4770-897d-377d0d927773", "external-id": "nsx-vlan-transportzone-402", "segmentation_id": 402, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0e62de6a-f8", "ovs_interfaceid": "0e62de6a-f8ad-4958-81a2-9ff79a6bea03", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 976.760044] env[63028]: DEBUG nova.compute.manager [None req-a617f197-03a9-4ac7-af76-ff750a350c76 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: c0693e4c-30b2-4eda-be1e-f6186d78038b] Start spawning the instance on the hypervisor. 
{{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 976.806874] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance d41a1eae-bb89-4222-9466-d86af891c654 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 976.877837] env[63028]: DEBUG oslo_vmware.api [None req-303769e7-95d0-483b-bc6d-c7843cb1c5e3 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Task: {'id': task-2735967, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.897336] env[63028]: DEBUG oslo_concurrency.lockutils [req-61d39860-edab-460b-ae1a-ec1c280e5ce2 req-5cceab66-1901-48ff-8b1e-529673d25fd1 service nova] Releasing lock "refresh_cache-455578fa-7468-40dc-8c0a-37ac35e5c0a0" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 976.914605] env[63028]: DEBUG nova.objects.base [None req-af179e16-8efe-49df-98d4-457d6f23482d tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=63028) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 976.914893] env[63028]: DEBUG nova.network.neutron [None req-af179e16-8efe-49df-98d4-457d6f23482d tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: b77ba7d6-305e-4b60-a4b7-9353c12c3920] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 976.998178] env[63028]: DEBUG oslo_concurrency.lockutils [None req-af179e16-8efe-49df-98d4-457d6f23482d tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Lock "interface-b77ba7d6-305e-4b60-a4b7-9353c12c3920-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 1.092s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 977.073672] env[63028]: DEBUG oslo_vmware.api [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2735968, 'name': ReconfigVM_Task, 'duration_secs': 0.214125} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.074061] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-550816', 'volume_id': '15326c33-7e0b-41be-bf2e-5b82153cea0d', 'name': 'volume-15326c33-7e0b-41be-bf2e-5b82153cea0d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '46dc76bc-854f-46ad-9db5-21cf6f40fb21', 'attached_at': '', 'detached_at': '', 'volume_id': '15326c33-7e0b-41be-bf2e-5b82153cea0d', 'serial': '15326c33-7e0b-41be-bf2e-5b82153cea0d'} {{(pid=63028) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 977.154805] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5a58b678-dd27-4716-b720-edf6a180012d tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Releasing lock "refresh_cache-c386c117-e255-4c3b-9a37-011e517277de" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 977.155182] env[63028]: DEBUG nova.compute.manager [None req-5a58b678-dd27-4716-b720-edf6a180012d tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: c386c117-e255-4c3b-9a37-011e517277de] Instance network_info: |[{"id": "0e62de6a-f8ad-4958-81a2-9ff79a6bea03", "address": "fa:16:3e:ba:4e:90", "network": {"id": "1fdce9cd-bf47-402a-8e16-4c01a25ad481", "bridge": "br-int", "label": "tempest-ServersTestJSON-1363423183-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ea26842446ec4691a6456a6659188704", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "706c9762-1cf8-4770-897d-377d0d927773", "external-id": "nsx-vlan-transportzone-402", "segmentation_id": 402, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0e62de6a-f8", "ovs_interfaceid": "0e62de6a-f8ad-4958-81a2-9ff79a6bea03", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 977.156425] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-5a58b678-dd27-4716-b720-edf6a180012d tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: c386c117-e255-4c3b-9a37-011e517277de] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ba:4e:90', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '706c9762-1cf8-4770-897d-377d0d927773', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0e62de6a-f8ad-4958-81a2-9ff79a6bea03', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 977.163733] env[63028]: DEBUG 
oslo.service.loopingcall [None req-5a58b678-dd27-4716-b720-edf6a180012d tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 977.164086] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c386c117-e255-4c3b-9a37-011e517277de] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 977.164265] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-50b05721-98b9-4a96-8325-566cad128482 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.184871] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 977.184871] env[63028]: value = "task-2735969" [ 977.184871] env[63028]: _type = "Task" [ 977.184871] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.193153] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735969, 'name': CreateVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.313839] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance 3fb46d02-7914-4d08-b63b-f3447ba1b81a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 977.314230] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Instance with task_state "unshelving" is not being actively managed by this compute host but has allocations referencing this compute node (399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2): {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocations during the task state transition. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1780}} [ 977.379146] env[63028]: DEBUG oslo_vmware.api [None req-303769e7-95d0-483b-bc6d-c7843cb1c5e3 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Task: {'id': task-2735967, 'name': ReconfigVM_Task, 'duration_secs': 0.548063} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.379505] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-303769e7-95d0-483b-bc6d-c7843cb1c5e3 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] [instance: 56d6982d-9f76-4952-8c8b-f64b3c8d02fe] Reconfigured VM instance instance-00000057 to attach disk [datastore2] 56d6982d-9f76-4952-8c8b-f64b3c8d02fe/56d6982d-9f76-4952-8c8b-f64b3c8d02fe.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 977.380200] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e016a15d-1532-4586-b01f-b1a9e00e8e79 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.386704] env[63028]: DEBUG oslo_vmware.api [None req-303769e7-95d0-483b-bc6d-c7843cb1c5e3 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Waiting for the task: (returnval){ [ 977.386704] env[63028]: value = "task-2735970" [ 977.386704] env[63028]: _type = "Task" [ 977.386704] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.396052] env[63028]: DEBUG oslo_vmware.api [None req-303769e7-95d0-483b-bc6d-c7843cb1c5e3 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Task: {'id': task-2735970, 'name': Rename_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.697053] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735969, 'name': CreateVM_Task} progress is 99%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.816919] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance 53b0cf02-f1c8-4219-a58c-f7b5ffbb1ae7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 977.817268] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Total usable vcpus: 48, total allocated vcpus: 20 {{(pid=63028) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 977.817416] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=4416MB phys_disk=200GB used_disk=20GB total_vcpus=48 used_vcpus=20 pci_stats=[] {{(pid=63028) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 977.843037] env[63028]: DEBUG nova.scheduler.client.report [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Refreshing inventories for resource provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 977.859660] env[63028]: DEBUG nova.scheduler.client.report [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Updating ProviderTree inventory for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 977.859872] env[63028]: DEBUG nova.compute.provider_tree [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Updating inventory in ProviderTree for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 977.873280] env[63028]: DEBUG nova.scheduler.client.report [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Refreshing aggregate associations for resource provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2, aggregates: None {{(pid=63028) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 977.900545] env[63028]: DEBUG oslo_vmware.api [None req-303769e7-95d0-483b-bc6d-c7843cb1c5e3 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Task: {'id': task-2735970, 'name': Rename_Task, 'duration_secs': 0.195009} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.901748] env[63028]: DEBUG nova.scheduler.client.report [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Refreshing trait associations for resource provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=63028) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 977.904615] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-303769e7-95d0-483b-bc6d-c7843cb1c5e3 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] [instance: 56d6982d-9f76-4952-8c8b-f64b3c8d02fe] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 977.905257] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-32cb9f2e-a315-468d-a077-2f3c0ddda372 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.913315] env[63028]: DEBUG oslo_vmware.api [None req-303769e7-95d0-483b-bc6d-c7843cb1c5e3 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Waiting for the task: (returnval){ [ 977.913315] env[63028]: value = "task-2735971" [ 977.913315] env[63028]: _type = "Task" [ 977.913315] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.928711] env[63028]: DEBUG oslo_vmware.api [None req-303769e7-95d0-483b-bc6d-c7843cb1c5e3 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Task: {'id': task-2735971, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.028623] env[63028]: DEBUG nova.network.neutron [None req-a617f197-03a9-4ac7-af76-ff750a350c76 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: c0693e4c-30b2-4eda-be1e-f6186d78038b] Successfully updated port: 7c42e931-e162-4201-8483-8606a86e0dff {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 978.130499] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 978.130871] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ef6e6002-5fb8-4298-ac7f-070fd5852cdd {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.140079] env[63028]: DEBUG oslo_vmware.api [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Waiting for the task: (returnval){ [ 978.140079] env[63028]: value = "task-2735972" [ 978.140079] env[63028]: _type = "Task" [ 978.140079] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.151913] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] VM already powered off {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 978.152742] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Volume detach. Driver type: vmdk {{(pid=63028) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 978.152972] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-550816', 'volume_id': '15326c33-7e0b-41be-bf2e-5b82153cea0d', 'name': 'volume-15326c33-7e0b-41be-bf2e-5b82153cea0d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '46dc76bc-854f-46ad-9db5-21cf6f40fb21', 'attached_at': '', 'detached_at': '', 'volume_id': '15326c33-7e0b-41be-bf2e-5b82153cea0d', 'serial': '15326c33-7e0b-41be-bf2e-5b82153cea0d'} {{(pid=63028) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 978.154162] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d28e2d4-ffdd-4581-84c6-f9616c7cedf3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.177255] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdf82f90-5337-4818-ad7f-d1b9fd756d85 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.183931] env[63028]: WARNING nova.virt.vmwareapi.driver [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] The volume None does not exist!: nova.exception.DiskNotFound: Unable to find volume [ 978.184260] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 978.185064] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55d0720f-a8fc-4d8e-804b-e03eb04d5d18 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.196642] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Unregistering the VM {{(pid=63028) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 978.196897] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-55680a46-0d1e-4dca-9023-310746657b39 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.201508] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735969, 'name': CreateVM_Task} progress is 99%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.266062] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 978.266062] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Deleting contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 978.266062] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Deleting the datastore file [datastore2] 46dc76bc-854f-46ad-9db5-21cf6f40fb21 {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 978.268564] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-26ef0d9f-e451-487e-ba1b-eb1e7f08389b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.274978] env[63028]: DEBUG oslo_vmware.api [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Waiting for the task: (returnval){ [ 978.274978] env[63028]: value = "task-2735974" [ 978.274978] env[63028]: _type = "Task" [ 978.274978] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.287105] env[63028]: DEBUG oslo_vmware.api [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2735974, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.328249] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04ca377c-53ed-418d-9073-cf2e6d615b01 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.335862] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cdb8862-5eb7-4491-a27a-680f82b488cf {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.373171] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d1e6501-072a-4dbb-b9e2-ecc9651b5ebd {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.381050] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0162854e-4bbd-4593-b8ef-d25a912a4094 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.395336] env[63028]: DEBUG nova.compute.provider_tree [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 978.423647] env[63028]: DEBUG oslo_vmware.api [None req-303769e7-95d0-483b-bc6d-c7843cb1c5e3 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Task: {'id': task-2735971, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.505975] env[63028]: DEBUG nova.compute.manager [req-b0e105ef-f18d-4e21-a7a1-4312b05e9f41 req-3fd3abea-951e-4b22-affb-1f11b01a22e4 service nova] [instance: c386c117-e255-4c3b-9a37-011e517277de] Received event network-vif-plugged-0e62de6a-f8ad-4958-81a2-9ff79a6bea03 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 978.506217] env[63028]: DEBUG oslo_concurrency.lockutils [req-b0e105ef-f18d-4e21-a7a1-4312b05e9f41 req-3fd3abea-951e-4b22-affb-1f11b01a22e4 service nova] Acquiring lock "c386c117-e255-4c3b-9a37-011e517277de-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 978.506528] env[63028]: DEBUG oslo_concurrency.lockutils [req-b0e105ef-f18d-4e21-a7a1-4312b05e9f41 req-3fd3abea-951e-4b22-affb-1f11b01a22e4 service nova] Lock "c386c117-e255-4c3b-9a37-011e517277de-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 978.506704] env[63028]: DEBUG oslo_concurrency.lockutils [req-b0e105ef-f18d-4e21-a7a1-4312b05e9f41 req-3fd3abea-951e-4b22-affb-1f11b01a22e4 service nova] Lock "c386c117-e255-4c3b-9a37-011e517277de-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 978.506906] env[63028]: DEBUG nova.compute.manager [req-b0e105ef-f18d-4e21-a7a1-4312b05e9f41 req-3fd3abea-951e-4b22-affb-1f11b01a22e4 
service nova] [instance: c386c117-e255-4c3b-9a37-011e517277de] No waiting events found dispatching network-vif-plugged-0e62de6a-f8ad-4958-81a2-9ff79a6bea03 {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 978.507193] env[63028]: WARNING nova.compute.manager [req-b0e105ef-f18d-4e21-a7a1-4312b05e9f41 req-3fd3abea-951e-4b22-affb-1f11b01a22e4 service nova] [instance: c386c117-e255-4c3b-9a37-011e517277de] Received unexpected event network-vif-plugged-0e62de6a-f8ad-4958-81a2-9ff79a6bea03 for instance with vm_state building and task_state spawning. [ 978.507393] env[63028]: DEBUG nova.compute.manager [req-b0e105ef-f18d-4e21-a7a1-4312b05e9f41 req-3fd3abea-951e-4b22-affb-1f11b01a22e4 service nova] [instance: c386c117-e255-4c3b-9a37-011e517277de] Received event network-changed-0e62de6a-f8ad-4958-81a2-9ff79a6bea03 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 978.507576] env[63028]: DEBUG nova.compute.manager [req-b0e105ef-f18d-4e21-a7a1-4312b05e9f41 req-3fd3abea-951e-4b22-affb-1f11b01a22e4 service nova] [instance: c386c117-e255-4c3b-9a37-011e517277de] Refreshing instance network info cache due to event network-changed-0e62de6a-f8ad-4958-81a2-9ff79a6bea03. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 978.507791] env[63028]: DEBUG oslo_concurrency.lockutils [req-b0e105ef-f18d-4e21-a7a1-4312b05e9f41 req-3fd3abea-951e-4b22-affb-1f11b01a22e4 service nova] Acquiring lock "refresh_cache-c386c117-e255-4c3b-9a37-011e517277de" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 978.507939] env[63028]: DEBUG oslo_concurrency.lockutils [req-b0e105ef-f18d-4e21-a7a1-4312b05e9f41 req-3fd3abea-951e-4b22-affb-1f11b01a22e4 service nova] Acquired lock "refresh_cache-c386c117-e255-4c3b-9a37-011e517277de" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 978.508121] env[63028]: DEBUG nova.network.neutron [req-b0e105ef-f18d-4e21-a7a1-4312b05e9f41 req-3fd3abea-951e-4b22-affb-1f11b01a22e4 service nova] [instance: c386c117-e255-4c3b-9a37-011e517277de] Refreshing network info cache for port 0e62de6a-f8ad-4958-81a2-9ff79a6bea03 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 978.531638] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a617f197-03a9-4ac7-af76-ff750a350c76 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Acquiring lock "refresh_cache-c0693e4c-30b2-4eda-be1e-f6186d78038b" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 978.531891] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a617f197-03a9-4ac7-af76-ff750a350c76 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Acquired lock "refresh_cache-c0693e4c-30b2-4eda-be1e-f6186d78038b" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 978.532113] env[63028]: DEBUG nova.network.neutron [None req-a617f197-03a9-4ac7-af76-ff750a350c76 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: c0693e4c-30b2-4eda-be1e-f6186d78038b] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 978.698358] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': 
task-2735969, 'name': CreateVM_Task} progress is 99%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.784662] env[63028]: DEBUG oslo_vmware.api [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2735974, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.335681} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.784969] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 978.785184] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Deleted contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 978.785390] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 978.824305] env[63028]: DEBUG nova.virt.hardware [None req-a617f197-03a9-4ac7-af76-ff750a350c76 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 978.824527] env[63028]: DEBUG nova.virt.hardware [None req-a617f197-03a9-4ac7-af76-ff750a350c76 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 978.824685] env[63028]: DEBUG nova.virt.hardware [None req-a617f197-03a9-4ac7-af76-ff750a350c76 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 978.824872] env[63028]: DEBUG nova.virt.hardware [None req-a617f197-03a9-4ac7-af76-ff750a350c76 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Flavor pref 0:0:0 {{(pid=63028) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 978.825034] env[63028]: DEBUG nova.virt.hardware [None req-a617f197-03a9-4ac7-af76-ff750a350c76 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 978.825197] env[63028]: DEBUG nova.virt.hardware [None req-a617f197-03a9-4ac7-af76-ff750a350c76 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 978.825411] env[63028]: DEBUG nova.virt.hardware [None req-a617f197-03a9-4ac7-af76-ff750a350c76 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 978.825596] env[63028]: DEBUG nova.virt.hardware [None req-a617f197-03a9-4ac7-af76-ff750a350c76 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 978.825796] env[63028]: DEBUG nova.virt.hardware [None req-a617f197-03a9-4ac7-af76-ff750a350c76 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 978.825961] env[63028]: DEBUG nova.virt.hardware [None req-a617f197-03a9-4ac7-af76-ff750a350c76 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 978.826211] env[63028]: DEBUG nova.virt.hardware [None req-a617f197-03a9-4ac7-af76-ff750a350c76 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 978.829775] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6112a183-1ae3-4732-b1df-7a167ed086fc {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.839131] env[63028]: DEBUG oslo_vmware.rw_handles [None req-459fd01e-3304-4c20-a3b6-e4be4925e19b tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/520a564c-51cc-5fbe-8830-de7c3b623560/disk-0.vmdk. 
{{(pid=63028) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 978.840528] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9f47e7f-fad4-4944-8737-9c955eee2583 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.845056] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7510031f-9817-4bdc-87e5-40cb655e28b8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.860326] env[63028]: DEBUG oslo_vmware.rw_handles [None req-459fd01e-3304-4c20-a3b6-e4be4925e19b tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/520a564c-51cc-5fbe-8830-de7c3b623560/disk-0.vmdk is in state: ready. {{(pid=63028) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 978.860951] env[63028]: ERROR oslo_vmware.rw_handles [None req-459fd01e-3304-4c20-a3b6-e4be4925e19b tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/520a564c-51cc-5fbe-8830-de7c3b623560/disk-0.vmdk due to incomplete transfer. [ 978.860951] env[63028]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-4a617c47-9cc1-4462-8a10-a2c04261c143 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.868489] env[63028]: DEBUG oslo_vmware.rw_handles [None req-459fd01e-3304-4c20-a3b6-e4be4925e19b tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/520a564c-51cc-5fbe-8830-de7c3b623560/disk-0.vmdk. 
{{(pid=63028) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 978.868754] env[63028]: DEBUG nova.virt.vmwareapi.images [None req-459fd01e-3304-4c20-a3b6-e4be4925e19b tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Uploaded image 89f02435-fa22-4275-ab99-73170c1e53cf to the Glance image server {{(pid=63028) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 978.872248] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-459fd01e-3304-4c20-a3b6-e4be4925e19b tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Destroying the VM {{(pid=63028) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 978.874090] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-67855632-6298-4eb5-bbea-1ee751191ca5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.885960] env[63028]: DEBUG oslo_vmware.api [None req-459fd01e-3304-4c20-a3b6-e4be4925e19b tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Waiting for the task: (returnval){ [ 978.885960] env[63028]: value = "task-2735975" [ 978.885960] env[63028]: _type = "Task" [ 978.885960] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.895162] env[63028]: DEBUG oslo_vmware.api [None req-459fd01e-3304-4c20-a3b6-e4be4925e19b tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2735975, 'name': Destroy_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.899190] env[63028]: DEBUG nova.scheduler.client.report [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 978.924586] env[63028]: DEBUG oslo_vmware.api [None req-303769e7-95d0-483b-bc6d-c7843cb1c5e3 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Task: {'id': task-2735971, 'name': PowerOnVM_Task, 'duration_secs': 0.618725} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.924875] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-303769e7-95d0-483b-bc6d-c7843cb1c5e3 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] [instance: 56d6982d-9f76-4952-8c8b-f64b3c8d02fe] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 978.925093] env[63028]: INFO nova.compute.manager [None req-303769e7-95d0-483b-bc6d-c7843cb1c5e3 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] [instance: 56d6982d-9f76-4952-8c8b-f64b3c8d02fe] Took 9.07 seconds to spawn the instance on the hypervisor. [ 978.925283] env[63028]: DEBUG nova.compute.manager [None req-303769e7-95d0-483b-bc6d-c7843cb1c5e3 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] [instance: 56d6982d-9f76-4952-8c8b-f64b3c8d02fe] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 978.926186] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9ce8fda-675d-4910-9e59-41beb7b276de {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.080359] env[63028]: DEBUG nova.network.neutron [None req-a617f197-03a9-4ac7-af76-ff750a350c76 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: c0693e4c-30b2-4eda-be1e-f6186d78038b] Instance cache missing network info. {{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 979.197526] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735969, 'name': CreateVM_Task, 'duration_secs': 1.524106} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 979.197717] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c386c117-e255-4c3b-9a37-011e517277de] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 979.198434] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5a58b678-dd27-4716-b720-edf6a180012d tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 979.198607] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5a58b678-dd27-4716-b720-edf6a180012d tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 979.198937] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5a58b678-dd27-4716-b720-edf6a180012d tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 979.199213] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-64aaf471-ef6b-4fa1-aa21-4c4310fae84e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.204011] env[63028]: DEBUG oslo_vmware.api [None req-5a58b678-dd27-4716-b720-edf6a180012d tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Waiting for the task: (returnval){ [ 979.204011] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52c9cfb4-3bce-af81-75bf-9c8235f6f4af" [ 979.204011] env[63028]: _type = "Task" [ 979.204011] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 979.214625] env[63028]: DEBUG oslo_vmware.api [None req-5a58b678-dd27-4716-b720-edf6a180012d tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52c9cfb4-3bce-af81-75bf-9c8235f6f4af, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.215491] env[63028]: DEBUG nova.network.neutron [req-b0e105ef-f18d-4e21-a7a1-4312b05e9f41 req-3fd3abea-951e-4b22-affb-1f11b01a22e4 service nova] [instance: c386c117-e255-4c3b-9a37-011e517277de] Updated VIF entry in instance network info cache for port 0e62de6a-f8ad-4958-81a2-9ff79a6bea03. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 979.215833] env[63028]: DEBUG nova.network.neutron [req-b0e105ef-f18d-4e21-a7a1-4312b05e9f41 req-3fd3abea-951e-4b22-affb-1f11b01a22e4 service nova] [instance: c386c117-e255-4c3b-9a37-011e517277de] Updating instance_info_cache with network_info: [{"id": "0e62de6a-f8ad-4958-81a2-9ff79a6bea03", "address": "fa:16:3e:ba:4e:90", "network": {"id": "1fdce9cd-bf47-402a-8e16-4c01a25ad481", "bridge": "br-int", "label": "tempest-ServersTestJSON-1363423183-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ea26842446ec4691a6456a6659188704", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "706c9762-1cf8-4770-897d-377d0d927773", "external-id": "nsx-vlan-transportzone-402", "segmentation_id": 402, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0e62de6a-f8", "ovs_interfaceid": "0e62de6a-f8ad-4958-81a2-9ff79a6bea03", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 979.267149] env[63028]: DEBUG nova.network.neutron [None req-a617f197-03a9-4ac7-af76-ff750a350c76 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: c0693e4c-30b2-4eda-be1e-f6186d78038b] Updating instance_info_cache with network_info: [{"id": "7c42e931-e162-4201-8483-8606a86e0dff", "address": "fa:16:3e:1a:2a:74", "network": {"id": "063de6d9-43e6-4adb-88dc-d4fe17058488", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-659483097-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68de7445caeb4381b9e68c685ccb5e0b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b356db78-99c7-4464-822c-fc7e193f7878", "external-id": "nsx-vlan-transportzone-231", "segmentation_id": 231, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c42e931-e1", "ovs_interfaceid": "7c42e931-e162-4201-8483-8606a86e0dff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 979.292239] env[63028]: INFO nova.virt.block_device [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Booting with volume 15326c33-7e0b-41be-bf2e-5b82153cea0d at /dev/sdb [ 979.331575] env[63028]: DEBUG oslo_vmware.service 
[-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7be128d3-8918-404e-9918-d8dec2f6f321 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.343300] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c911c93d-12e4-4015-834b-7cf601d0a663 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.384826] env[63028]: DEBUG oslo_concurrency.lockutils [None req-42d96fef-1c11-4e96-a2d8-a9fca09f962b tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Acquiring lock "interface-b77ba7d6-305e-4b60-a4b7-9353c12c3920-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 979.385068] env[63028]: DEBUG oslo_concurrency.lockutils [None req-42d96fef-1c11-4e96-a2d8-a9fca09f962b tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Lock "interface-b77ba7d6-305e-4b60-a4b7-9353c12c3920-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 979.385391] env[63028]: DEBUG nova.objects.instance [None req-42d96fef-1c11-4e96-a2d8-a9fca09f962b tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Lazy-loading 'flavor' on Instance uuid b77ba7d6-305e-4b60-a4b7-9353c12c3920 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 979.386921] env[63028]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6c3bf310-1572-4b00-96f2-6fcbfd5bca74 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.399864] env[63028]: DEBUG oslo_vmware.api [None req-459fd01e-3304-4c20-a3b6-e4be4925e19b tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2735975, 'name': Destroy_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.405395] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0bd2be9-c53b-4474-9cdd-1a67ce59b6e7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.418483] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63028) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 979.418719] env[63028]: DEBUG oslo_concurrency.lockutils [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 4.200s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 979.419369] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2021c84d-1019-4fb3-b96c-749c278c13a7 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.159s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 979.420978] env[63028]: INFO nova.compute.claims [None req-2021c84d-1019-4fb3-b96c-749c278c13a7 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 979.454853] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-281c1781-e75e-4484-92f8-0e6b0c13102c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.457912] env[63028]: INFO nova.compute.manager [None req-303769e7-95d0-483b-bc6d-c7843cb1c5e3 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] [instance: 56d6982d-9f76-4952-8c8b-f64b3c8d02fe] Took 36.67 seconds to build instance. [ 979.463318] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69b693d2-3642-44b2-bf76-67c8f38a3b61 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.477918] env[63028]: DEBUG nova.virt.block_device [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Updating existing volume attachment record: 08e66ea2-3cb4-4b0c-a6b3-9699ddfc4390 {{(pid=63028) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 979.714759] env[63028]: DEBUG oslo_vmware.api [None req-5a58b678-dd27-4716-b720-edf6a180012d tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52c9cfb4-3bce-af81-75bf-9c8235f6f4af, 'name': SearchDatastore_Task, 'duration_secs': 0.02389} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 979.715116] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5a58b678-dd27-4716-b720-edf6a180012d tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 979.715364] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-5a58b678-dd27-4716-b720-edf6a180012d tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: c386c117-e255-4c3b-9a37-011e517277de] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 979.715623] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5a58b678-dd27-4716-b720-edf6a180012d tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 979.715793] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5a58b678-dd27-4716-b720-edf6a180012d tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 979.715984] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a58b678-dd27-4716-b720-edf6a180012d tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 979.716284] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-037360b4-5ed9-45e1-b80b-0e3dfd71035a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.718320] env[63028]: DEBUG oslo_concurrency.lockutils [req-b0e105ef-f18d-4e21-a7a1-4312b05e9f41 req-3fd3abea-951e-4b22-affb-1f11b01a22e4 service nova] Releasing lock "refresh_cache-c386c117-e255-4c3b-9a37-011e517277de" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 979.725323] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a58b678-dd27-4716-b720-edf6a180012d tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 979.725480] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-5a58b678-dd27-4716-b720-edf6a180012d tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 979.726235] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c7a18f78-f8eb-4205-93a2-a05e7f2251af {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.731264] env[63028]: DEBUG oslo_vmware.api [None req-5a58b678-dd27-4716-b720-edf6a180012d tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Waiting for the task: (returnval){ [ 979.731264] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52620d18-4c17-894f-7ff1-a78d910dc143" [ 979.731264] env[63028]: _type = "Task" [ 979.731264] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 979.739450] env[63028]: DEBUG oslo_vmware.api [None req-5a58b678-dd27-4716-b720-edf6a180012d tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52620d18-4c17-894f-7ff1-a78d910dc143, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.770180] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a617f197-03a9-4ac7-af76-ff750a350c76 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Releasing lock "refresh_cache-c0693e4c-30b2-4eda-be1e-f6186d78038b" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 979.770509] env[63028]: DEBUG nova.compute.manager [None req-a617f197-03a9-4ac7-af76-ff750a350c76 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: c0693e4c-30b2-4eda-be1e-f6186d78038b] Instance network_info: |[{"id": "7c42e931-e162-4201-8483-8606a86e0dff", "address": "fa:16:3e:1a:2a:74", "network": {"id": "063de6d9-43e6-4adb-88dc-d4fe17058488", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-659483097-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68de7445caeb4381b9e68c685ccb5e0b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b356db78-99c7-4464-822c-fc7e193f7878", "external-id": "nsx-vlan-transportzone-231", "segmentation_id": 231, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c42e931-e1", "ovs_interfaceid": "7c42e931-e162-4201-8483-8606a86e0dff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 979.771294] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-a617f197-03a9-4ac7-af76-ff750a350c76 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: c0693e4c-30b2-4eda-be1e-f6186d78038b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 
'fa:16:3e:1a:2a:74', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b356db78-99c7-4464-822c-fc7e193f7878', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7c42e931-e162-4201-8483-8606a86e0dff', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 979.780100] env[63028]: DEBUG oslo.service.loopingcall [None req-a617f197-03a9-4ac7-af76-ff750a350c76 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 979.780100] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c0693e4c-30b2-4eda-be1e-f6186d78038b] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 979.780100] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-32fc0d94-ed34-4a9a-9b8b-0e9e76bc88af {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.800030] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 979.800030] env[63028]: value = "task-2735976" [ 979.800030] env[63028]: _type = "Task" [ 979.800030] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 979.807408] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735976, 'name': CreateVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.903063] env[63028]: DEBUG oslo_vmware.api [None req-459fd01e-3304-4c20-a3b6-e4be4925e19b tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2735975, 'name': Destroy_Task} progress is 33%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.960029] env[63028]: DEBUG oslo_concurrency.lockutils [None req-303769e7-95d0-483b-bc6d-c7843cb1c5e3 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Lock "56d6982d-9f76-4952-8c8b-f64b3c8d02fe" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 38.182s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 980.015051] env[63028]: DEBUG nova.objects.instance [None req-42d96fef-1c11-4e96-a2d8-a9fca09f962b tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Lazy-loading 'pci_requests' on Instance uuid b77ba7d6-305e-4b60-a4b7-9353c12c3920 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 980.242940] env[63028]: DEBUG oslo_vmware.api [None req-5a58b678-dd27-4716-b720-edf6a180012d tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52620d18-4c17-894f-7ff1-a78d910dc143, 'name': SearchDatastore_Task, 'duration_secs': 0.020229} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 980.243896] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f03699ef-d49f-46fc-b3ad-88495e955029 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.249637] env[63028]: DEBUG oslo_vmware.api [None req-5a58b678-dd27-4716-b720-edf6a180012d tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Waiting for the task: (returnval){ [ 980.249637] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52a27bc9-c549-e8a3-cb56-8d0d5137b313" [ 980.249637] env[63028]: _type = "Task" [ 980.249637] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.260092] env[63028]: DEBUG oslo_vmware.api [None req-5a58b678-dd27-4716-b720-edf6a180012d tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52a27bc9-c549-e8a3-cb56-8d0d5137b313, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.310376] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735976, 'name': CreateVM_Task, 'duration_secs': 0.47217} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 980.310598] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c0693e4c-30b2-4eda-be1e-f6186d78038b] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 980.311459] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a617f197-03a9-4ac7-af76-ff750a350c76 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 980.311716] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a617f197-03a9-4ac7-af76-ff750a350c76 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 980.312084] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a617f197-03a9-4ac7-af76-ff750a350c76 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 980.312408] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c799d46b-7a88-418b-a4b2-4653897c1b89 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.317594] env[63028]: DEBUG oslo_vmware.api [None req-a617f197-03a9-4ac7-af76-ff750a350c76 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Waiting for the task: (returnval){ [ 980.317594] env[63028]: 
value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]529042f2-d714-4077-f2e9-65dbf447894d" [ 980.317594] env[63028]: _type = "Task" [ 980.317594] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.326241] env[63028]: DEBUG oslo_vmware.api [None req-a617f197-03a9-4ac7-af76-ff750a350c76 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]529042f2-d714-4077-f2e9-65dbf447894d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.398264] env[63028]: DEBUG oslo_vmware.api [None req-459fd01e-3304-4c20-a3b6-e4be4925e19b tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2735975, 'name': Destroy_Task, 'duration_secs': 1.116578} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 980.398608] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-459fd01e-3304-4c20-a3b6-e4be4925e19b tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Destroyed the VM [ 980.398801] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-459fd01e-3304-4c20-a3b6-e4be4925e19b tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Deleting Snapshot of the VM instance {{(pid=63028) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 980.399167] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-4304eaf5-5789-4dd3-901c-4c603f9b69ec {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.406384] env[63028]: DEBUG oslo_vmware.api [None req-459fd01e-3304-4c20-a3b6-e4be4925e19b tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Waiting for the task: (returnval){ [ 980.406384] env[63028]: value = "task-2735977" [ 980.406384] env[63028]: _type = "Task" [ 980.406384] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.414997] env[63028]: DEBUG oslo_vmware.api [None req-459fd01e-3304-4c20-a3b6-e4be4925e19b tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2735977, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.518858] env[63028]: DEBUG nova.objects.base [None req-42d96fef-1c11-4e96-a2d8-a9fca09f962b tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=63028) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 980.518858] env[63028]: DEBUG nova.network.neutron [None req-42d96fef-1c11-4e96-a2d8-a9fca09f962b tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: b77ba7d6-305e-4b60-a4b7-9353c12c3920] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 980.570566] env[63028]: DEBUG nova.policy [None req-42d96fef-1c11-4e96-a2d8-a9fca09f962b tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1b48f3f2a85945379bdb33bf153bde9c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '25a6457f62d149629c09589feb1a550c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 980.765757] env[63028]: DEBUG oslo_vmware.api [None req-5a58b678-dd27-4716-b720-edf6a180012d tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52a27bc9-c549-e8a3-cb56-8d0d5137b313, 'name': SearchDatastore_Task, 'duration_secs': 0.023326} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 980.766235] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5a58b678-dd27-4716-b720-edf6a180012d tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 980.766302] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a58b678-dd27-4716-b720-edf6a180012d tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] c386c117-e255-4c3b-9a37-011e517277de/c386c117-e255-4c3b-9a37-011e517277de.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 980.766577] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9a72a51a-e787-4e4e-95d2-c7c86000d9e0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.774811] env[63028]: DEBUG oslo_vmware.api [None req-5a58b678-dd27-4716-b720-edf6a180012d tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Waiting for the task: (returnval){ [ 980.774811] env[63028]: value = "task-2735978" [ 980.774811] env[63028]: _type = "Task" [ 980.774811] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.786654] env[63028]: DEBUG oslo_vmware.api [None req-5a58b678-dd27-4716-b720-edf6a180012d tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2735978, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.830900] env[63028]: DEBUG oslo_vmware.api [None req-a617f197-03a9-4ac7-af76-ff750a350c76 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]529042f2-d714-4077-f2e9-65dbf447894d, 'name': SearchDatastore_Task, 'duration_secs': 0.016754} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 980.831290] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a617f197-03a9-4ac7-af76-ff750a350c76 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 980.831540] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-a617f197-03a9-4ac7-af76-ff750a350c76 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: c0693e4c-30b2-4eda-be1e-f6186d78038b] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 980.831822] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a617f197-03a9-4ac7-af76-ff750a350c76 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 980.831979] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a617f197-03a9-4ac7-af76-ff750a350c76 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 980.832178] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-a617f197-03a9-4ac7-af76-ff750a350c76 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 980.832879] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4a9e6c4b-f10d-4dd4-874c-38fa42b16194 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.839015] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ccb8b3d-cb4b-480e-8280-6760006b5397 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.847388] env[63028]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3733a333-fbb1-47cd-9515-8593f897d557 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.853784] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-a617f197-03a9-4ac7-af76-ff750a350c76 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 980.853784] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-a617f197-03a9-4ac7-af76-ff750a350c76 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 980.853784] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a9b2c56f-e23c-4fd3-93ce-5eec609282d0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.887188] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bbc0073-1b87-406e-9b81-e6043a79c548 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.891825] env[63028]: DEBUG oslo_vmware.api [None req-a617f197-03a9-4ac7-af76-ff750a350c76 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Waiting for the task: (returnval){ [ 980.891825] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5223c419-93f0-2140-ff1a-5ed1a14ca5e1" [ 980.891825] env[63028]: _type = "Task" [ 980.891825] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.900453] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7682924-2989-453d-b4c3-faee88f18fb9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.908501] env[63028]: DEBUG oslo_vmware.api [None req-a617f197-03a9-4ac7-af76-ff750a350c76 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5223c419-93f0-2140-ff1a-5ed1a14ca5e1, 'name': SearchDatastore_Task, 'duration_secs': 0.00918} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 980.921087] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-240060d4-c72e-4223-a35f-673b3d37894a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.924112] env[63028]: DEBUG nova.compute.provider_tree [None req-2021c84d-1019-4fb3-b96c-749c278c13a7 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 980.930425] env[63028]: DEBUG oslo_vmware.api [None req-459fd01e-3304-4c20-a3b6-e4be4925e19b tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2735977, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.931996] env[63028]: DEBUG oslo_vmware.api [None req-a617f197-03a9-4ac7-af76-ff750a350c76 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Waiting for the task: (returnval){ [ 980.931996] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52ebf732-1cce-a73f-7e41-a04e0fcc0fac" [ 980.931996] env[63028]: _type = "Task" [ 980.931996] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.942258] env[63028]: DEBUG oslo_vmware.api [None req-a617f197-03a9-4ac7-af76-ff750a350c76 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52ebf732-1cce-a73f-7e41-a04e0fcc0fac, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.033586] env[63028]: DEBUG nova.network.neutron [None req-42d96fef-1c11-4e96-a2d8-a9fca09f962b tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: b77ba7d6-305e-4b60-a4b7-9353c12c3920] Successfully created port: 98722f90-f2d2-4a4a-9e68-ad5c32b18435 {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 981.289477] env[63028]: DEBUG oslo_vmware.api [None req-5a58b678-dd27-4716-b720-edf6a180012d tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2735978, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.370717] env[63028]: DEBUG nova.compute.manager [req-0a3a3554-1994-4d2f-a60e-d610e2485024 req-28392a6e-7a84-4723-bb3a-65aec5c4aa62 service nova] [instance: 455578fa-7468-40dc-8c0a-37ac35e5c0a0] Received event network-changed-3cae8ebc-a19e-401f-aa80-28da2e6bcd42 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 981.370963] env[63028]: DEBUG nova.compute.manager [req-0a3a3554-1994-4d2f-a60e-d610e2485024 req-28392a6e-7a84-4723-bb3a-65aec5c4aa62 service nova] [instance: 455578fa-7468-40dc-8c0a-37ac35e5c0a0] Refreshing instance network info cache due to event network-changed-3cae8ebc-a19e-401f-aa80-28da2e6bcd42. 
{{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 981.371267] env[63028]: DEBUG oslo_concurrency.lockutils [req-0a3a3554-1994-4d2f-a60e-d610e2485024 req-28392a6e-7a84-4723-bb3a-65aec5c4aa62 service nova] Acquiring lock "refresh_cache-455578fa-7468-40dc-8c0a-37ac35e5c0a0" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 981.371422] env[63028]: DEBUG oslo_concurrency.lockutils [req-0a3a3554-1994-4d2f-a60e-d610e2485024 req-28392a6e-7a84-4723-bb3a-65aec5c4aa62 service nova] Acquired lock "refresh_cache-455578fa-7468-40dc-8c0a-37ac35e5c0a0" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 981.371612] env[63028]: DEBUG nova.network.neutron [req-0a3a3554-1994-4d2f-a60e-d610e2485024 req-28392a6e-7a84-4723-bb3a-65aec5c4aa62 service nova] [instance: 455578fa-7468-40dc-8c0a-37ac35e5c0a0] Refreshing network info cache for port 3cae8ebc-a19e-401f-aa80-28da2e6bcd42 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 981.420323] env[63028]: DEBUG oslo_vmware.api [None req-459fd01e-3304-4c20-a3b6-e4be4925e19b tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2735977, 'name': RemoveSnapshot_Task, 'duration_secs': 0.637494} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.420630] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-459fd01e-3304-4c20-a3b6-e4be4925e19b tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Deleted Snapshot of the VM instance {{(pid=63028) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 981.420937] env[63028]: DEBUG nova.compute.manager [None req-459fd01e-3304-4c20-a3b6-e4be4925e19b tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 981.421752] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3459c6bd-5a17-469d-a723-ccafc8db6743 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.428125] env[63028]: DEBUG nova.scheduler.client.report [None req-2021c84d-1019-4fb3-b96c-749c278c13a7 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 981.448065] env[63028]: DEBUG oslo_vmware.api [None req-a617f197-03a9-4ac7-af76-ff750a350c76 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': 
session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52ebf732-1cce-a73f-7e41-a04e0fcc0fac, 'name': SearchDatastore_Task, 'duration_secs': 0.011446} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.448065] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a617f197-03a9-4ac7-af76-ff750a350c76 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 981.448065] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-a617f197-03a9-4ac7-af76-ff750a350c76 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] c0693e4c-30b2-4eda-be1e-f6186d78038b/c0693e4c-30b2-4eda-be1e-f6186d78038b.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 981.448065] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-88c3d63b-ca3a-434f-9ddf-b55b73b88656 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.454570] env[63028]: DEBUG oslo_vmware.api [None req-a617f197-03a9-4ac7-af76-ff750a350c76 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Waiting for the task: (returnval){ [ 981.454570] env[63028]: value = "task-2735979" [ 981.454570] env[63028]: _type = "Task" [ 981.454570] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.462954] env[63028]: DEBUG oslo_vmware.api [None req-a617f197-03a9-4ac7-af76-ff750a350c76 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': task-2735979, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.611146] env[63028]: DEBUG nova.virt.hardware [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 981.611427] env[63028]: DEBUG nova.virt.hardware [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 981.611630] env[63028]: DEBUG nova.virt.hardware [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 981.611833] env[63028]: DEBUG nova.virt.hardware [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 981.612026] env[63028]: DEBUG nova.virt.hardware [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 981.612189] env[63028]: DEBUG nova.virt.hardware [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 981.612424] env[63028]: DEBUG nova.virt.hardware [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 981.612614] env[63028]: DEBUG nova.virt.hardware [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 981.612847] env[63028]: DEBUG 
nova.virt.hardware [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 981.612930] env[63028]: DEBUG nova.virt.hardware [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 981.613157] env[63028]: DEBUG nova.virt.hardware [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 981.614119] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39694d53-7621-4510-9ba5-c858c038602d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.623031] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bb5fe5c-dde3-40b6-b57d-7a20dc197e6c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.637847] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:df:5b:21', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c8459aaf-d6a8-46fb-ad14-464ac3104695', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '11a8272a-a9ff-4d48-860e-8ee1b781a6ab', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 981.645620] env[63028]: DEBUG oslo.service.loopingcall [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 981.645865] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 981.646105] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-161be7c5-c6e0-44d2-ade3-1f395c7da0d0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.666861] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 981.666861] env[63028]: value = "task-2735980" [ 981.666861] env[63028]: _type = "Task" [ 981.666861] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.675407] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735980, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.785538] env[63028]: DEBUG oslo_vmware.api [None req-5a58b678-dd27-4716-b720-edf6a180012d tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2735978, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.935571] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2021c84d-1019-4fb3-b96c-749c278c13a7 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.516s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 981.936269] env[63028]: DEBUG nova.compute.manager [None req-2021c84d-1019-4fb3-b96c-749c278c13a7 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Start building networks asynchronously for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 981.939453] env[63028]: DEBUG oslo_concurrency.lockutils [None req-580ecdb3-2bd1-4068-afc1-9df515addc7f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 22.862s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 981.943246] env[63028]: INFO nova.compute.manager [None req-459fd01e-3304-4c20-a3b6-e4be4925e19b tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Shelve offloading [ 981.967704] env[63028]: DEBUG oslo_vmware.api [None req-a617f197-03a9-4ac7-af76-ff750a350c76 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': task-2735979, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.088124] env[63028]: DEBUG nova.network.neutron [req-0a3a3554-1994-4d2f-a60e-d610e2485024 req-28392a6e-7a84-4723-bb3a-65aec5c4aa62 service nova] [instance: 455578fa-7468-40dc-8c0a-37ac35e5c0a0] Updated VIF entry in instance network info cache for port 3cae8ebc-a19e-401f-aa80-28da2e6bcd42. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 982.088527] env[63028]: DEBUG nova.network.neutron [req-0a3a3554-1994-4d2f-a60e-d610e2485024 req-28392a6e-7a84-4723-bb3a-65aec5c4aa62 service nova] [instance: 455578fa-7468-40dc-8c0a-37ac35e5c0a0] Updating instance_info_cache with network_info: [{"id": "3cae8ebc-a19e-401f-aa80-28da2e6bcd42", "address": "fa:16:3e:da:e5:5c", "network": {"id": "12d9a704-bca6-4132-add3-8d8bb4a11dbd", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-831034052-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0cc09f51b98c489ba0a3f14161e4a686", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7041d198-66a3-40de-bf7d-cfc036e6ed69", "external-id": "nsx-vlan-transportzone-278", "segmentation_id": 278, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3cae8ebc-a1", "ovs_interfaceid": "3cae8ebc-a19e-401f-aa80-28da2e6bcd42", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 982.107325] env[63028]: DEBUG nova.compute.manager [req-1ba43428-a0cc-4110-9857-30d8c0a659c9 req-6de69178-91ff-48a8-8fb1-8889952984e5 service nova] [instance: c0693e4c-30b2-4eda-be1e-f6186d78038b] Received event network-vif-plugged-7c42e931-e162-4201-8483-8606a86e0dff {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 982.107325] env[63028]: DEBUG oslo_concurrency.lockutils [req-1ba43428-a0cc-4110-9857-30d8c0a659c9 req-6de69178-91ff-48a8-8fb1-8889952984e5 service nova] Acquiring lock "c0693e4c-30b2-4eda-be1e-f6186d78038b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 982.107651] env[63028]: DEBUG oslo_concurrency.lockutils [req-1ba43428-a0cc-4110-9857-30d8c0a659c9 req-6de69178-91ff-48a8-8fb1-8889952984e5 service nova] Lock "c0693e4c-30b2-4eda-be1e-f6186d78038b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 982.107651] env[63028]: DEBUG oslo_concurrency.lockutils [req-1ba43428-a0cc-4110-9857-30d8c0a659c9 req-6de69178-91ff-48a8-8fb1-8889952984e5 service nova] Lock "c0693e4c-30b2-4eda-be1e-f6186d78038b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 982.107774] env[63028]: DEBUG nova.compute.manager [req-1ba43428-a0cc-4110-9857-30d8c0a659c9 req-6de69178-91ff-48a8-8fb1-8889952984e5 service nova] [instance: c0693e4c-30b2-4eda-be1e-f6186d78038b] No waiting events found dispatching network-vif-plugged-7c42e931-e162-4201-8483-8606a86e0dff {{(pid=63028) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:322}} [ 982.107987] env[63028]: WARNING nova.compute.manager [req-1ba43428-a0cc-4110-9857-30d8c0a659c9 req-6de69178-91ff-48a8-8fb1-8889952984e5 service nova] [instance: c0693e4c-30b2-4eda-be1e-f6186d78038b] Received unexpected event network-vif-plugged-7c42e931-e162-4201-8483-8606a86e0dff for instance with vm_state building and task_state spawning. [ 982.108058] env[63028]: DEBUG nova.compute.manager [req-1ba43428-a0cc-4110-9857-30d8c0a659c9 req-6de69178-91ff-48a8-8fb1-8889952984e5 service nova] [instance: c0693e4c-30b2-4eda-be1e-f6186d78038b] Received event network-changed-7c42e931-e162-4201-8483-8606a86e0dff {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 982.108234] env[63028]: DEBUG nova.compute.manager [req-1ba43428-a0cc-4110-9857-30d8c0a659c9 req-6de69178-91ff-48a8-8fb1-8889952984e5 service nova] [instance: c0693e4c-30b2-4eda-be1e-f6186d78038b] Refreshing instance network info cache due to event network-changed-7c42e931-e162-4201-8483-8606a86e0dff. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 982.108416] env[63028]: DEBUG oslo_concurrency.lockutils [req-1ba43428-a0cc-4110-9857-30d8c0a659c9 req-6de69178-91ff-48a8-8fb1-8889952984e5 service nova] Acquiring lock "refresh_cache-c0693e4c-30b2-4eda-be1e-f6186d78038b" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 982.108553] env[63028]: DEBUG oslo_concurrency.lockutils [req-1ba43428-a0cc-4110-9857-30d8c0a659c9 req-6de69178-91ff-48a8-8fb1-8889952984e5 service nova] Acquired lock "refresh_cache-c0693e4c-30b2-4eda-be1e-f6186d78038b" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 982.108707] env[63028]: DEBUG nova.network.neutron [req-1ba43428-a0cc-4110-9857-30d8c0a659c9 req-6de69178-91ff-48a8-8fb1-8889952984e5 service nova] [instance: c0693e4c-30b2-4eda-be1e-f6186d78038b] Refreshing network info cache for port 7c42e931-e162-4201-8483-8606a86e0dff {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 982.178917] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735980, 'name': CreateVM_Task} progress is 99%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.286199] env[63028]: DEBUG oslo_vmware.api [None req-5a58b678-dd27-4716-b720-edf6a180012d tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2735978, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.447668] env[63028]: DEBUG nova.compute.utils [None req-2021c84d-1019-4fb3-b96c-749c278c13a7 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 982.449582] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-459fd01e-3304-4c20-a3b6-e4be4925e19b tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 982.450012] env[63028]: DEBUG nova.compute.manager [None req-2021c84d-1019-4fb3-b96c-749c278c13a7 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Allocating IP information in the background. {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 982.450098] env[63028]: DEBUG nova.network.neutron [None req-2021c84d-1019-4fb3-b96c-749c278c13a7 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: d41a1eae-bb89-4222-9466-d86af891c654] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 982.452463] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-533d9296-40dd-42e2-a72b-5eda0a2ce018 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.465757] env[63028]: DEBUG oslo_vmware.api [None req-a617f197-03a9-4ac7-af76-ff750a350c76 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': task-2735979, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.467612] env[63028]: DEBUG oslo_vmware.api [None req-459fd01e-3304-4c20-a3b6-e4be4925e19b tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Waiting for the task: (returnval){ [ 982.467612] env[63028]: value = "task-2735981" [ 982.467612] env[63028]: _type = "Task" [ 982.467612] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.480637] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-459fd01e-3304-4c20-a3b6-e4be4925e19b tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] VM already powered off {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 982.480637] env[63028]: DEBUG nova.compute.manager [None req-459fd01e-3304-4c20-a3b6-e4be4925e19b tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 982.481398] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fa7e039-f1a2-4896-bad0-134834e4b2ce {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.492795] env[63028]: DEBUG oslo_concurrency.lockutils [None req-459fd01e-3304-4c20-a3b6-e4be4925e19b tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Acquiring lock "refresh_cache-85aafadb-81d6-4687-aed1-fbe829e5f95f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 982.493018] env[63028]: DEBUG oslo_concurrency.lockutils [None req-459fd01e-3304-4c20-a3b6-e4be4925e19b tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Acquired lock "refresh_cache-85aafadb-81d6-4687-aed1-fbe829e5f95f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 982.493211] env[63028]: DEBUG nova.network.neutron [None req-459fd01e-3304-4c20-a3b6-e4be4925e19b tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 982.513275] env[63028]: DEBUG nova.policy [None req-2021c84d-1019-4fb3-b96c-749c278c13a7 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8b3c3b29bb4d4c23a09527bcda019773', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b4dcaef840f940bda057d0371cdc5adb', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 982.592355] env[63028]: DEBUG oslo_concurrency.lockutils [req-0a3a3554-1994-4d2f-a60e-d610e2485024 req-28392a6e-7a84-4723-bb3a-65aec5c4aa62 service nova] Releasing lock "refresh_cache-455578fa-7468-40dc-8c0a-37ac35e5c0a0" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 982.683413] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735980, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.791473] env[63028]: DEBUG oslo_vmware.api [None req-5a58b678-dd27-4716-b720-edf6a180012d tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2735978, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.542594} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.791473] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a58b678-dd27-4716-b720-edf6a180012d tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] c386c117-e255-4c3b-9a37-011e517277de/c386c117-e255-4c3b-9a37-011e517277de.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 982.791721] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-5a58b678-dd27-4716-b720-edf6a180012d tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: c386c117-e255-4c3b-9a37-011e517277de] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 982.791908] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-30912096-6e1b-431f-a905-c3aca4d3286e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.798649] env[63028]: DEBUG oslo_vmware.api [None req-5a58b678-dd27-4716-b720-edf6a180012d tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Waiting for the task: (returnval){ [ 982.798649] env[63028]: value = "task-2735982" [ 982.798649] env[63028]: _type = "Task" [ 982.798649] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.807588] env[63028]: DEBUG oslo_vmware.api [None req-5a58b678-dd27-4716-b720-edf6a180012d tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2735982, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.861115] env[63028]: DEBUG nova.network.neutron [None req-42d96fef-1c11-4e96-a2d8-a9fca09f962b tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: b77ba7d6-305e-4b60-a4b7-9353c12c3920] Successfully updated port: 98722f90-f2d2-4a4a-9e68-ad5c32b18435 {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 982.926182] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81acac0d-5683-405e-9097-c3f789bd608f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.934648] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7cb83d4-957f-4704-93a8-c87acbab4431 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.971690] env[63028]: DEBUG nova.compute.manager [None req-2021c84d-1019-4fb3-b96c-749c278c13a7 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Start building block device mappings for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 982.976162] env[63028]: DEBUG nova.network.neutron [req-1ba43428-a0cc-4110-9857-30d8c0a659c9 req-6de69178-91ff-48a8-8fb1-8889952984e5 service nova] [instance: c0693e4c-30b2-4eda-be1e-f6186d78038b] Updated VIF entry in instance network info cache for port 7c42e931-e162-4201-8483-8606a86e0dff. {{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 982.976500] env[63028]: DEBUG nova.network.neutron [req-1ba43428-a0cc-4110-9857-30d8c0a659c9 req-6de69178-91ff-48a8-8fb1-8889952984e5 service nova] [instance: c0693e4c-30b2-4eda-be1e-f6186d78038b] Updating instance_info_cache with network_info: [{"id": "7c42e931-e162-4201-8483-8606a86e0dff", "address": "fa:16:3e:1a:2a:74", "network": {"id": "063de6d9-43e6-4adb-88dc-d4fe17058488", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-659483097-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68de7445caeb4381b9e68c685ccb5e0b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b356db78-99c7-4464-822c-fc7e193f7878", "external-id": "nsx-vlan-transportzone-231", "segmentation_id": 231, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c42e931-e1", "ovs_interfaceid": "7c42e931-e162-4201-8483-8606a86e0dff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 982.981938] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06ed434f-342a-42a3-aeb4-9f50f9798b23 {{(pid=63028) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.990307] env[63028]: DEBUG oslo_vmware.api [None req-a617f197-03a9-4ac7-af76-ff750a350c76 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': task-2735979, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.31083} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.993429] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-a617f197-03a9-4ac7-af76-ff750a350c76 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] c0693e4c-30b2-4eda-be1e-f6186d78038b/c0693e4c-30b2-4eda-be1e-f6186d78038b.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 982.993724] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-a617f197-03a9-4ac7-af76-ff750a350c76 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: c0693e4c-30b2-4eda-be1e-f6186d78038b] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 982.994016] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1b4d34ab-f123-4b23-959a-1d6fc1f0b270 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.997048] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17984851-d800-4ec8-ad71-feb2799d8e15 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.018223] env[63028]: DEBUG nova.compute.provider_tree [None req-580ecdb3-2bd1-4068-afc1-9df515addc7f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 983.021429] env[63028]: DEBUG oslo_vmware.api [None req-a617f197-03a9-4ac7-af76-ff750a350c76 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Waiting for the task: (returnval){ [ 983.021429] env[63028]: value = "task-2735983" [ 983.021429] env[63028]: _type = "Task" [ 983.021429] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.030845] env[63028]: DEBUG oslo_vmware.api [None req-a617f197-03a9-4ac7-af76-ff750a350c76 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': task-2735983, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.036969] env[63028]: DEBUG nova.network.neutron [None req-2021c84d-1019-4fb3-b96c-749c278c13a7 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Successfully created port: c5f1d585-d624-4525-a5b2-132b18bf9378 {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 983.178047] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2735980, 'name': CreateVM_Task, 'duration_secs': 1.366843} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.178233] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 983.179397] env[63028]: DEBUG oslo_concurrency.lockutils [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 983.179638] env[63028]: DEBUG oslo_concurrency.lockutils [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 983.180287] env[63028]: DEBUG oslo_concurrency.lockutils [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 983.180498] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d565e2ad-16c6-48af-912f-eb486379765e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.185812] env[63028]: DEBUG oslo_vmware.api [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Waiting for the task: (returnval){ [ 983.185812] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5236c7c6-7fc8-a5a6-2716-a850cad0942b" [ 983.185812] env[63028]: _type = "Task" [ 983.185812] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.193699] env[63028]: DEBUG oslo_vmware.api [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5236c7c6-7fc8-a5a6-2716-a850cad0942b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.310054] env[63028]: DEBUG oslo_vmware.api [None req-5a58b678-dd27-4716-b720-edf6a180012d tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2735982, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.117248} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.310354] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-5a58b678-dd27-4716-b720-edf6a180012d tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: c386c117-e255-4c3b-9a37-011e517277de] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 983.311184] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-020a0f84-a209-4fad-b5ca-6a1be7462ac0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.334181] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-5a58b678-dd27-4716-b720-edf6a180012d tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: c386c117-e255-4c3b-9a37-011e517277de] Reconfiguring VM instance instance-00000058 to attach disk [datastore2] c386c117-e255-4c3b-9a37-011e517277de/c386c117-e255-4c3b-9a37-011e517277de.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 983.334497] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e231e3a0-eb67-4aca-9146-3445dd7af1bd {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.354241] env[63028]: DEBUG oslo_vmware.api [None req-5a58b678-dd27-4716-b720-edf6a180012d tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Waiting for the task: (returnval){ [ 983.354241] env[63028]: value = "task-2735984" [ 983.354241] env[63028]: _type = "Task" [ 983.354241] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.362233] env[63028]: DEBUG oslo_vmware.api [None req-5a58b678-dd27-4716-b720-edf6a180012d tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2735984, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.363266] env[63028]: DEBUG nova.network.neutron [None req-459fd01e-3304-4c20-a3b6-e4be4925e19b tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Updating instance_info_cache with network_info: [{"id": "2e2d8403-826c-4e24-ba3c-123d444d1fdc", "address": "fa:16:3e:09:d0:a2", "network": {"id": "2b335013-49f6-4f84-b6b6-33d71818b2b9", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-2106845332-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.232", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "11332c2adbdc41928d4bf084435e2037", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f01bbee7-8b9a-46be-891e-59d8142fb359", "external-id": "nsx-vlan-transportzone-145", "segmentation_id": 145, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2e2d8403-82", "ovs_interfaceid": "2e2d8403-826c-4e24-ba3c-123d444d1fdc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 983.364769] env[63028]: DEBUG oslo_concurrency.lockutils [None req-42d96fef-1c11-4e96-a2d8-a9fca09f962b tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Acquiring lock "refresh_cache-b77ba7d6-305e-4b60-a4b7-9353c12c3920" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 983.365025] env[63028]: DEBUG oslo_concurrency.lockutils [None req-42d96fef-1c11-4e96-a2d8-a9fca09f962b tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Acquired lock "refresh_cache-b77ba7d6-305e-4b60-a4b7-9353c12c3920" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 983.365302] env[63028]: DEBUG nova.network.neutron [None req-42d96fef-1c11-4e96-a2d8-a9fca09f962b tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: b77ba7d6-305e-4b60-a4b7-9353c12c3920] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 983.478702] env[63028]: DEBUG oslo_concurrency.lockutils [req-1ba43428-a0cc-4110-9857-30d8c0a659c9 req-6de69178-91ff-48a8-8fb1-8889952984e5 service nova] Releasing lock "refresh_cache-c0693e4c-30b2-4eda-be1e-f6186d78038b" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 983.523830] env[63028]: DEBUG nova.scheduler.client.report [None req-580ecdb3-2bd1-4068-afc1-9df515addc7f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': 
{'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 983.537210] env[63028]: DEBUG oslo_vmware.api [None req-a617f197-03a9-4ac7-af76-ff750a350c76 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': task-2735983, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.115542} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.537516] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-a617f197-03a9-4ac7-af76-ff750a350c76 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: c0693e4c-30b2-4eda-be1e-f6186d78038b] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 983.538897] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f215d862-1df0-4abb-bb25-4342c72f5a46 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.562391] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-a617f197-03a9-4ac7-af76-ff750a350c76 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: c0693e4c-30b2-4eda-be1e-f6186d78038b] Reconfiguring VM instance instance-00000059 to attach disk [datastore1] c0693e4c-30b2-4eda-be1e-f6186d78038b/c0693e4c-30b2-4eda-be1e-f6186d78038b.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 983.563196] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-464bd675-3ba9-4f45-9ffa-0c8f7f5e249e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.585210] env[63028]: DEBUG oslo_vmware.api [None req-a617f197-03a9-4ac7-af76-ff750a350c76 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Waiting for the task: (returnval){ [ 983.585210] env[63028]: value = "task-2735985" [ 983.585210] env[63028]: _type = "Task" [ 983.585210] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.596931] env[63028]: DEBUG oslo_vmware.api [None req-a617f197-03a9-4ac7-af76-ff750a350c76 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': task-2735985, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.697596] env[63028]: DEBUG oslo_vmware.api [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5236c7c6-7fc8-a5a6-2716-a850cad0942b, 'name': SearchDatastore_Task, 'duration_secs': 0.01154} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.697879] env[63028]: DEBUG oslo_concurrency.lockutils [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 983.698108] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 983.698348] env[63028]: DEBUG oslo_concurrency.lockutils [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 983.698495] env[63028]: DEBUG oslo_concurrency.lockutils [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 983.698954] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 983.698954] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fde5fd4d-44e3-4952-9ac2-06f79f3ccf31 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.710221] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 983.710221] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 983.710726] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1e88db37-cb8f-4d4d-b794-3565e73bff51 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.715841] env[63028]: DEBUG oslo_vmware.api [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Waiting for the task: (returnval){ [ 983.715841] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5216aa29-b88a-93c5-6fc5-d86bf92e0dc0" [ 983.715841] env[63028]: _type = "Task" [ 983.715841] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.724324] env[63028]: DEBUG oslo_vmware.api [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5216aa29-b88a-93c5-6fc5-d86bf92e0dc0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.725532] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a5eede3f-5eda-4dbb-8c63-064af2d03d0d tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Acquiring lock "8bb61bfa-d44e-4e06-867a-445d9b3db660" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 983.725750] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a5eede3f-5eda-4dbb-8c63-064af2d03d0d tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Lock "8bb61bfa-d44e-4e06-867a-445d9b3db660" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 983.864723] env[63028]: DEBUG oslo_vmware.api [None req-5a58b678-dd27-4716-b720-edf6a180012d tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2735984, 'name': ReconfigVM_Task, 'duration_secs': 0.33829} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.864723] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-5a58b678-dd27-4716-b720-edf6a180012d tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: c386c117-e255-4c3b-9a37-011e517277de] Reconfigured VM instance instance-00000058 to attach disk [datastore2] c386c117-e255-4c3b-9a37-011e517277de/c386c117-e255-4c3b-9a37-011e517277de.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 983.865232] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d738486a-47fd-4c8a-b85f-1c59d9fe2522 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.867519] env[63028]: DEBUG oslo_concurrency.lockutils [None req-459fd01e-3304-4c20-a3b6-e4be4925e19b tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Releasing lock "refresh_cache-85aafadb-81d6-4687-aed1-fbe829e5f95f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 983.875789] env[63028]: DEBUG oslo_vmware.api [None req-5a58b678-dd27-4716-b720-edf6a180012d tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Waiting for the task: (returnval){ [ 983.875789] env[63028]: value = "task-2735986" [ 983.875789] env[63028]: _type = "Task" [ 983.875789] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.884860] env[63028]: DEBUG oslo_vmware.api [None req-5a58b678-dd27-4716-b720-edf6a180012d tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2735986, 'name': Rename_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.908273] env[63028]: WARNING nova.network.neutron [None req-42d96fef-1c11-4e96-a2d8-a9fca09f962b tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: b77ba7d6-305e-4b60-a4b7-9353c12c3920] c2f1496c-e3fd-43db-a032-12cdacdb4e46 already exists in list: networks containing: ['c2f1496c-e3fd-43db-a032-12cdacdb4e46']. ignoring it [ 983.984737] env[63028]: DEBUG nova.compute.manager [None req-2021c84d-1019-4fb3-b96c-749c278c13a7 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Start spawning the instance on the hypervisor. 
{{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 984.013343] env[63028]: DEBUG nova.virt.hardware [None req-2021c84d-1019-4fb3-b96c-749c278c13a7 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 984.013691] env[63028]: DEBUG nova.virt.hardware [None req-2021c84d-1019-4fb3-b96c-749c278c13a7 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 984.013889] env[63028]: DEBUG nova.virt.hardware [None req-2021c84d-1019-4fb3-b96c-749c278c13a7 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 984.014154] env[63028]: DEBUG nova.virt.hardware [None req-2021c84d-1019-4fb3-b96c-749c278c13a7 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 984.014320] env[63028]: DEBUG nova.virt.hardware [None req-2021c84d-1019-4fb3-b96c-749c278c13a7 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 984.014472] env[63028]: DEBUG nova.virt.hardware [None req-2021c84d-1019-4fb3-b96c-749c278c13a7 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 984.014690] env[63028]: DEBUG nova.virt.hardware [None req-2021c84d-1019-4fb3-b96c-749c278c13a7 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 984.014855] env[63028]: DEBUG nova.virt.hardware [None req-2021c84d-1019-4fb3-b96c-749c278c13a7 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 984.015043] env[63028]: DEBUG nova.virt.hardware [None 
req-2021c84d-1019-4fb3-b96c-749c278c13a7 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 984.015214] env[63028]: DEBUG nova.virt.hardware [None req-2021c84d-1019-4fb3-b96c-749c278c13a7 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 984.015390] env[63028]: DEBUG nova.virt.hardware [None req-2021c84d-1019-4fb3-b96c-749c278c13a7 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 984.016369] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4687c244-89c7-41d7-83d7-d61018448f77 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.024707] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b8a57af-8fc3-4d64-aa4c-3bdc34e5dd3f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.095076] env[63028]: DEBUG oslo_vmware.api [None req-a617f197-03a9-4ac7-af76-ff750a350c76 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': task-2735985, 'name': ReconfigVM_Task, 'duration_secs': 0.505078} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.095466] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-a617f197-03a9-4ac7-af76-ff750a350c76 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: c0693e4c-30b2-4eda-be1e-f6186d78038b] Reconfigured VM instance instance-00000059 to attach disk [datastore1] c0693e4c-30b2-4eda-be1e-f6186d78038b/c0693e4c-30b2-4eda-be1e-f6186d78038b.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 984.096126] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-171a6cf4-4b46-444f-a2f2-08a8b6a47fa9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.102750] env[63028]: DEBUG oslo_vmware.api [None req-a617f197-03a9-4ac7-af76-ff750a350c76 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Waiting for the task: (returnval){ [ 984.102750] env[63028]: value = "task-2735987" [ 984.102750] env[63028]: _type = "Task" [ 984.102750] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.113892] env[63028]: DEBUG oslo_vmware.api [None req-a617f197-03a9-4ac7-af76-ff750a350c76 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': task-2735987, 'name': Rename_Task} progress is 5%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.207725] env[63028]: DEBUG nova.network.neutron [None req-42d96fef-1c11-4e96-a2d8-a9fca09f962b tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: b77ba7d6-305e-4b60-a4b7-9353c12c3920] Updating instance_info_cache with network_info: [{"id": "9efd2ef2-d319-4038-ab28-44a46bd597d8", "address": "fa:16:3e:90:60:0c", "network": {"id": "c2f1496c-e3fd-43db-a032-12cdacdb4e46", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1287620143-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.220", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "25a6457f62d149629c09589feb1a550c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9efd2ef2-d3", "ovs_interfaceid": "9efd2ef2-d319-4038-ab28-44a46bd597d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "98722f90-f2d2-4a4a-9e68-ad5c32b18435", "address": "fa:16:3e:ab:1c:75", "network": {"id": "c2f1496c-e3fd-43db-a032-12cdacdb4e46", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1287620143-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "25a6457f62d149629c09589feb1a550c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98722f90-f2", "ovs_interfaceid": "98722f90-f2d2-4a4a-9e68-ad5c32b18435", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 984.227546] env[63028]: DEBUG oslo_vmware.api [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5216aa29-b88a-93c5-6fc5-d86bf92e0dc0, 'name': SearchDatastore_Task, 'duration_secs': 0.026665} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.228592] env[63028]: DEBUG nova.compute.utils [None req-a5eede3f-5eda-4dbb-8c63-064af2d03d0d tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 984.233020] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-36004151-d584-4475-bc19-2630c5db5d97 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.234367] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-459fd01e-3304-4c20-a3b6-e4be4925e19b tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 984.235253] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76f9fa00-82d8-4d79-8052-2f5fbc87b916 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.239134] env[63028]: DEBUG oslo_vmware.api [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Waiting for the task: (returnval){ [ 984.239134] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]521306e2-8404-2a30-e51c-7bc2a8549c89" [ 984.239134] env[63028]: _type = "Task" [ 984.239134] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.246042] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-459fd01e-3304-4c20-a3b6-e4be4925e19b tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 984.246644] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-82c1e844-a2d4-4fd6-a894-7d29822613e9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.251911] env[63028]: DEBUG oslo_vmware.api [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]521306e2-8404-2a30-e51c-7bc2a8549c89, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.312283] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-459fd01e-3304-4c20-a3b6-e4be4925e19b tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 984.312589] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-459fd01e-3304-4c20-a3b6-e4be4925e19b tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Deleting contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 984.312836] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-459fd01e-3304-4c20-a3b6-e4be4925e19b tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Deleting the datastore file [datastore2] 85aafadb-81d6-4687-aed1-fbe829e5f95f {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 984.313199] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1d504f25-9671-4e9f-8ea4-b34611c16129 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.321033] env[63028]: DEBUG oslo_vmware.api [None req-459fd01e-3304-4c20-a3b6-e4be4925e19b tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Waiting for the task: (returnval){ [ 984.321033] env[63028]: value = "task-2735989" [ 984.321033] env[63028]: _type = "Task" [ 984.321033] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.329555] env[63028]: DEBUG oslo_vmware.api [None req-459fd01e-3304-4c20-a3b6-e4be4925e19b tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2735989, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.385128] env[63028]: DEBUG oslo_vmware.api [None req-5a58b678-dd27-4716-b720-edf6a180012d tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2735986, 'name': Rename_Task, 'duration_secs': 0.166869} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.385423] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a58b678-dd27-4716-b720-edf6a180012d tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: c386c117-e255-4c3b-9a37-011e517277de] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 984.385722] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-255a8263-ebf2-46d4-9323-db1fbe512a2b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.391964] env[63028]: DEBUG oslo_vmware.api [None req-5a58b678-dd27-4716-b720-edf6a180012d tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Waiting for the task: (returnval){ [ 984.391964] env[63028]: value = "task-2735990" [ 984.391964] env[63028]: _type = "Task" [ 984.391964] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.400158] env[63028]: DEBUG oslo_vmware.api [None req-5a58b678-dd27-4716-b720-edf6a180012d tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2735990, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.546636] env[63028]: DEBUG oslo_concurrency.lockutils [None req-580ecdb3-2bd1-4068-afc1-9df515addc7f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.607s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 984.549545] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c2daee28-2f1e-4c7b-96d5-697df33b41b5 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.293s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 984.549851] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c2daee28-2f1e-4c7b-96d5-697df33b41b5 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 984.552320] env[63028]: DEBUG oslo_concurrency.lockutils [None req-6e8c6f51-983a-4bea-9d02-9ada98e80f7c tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.744s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 984.552722] env[63028]: DEBUG oslo_concurrency.lockutils [None req-6e8c6f51-983a-4bea-9d02-9ada98e80f7c tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63028) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 984.555424] env[63028]: DEBUG oslo_concurrency.lockutils [None req-512f154c-941d-4be2-85d8-fd73b2fb3370 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.470s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 984.557018] env[63028]: INFO nova.compute.claims [None req-512f154c-941d-4be2-85d8-fd73b2fb3370 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] [instance: 3fb46d02-7914-4d08-b63b-f3447ba1b81a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 984.585966] env[63028]: INFO nova.scheduler.client.report [None req-c2daee28-2f1e-4c7b-96d5-697df33b41b5 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Deleted allocations for instance 672695c2-06f3-4790-a459-4b575baf29d3 [ 984.617535] env[63028]: DEBUG oslo_vmware.api [None req-a617f197-03a9-4ac7-af76-ff750a350c76 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': task-2735987, 'name': Rename_Task, 'duration_secs': 0.148041} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.617535] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-a617f197-03a9-4ac7-af76-ff750a350c76 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: c0693e4c-30b2-4eda-be1e-f6186d78038b] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 984.617535] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e2615380-a366-480b-a9f7-9baef06adb0e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.622949] env[63028]: DEBUG oslo_vmware.api [None req-a617f197-03a9-4ac7-af76-ff750a350c76 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Waiting for the task: (returnval){ [ 984.622949] env[63028]: value = "task-2735991" [ 984.622949] env[63028]: _type = "Task" [ 984.622949] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.631573] env[63028]: DEBUG oslo_vmware.api [None req-a617f197-03a9-4ac7-af76-ff750a350c76 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': task-2735991, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.663135] env[63028]: DEBUG nova.network.neutron [None req-2021c84d-1019-4fb3-b96c-749c278c13a7 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Successfully updated port: c5f1d585-d624-4525-a5b2-132b18bf9378 {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 984.711329] env[63028]: DEBUG oslo_concurrency.lockutils [None req-42d96fef-1c11-4e96-a2d8-a9fca09f962b tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Releasing lock "refresh_cache-b77ba7d6-305e-4b60-a4b7-9353c12c3920" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 984.712019] env[63028]: DEBUG oslo_concurrency.lockutils [None req-42d96fef-1c11-4e96-a2d8-a9fca09f962b tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Acquiring lock "b77ba7d6-305e-4b60-a4b7-9353c12c3920" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 984.712216] env[63028]: DEBUG oslo_concurrency.lockutils [None req-42d96fef-1c11-4e96-a2d8-a9fca09f962b tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Acquired lock "b77ba7d6-305e-4b60-a4b7-9353c12c3920" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 984.713090] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bed454e3-44b3-4ab5-815e-646ce9824738 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.733271] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a5eede3f-5eda-4dbb-8c63-064af2d03d0d tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Lock "8bb61bfa-d44e-4e06-867a-445d9b3db660" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 984.734181] env[63028]: DEBUG nova.virt.hardware [None req-42d96fef-1c11-4e96-a2d8-a9fca09f962b tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 984.734402] env[63028]: DEBUG nova.virt.hardware [None req-42d96fef-1c11-4e96-a2d8-a9fca09f962b tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 984.734563] 
env[63028]: DEBUG nova.virt.hardware [None req-42d96fef-1c11-4e96-a2d8-a9fca09f962b tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 984.734747] env[63028]: DEBUG nova.virt.hardware [None req-42d96fef-1c11-4e96-a2d8-a9fca09f962b tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 984.734894] env[63028]: DEBUG nova.virt.hardware [None req-42d96fef-1c11-4e96-a2d8-a9fca09f962b tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 984.735055] env[63028]: DEBUG nova.virt.hardware [None req-42d96fef-1c11-4e96-a2d8-a9fca09f962b tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 984.735266] env[63028]: DEBUG nova.virt.hardware [None req-42d96fef-1c11-4e96-a2d8-a9fca09f962b tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 984.735423] env[63028]: DEBUG nova.virt.hardware [None req-42d96fef-1c11-4e96-a2d8-a9fca09f962b tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 984.735587] env[63028]: DEBUG nova.virt.hardware [None req-42d96fef-1c11-4e96-a2d8-a9fca09f962b tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 984.735756] env[63028]: DEBUG nova.virt.hardware [None req-42d96fef-1c11-4e96-a2d8-a9fca09f962b tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 984.735965] env[63028]: DEBUG nova.virt.hardware [None req-42d96fef-1c11-4e96-a2d8-a9fca09f962b tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 984.742209] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-42d96fef-1c11-4e96-a2d8-a9fca09f962b tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: b77ba7d6-305e-4b60-a4b7-9353c12c3920] Reconfiguring VM to attach interface {{(pid=63028) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 984.742971] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with 
opID=oslo.vmware-43ec89b2-3ae6-4b1d-8d57-f964fa560642 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.769102] env[63028]: DEBUG oslo_vmware.api [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]521306e2-8404-2a30-e51c-7bc2a8549c89, 'name': SearchDatastore_Task, 'duration_secs': 0.019661} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.770589] env[63028]: DEBUG oslo_concurrency.lockutils [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 984.770927] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] 46dc76bc-854f-46ad-9db5-21cf6f40fb21/46dc76bc-854f-46ad-9db5-21cf6f40fb21.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 984.771283] env[63028]: DEBUG oslo_vmware.api [None req-42d96fef-1c11-4e96-a2d8-a9fca09f962b tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Waiting for the task: (returnval){ [ 984.771283] env[63028]: value = "task-2735992" [ 984.771283] env[63028]: _type = "Task" [ 984.771283] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.771575] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4a3a3caa-3db8-43ac-9f5a-b436b71436de {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.782440] env[63028]: DEBUG oslo_vmware.api [None req-42d96fef-1c11-4e96-a2d8-a9fca09f962b tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2735992, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.784892] env[63028]: DEBUG oslo_vmware.api [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Waiting for the task: (returnval){ [ 984.784892] env[63028]: value = "task-2735993" [ 984.784892] env[63028]: _type = "Task" [ 984.784892] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.793037] env[63028]: DEBUG oslo_vmware.api [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2735993, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.830793] env[63028]: DEBUG oslo_vmware.api [None req-459fd01e-3304-4c20-a3b6-e4be4925e19b tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2735989, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.20415} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.831237] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-459fd01e-3304-4c20-a3b6-e4be4925e19b tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 984.831237] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-459fd01e-3304-4c20-a3b6-e4be4925e19b tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Deleted contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 984.831440] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-459fd01e-3304-4c20-a3b6-e4be4925e19b tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 984.850709] env[63028]: INFO nova.scheduler.client.report [None req-459fd01e-3304-4c20-a3b6-e4be4925e19b tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Deleted allocations for instance 85aafadb-81d6-4687-aed1-fbe829e5f95f [ 984.903219] env[63028]: DEBUG oslo_vmware.api [None req-5a58b678-dd27-4716-b720-edf6a180012d tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2735990, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.070648] env[63028]: DEBUG oslo_concurrency.lockutils [None req-6e8c6f51-983a-4bea-9d02-9ada98e80f7c tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Lock "63524cd8-81de-419f-bb07-0326f3cb393f" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 42.481s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 985.071848] env[63028]: DEBUG oslo_concurrency.lockutils [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Lock "63524cd8-81de-419f-bb07-0326f3cb393f" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 19.092s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 985.072114] env[63028]: INFO nova.compute.manager [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Unshelving [ 985.094881] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c2daee28-2f1e-4c7b-96d5-697df33b41b5 tempest-ImagesTestJSON-852617634 tempest-ImagesTestJSON-852617634-project-member] Lock "672695c2-06f3-4790-a459-4b575baf29d3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.055s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 985.131282] env[63028]: INFO nova.scheduler.client.report [None req-580ecdb3-2bd1-4068-afc1-9df515addc7f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Deleted allocation for migration 8ac4afc4-5919-47ff-9cce-ace0d06a50ba [ 985.139565] env[63028]: DEBUG oslo_vmware.api [None req-a617f197-03a9-4ac7-af76-ff750a350c76 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': task-2735991, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.154512] env[63028]: DEBUG oslo_vmware.rw_handles [None req-029bbebf-b712-4d44-b93d-a9ad42e63e3e tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52d888c5-5bc4-74ca-e5fe-a3c4c5c7bf40/disk-0.vmdk. {{(pid=63028) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 985.155598] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f67623cd-d7b1-41a1-a255-c511febd2099 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.163461] env[63028]: DEBUG oslo_vmware.rw_handles [None req-029bbebf-b712-4d44-b93d-a9ad42e63e3e tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52d888c5-5bc4-74ca-e5fe-a3c4c5c7bf40/disk-0.vmdk is in state: ready. 
{{(pid=63028) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 985.163838] env[63028]: ERROR oslo_vmware.rw_handles [None req-029bbebf-b712-4d44-b93d-a9ad42e63e3e tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52d888c5-5bc4-74ca-e5fe-a3c4c5c7bf40/disk-0.vmdk due to incomplete transfer. [ 985.164213] env[63028]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-d1ed45f7-aa2d-4de6-a8a8-d2dbdb6bfca4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.168398] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2021c84d-1019-4fb3-b96c-749c278c13a7 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Acquiring lock "refresh_cache-d41a1eae-bb89-4222-9466-d86af891c654" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 985.168474] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2021c84d-1019-4fb3-b96c-749c278c13a7 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Acquired lock "refresh_cache-d41a1eae-bb89-4222-9466-d86af891c654" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 985.168686] env[63028]: DEBUG nova.network.neutron [None req-2021c84d-1019-4fb3-b96c-749c278c13a7 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 985.173105] env[63028]: DEBUG oslo_vmware.rw_handles [None req-029bbebf-b712-4d44-b93d-a9ad42e63e3e tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52d888c5-5bc4-74ca-e5fe-a3c4c5c7bf40/disk-0.vmdk. 
{{(pid=63028) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 985.173353] env[63028]: DEBUG nova.virt.vmwareapi.images [None req-029bbebf-b712-4d44-b93d-a9ad42e63e3e tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19] Uploaded image 6d24fdbe-d9bb-49de-8413-30e0956a718a to the Glance image server {{(pid=63028) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 985.175607] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-029bbebf-b712-4d44-b93d-a9ad42e63e3e tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19] Destroying the VM {{(pid=63028) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 985.175924] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-b7a8df26-dcea-46d6-9dee-d0a1516d7a14 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.183477] env[63028]: DEBUG oslo_vmware.api [None req-029bbebf-b712-4d44-b93d-a9ad42e63e3e tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Waiting for the task: (returnval){ [ 985.183477] env[63028]: value = "task-2735994" [ 985.183477] env[63028]: _type = "Task" [ 985.183477] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.194486] env[63028]: DEBUG oslo_vmware.api [None req-029bbebf-b712-4d44-b93d-a9ad42e63e3e tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2735994, 'name': Destroy_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.284474] env[63028]: DEBUG oslo_vmware.api [None req-42d96fef-1c11-4e96-a2d8-a9fca09f962b tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2735992, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.296149] env[63028]: DEBUG oslo_vmware.api [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2735993, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.356281] env[63028]: DEBUG oslo_concurrency.lockutils [None req-459fd01e-3304-4c20-a3b6-e4be4925e19b tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 985.402891] env[63028]: DEBUG oslo_vmware.api [None req-5a58b678-dd27-4716-b720-edf6a180012d tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2735990, 'name': PowerOnVM_Task, 'duration_secs': 0.55251} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.402891] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a58b678-dd27-4716-b720-edf6a180012d tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: c386c117-e255-4c3b-9a37-011e517277de] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 985.403159] env[63028]: INFO nova.compute.manager [None req-5a58b678-dd27-4716-b720-edf6a180012d tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: c386c117-e255-4c3b-9a37-011e517277de] Took 10.76 seconds to spawn the instance on the hypervisor. [ 985.403198] env[63028]: DEBUG nova.compute.manager [None req-5a58b678-dd27-4716-b720-edf6a180012d tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: c386c117-e255-4c3b-9a37-011e517277de] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 985.404018] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9a29c5e-302f-46a0-bfaf-e735dc1de179 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.641872] env[63028]: DEBUG oslo_concurrency.lockutils [None req-580ecdb3-2bd1-4068-afc1-9df515addc7f tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Lock "ed872f21-c2c4-4597-8c9e-9f8d2202b707" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 29.971s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 985.643492] env[63028]: DEBUG oslo_vmware.api [None req-a617f197-03a9-4ac7-af76-ff750a350c76 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': task-2735991, 'name': PowerOnVM_Task, 'duration_secs': 0.672318} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.645789] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-a617f197-03a9-4ac7-af76-ff750a350c76 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: c0693e4c-30b2-4eda-be1e-f6186d78038b] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 985.646015] env[63028]: INFO nova.compute.manager [None req-a617f197-03a9-4ac7-af76-ff750a350c76 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: c0693e4c-30b2-4eda-be1e-f6186d78038b] Took 8.89 seconds to spawn the instance on the hypervisor. 
[ 985.646228] env[63028]: DEBUG nova.compute.manager [None req-a617f197-03a9-4ac7-af76-ff750a350c76 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: c0693e4c-30b2-4eda-be1e-f6186d78038b] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 985.647213] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11b4a153-83b5-4a11-8509-859e5de67959 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.695179] env[63028]: DEBUG oslo_vmware.api [None req-029bbebf-b712-4d44-b93d-a9ad42e63e3e tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2735994, 'name': Destroy_Task} progress is 33%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.705790] env[63028]: DEBUG nova.network.neutron [None req-2021c84d-1019-4fb3-b96c-749c278c13a7 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Instance cache missing network info. {{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 985.787750] env[63028]: DEBUG oslo_vmware.api [None req-42d96fef-1c11-4e96-a2d8-a9fca09f962b tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2735992, 'name': ReconfigVM_Task, 'duration_secs': 0.770955} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.790684] env[63028]: DEBUG oslo_concurrency.lockutils [None req-42d96fef-1c11-4e96-a2d8-a9fca09f962b tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Releasing lock "b77ba7d6-305e-4b60-a4b7-9353c12c3920" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 985.790978] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-42d96fef-1c11-4e96-a2d8-a9fca09f962b tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: b77ba7d6-305e-4b60-a4b7-9353c12c3920] Reconfigured VM to attach interface {{(pid=63028) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 985.801115] env[63028]: DEBUG oslo_vmware.api [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2735993, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.61381} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.801451] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] 46dc76bc-854f-46ad-9db5-21cf6f40fb21/46dc76bc-854f-46ad-9db5-21cf6f40fb21.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 985.801571] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 985.801824] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-41c50949-b9d2-4a1e-8fa0-d05f42404dc0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.810725] env[63028]: DEBUG oslo_vmware.api [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Waiting for the task: (returnval){ [ 985.810725] env[63028]: value = "task-2735995" [ 985.810725] env[63028]: _type = "Task" [ 985.810725] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.816298] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a5eede3f-5eda-4dbb-8c63-064af2d03d0d tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Acquiring lock "8bb61bfa-d44e-4e06-867a-445d9b3db660" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 985.816502] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a5eede3f-5eda-4dbb-8c63-064af2d03d0d tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Lock "8bb61bfa-d44e-4e06-867a-445d9b3db660" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 985.816726] env[63028]: INFO nova.compute.manager [None req-a5eede3f-5eda-4dbb-8c63-064af2d03d0d tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] Attaching volume fff41433-1dbe-4075-9b8b-6bae1342802a to /dev/sdb [ 985.825498] env[63028]: DEBUG oslo_vmware.api [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2735995, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.858341] env[63028]: DEBUG nova.network.neutron [None req-2021c84d-1019-4fb3-b96c-749c278c13a7 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Updating instance_info_cache with network_info: [{"id": "c5f1d585-d624-4525-a5b2-132b18bf9378", "address": "fa:16:3e:93:da:98", "network": {"id": "49670a42-3caa-4492-8b32-f16f3fe77f4b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1158317332-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b4dcaef840f940bda057d0371cdc5adb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4df917f7-847a-4c0e-b0e3-69a52e4a1554", "external-id": "cl2-zone-457", "segmentation_id": 457, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc5f1d585-d6", "ovs_interfaceid": "c5f1d585-d624-4525-a5b2-132b18bf9378", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 985.861444] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cb6c0ed-d29a-4d90-a28a-4c2d331a2981 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.872222] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-883b6ff4-810d-40af-9e1e-b7c84de9d715 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.886401] env[63028]: DEBUG nova.virt.block_device [None req-a5eede3f-5eda-4dbb-8c63-064af2d03d0d tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] Updating existing volume attachment record: 02066e67-c552-43bb-97a7-ad5fb5576827 {{(pid=63028) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 985.900891] env[63028]: DEBUG nova.compute.manager [req-0a8a2ab4-ec8f-4d07-a35d-cbff333d2790 req-3fdb61ab-06f4-4c37-a6c4-39d84325b9a6 service nova] [instance: 56d6982d-9f76-4952-8c8b-f64b3c8d02fe] Received event network-changed-de1f8509-63e1-41ae-ad48-03c4e8b74871 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 985.900891] env[63028]: DEBUG nova.compute.manager [req-0a8a2ab4-ec8f-4d07-a35d-cbff333d2790 req-3fdb61ab-06f4-4c37-a6c4-39d84325b9a6 service nova] [instance: 56d6982d-9f76-4952-8c8b-f64b3c8d02fe] Refreshing instance network info cache due to event network-changed-de1f8509-63e1-41ae-ad48-03c4e8b74871. 
{{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 985.901240] env[63028]: DEBUG oslo_concurrency.lockutils [req-0a8a2ab4-ec8f-4d07-a35d-cbff333d2790 req-3fdb61ab-06f4-4c37-a6c4-39d84325b9a6 service nova] Acquiring lock "refresh_cache-56d6982d-9f76-4952-8c8b-f64b3c8d02fe" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 985.901419] env[63028]: DEBUG oslo_concurrency.lockutils [req-0a8a2ab4-ec8f-4d07-a35d-cbff333d2790 req-3fdb61ab-06f4-4c37-a6c4-39d84325b9a6 service nova] Acquired lock "refresh_cache-56d6982d-9f76-4952-8c8b-f64b3c8d02fe" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 985.901419] env[63028]: DEBUG nova.network.neutron [req-0a8a2ab4-ec8f-4d07-a35d-cbff333d2790 req-3fdb61ab-06f4-4c37-a6c4-39d84325b9a6 service nova] [instance: 56d6982d-9f76-4952-8c8b-f64b3c8d02fe] Refreshing network info cache for port de1f8509-63e1-41ae-ad48-03c4e8b74871 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 985.925444] env[63028]: INFO nova.compute.manager [None req-5a58b678-dd27-4716-b720-edf6a180012d tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: c386c117-e255-4c3b-9a37-011e517277de] Took 41.03 seconds to build instance. [ 985.935281] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5201e212-4000-4de1-b007-2d4880e20380 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.939102] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d36731fd-6b14-4a17-8896-8c5be755220d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Acquiring lock "c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 985.939363] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d36731fd-6b14-4a17-8896-8c5be755220d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Lock "c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 985.939711] env[63028]: INFO nova.compute.manager [None req-d36731fd-6b14-4a17-8896-8c5be755220d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7] Rebooting instance [ 985.948480] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e8b4724-e034-4554-9c72-620da111e398 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.981401] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-119b250a-ebcf-450b-a41b-a1477b79738a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.988810] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab947433-0a76-4edb-ab94-329d8a18f153 {{(pid=63028) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.002522] env[63028]: DEBUG nova.compute.provider_tree [None req-512f154c-941d-4be2-85d8-fd73b2fb3370 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 986.095826] env[63028]: DEBUG oslo_concurrency.lockutils [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 986.168637] env[63028]: INFO nova.compute.manager [None req-a617f197-03a9-4ac7-af76-ff750a350c76 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: c0693e4c-30b2-4eda-be1e-f6186d78038b] Took 35.64 seconds to build instance. [ 986.196451] env[63028]: DEBUG oslo_vmware.api [None req-029bbebf-b712-4d44-b93d-a9ad42e63e3e tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2735994, 'name': Destroy_Task, 'duration_secs': 0.740719} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.196708] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-029bbebf-b712-4d44-b93d-a9ad42e63e3e tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19] Destroyed the VM [ 986.197043] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-029bbebf-b712-4d44-b93d-a9ad42e63e3e tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19] Deleting Snapshot of the VM instance {{(pid=63028) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 986.197201] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-24c4f325-75fa-4875-b516-8006be096e2b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.203403] env[63028]: DEBUG oslo_vmware.api [None req-029bbebf-b712-4d44-b93d-a9ad42e63e3e tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Waiting for the task: (returnval){ [ 986.203403] env[63028]: value = "task-2735999" [ 986.203403] env[63028]: _type = "Task" [ 986.203403] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.213294] env[63028]: DEBUG oslo_vmware.api [None req-029bbebf-b712-4d44-b93d-a9ad42e63e3e tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2735999, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.297673] env[63028]: DEBUG oslo_concurrency.lockutils [None req-42d96fef-1c11-4e96-a2d8-a9fca09f962b tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Lock "interface-b77ba7d6-305e-4b60-a4b7-9353c12c3920-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.912s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 986.325056] env[63028]: DEBUG oslo_vmware.api [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2735995, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067682} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.325367] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 986.326158] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f193c750-1757-4aae-9ad1-33397585a347 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.348402] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Reconfiguring VM instance instance-0000004e to attach disk [datastore1] 46dc76bc-854f-46ad-9db5-21cf6f40fb21/46dc76bc-854f-46ad-9db5-21cf6f40fb21.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 986.348707] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9a7833d1-1b34-4497-97d2-a37f4a1e86b9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.368855] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2021c84d-1019-4fb3-b96c-749c278c13a7 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Releasing lock "refresh_cache-d41a1eae-bb89-4222-9466-d86af891c654" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 986.369241] env[63028]: DEBUG nova.compute.manager [None req-2021c84d-1019-4fb3-b96c-749c278c13a7 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Instance network_info: |[{"id": "c5f1d585-d624-4525-a5b2-132b18bf9378", "address": "fa:16:3e:93:da:98", "network": {"id": "49670a42-3caa-4492-8b32-f16f3fe77f4b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1158317332-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 
4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b4dcaef840f940bda057d0371cdc5adb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4df917f7-847a-4c0e-b0e3-69a52e4a1554", "external-id": "cl2-zone-457", "segmentation_id": 457, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc5f1d585-d6", "ovs_interfaceid": "c5f1d585-d624-4525-a5b2-132b18bf9378", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 986.369618] env[63028]: DEBUG oslo_vmware.api [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Waiting for the task: (returnval){ [ 986.369618] env[63028]: value = "task-2736000" [ 986.369618] env[63028]: _type = "Task" [ 986.369618] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.369974] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-2021c84d-1019-4fb3-b96c-749c278c13a7 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:93:da:98', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4df917f7-847a-4c0e-b0e3-69a52e4a1554', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c5f1d585-d624-4525-a5b2-132b18bf9378', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 986.377671] env[63028]: DEBUG oslo.service.loopingcall [None req-2021c84d-1019-4fb3-b96c-749c278c13a7 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 986.377969] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 986.381218] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-63d16906-59fe-43e5-ae06-1daa5fc1c709 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.400824] env[63028]: DEBUG oslo_vmware.api [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2736000, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.402115] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 986.402115] env[63028]: value = "task-2736001" [ 986.402115] env[63028]: _type = "Task" [ 986.402115] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.411466] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2736001, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.427717] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5a58b678-dd27-4716-b720-edf6a180012d tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Lock "c386c117-e255-4c3b-9a37-011e517277de" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 42.540s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 986.463216] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d36731fd-6b14-4a17-8896-8c5be755220d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Acquiring lock "refresh_cache-c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 986.463411] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d36731fd-6b14-4a17-8896-8c5be755220d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Acquired lock "refresh_cache-c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 986.463650] env[63028]: DEBUG nova.network.neutron [None req-d36731fd-6b14-4a17-8896-8c5be755220d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 986.505811] env[63028]: DEBUG nova.scheduler.client.report [None req-512f154c-941d-4be2-85d8-fd73b2fb3370 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 986.634469] env[63028]: DEBUG nova.network.neutron [req-0a8a2ab4-ec8f-4d07-a35d-cbff333d2790 req-3fdb61ab-06f4-4c37-a6c4-39d84325b9a6 service nova] [instance: 56d6982d-9f76-4952-8c8b-f64b3c8d02fe] Updated VIF entry in instance network info cache for port de1f8509-63e1-41ae-ad48-03c4e8b74871. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 986.634858] env[63028]: DEBUG nova.network.neutron [req-0a8a2ab4-ec8f-4d07-a35d-cbff333d2790 req-3fdb61ab-06f4-4c37-a6c4-39d84325b9a6 service nova] [instance: 56d6982d-9f76-4952-8c8b-f64b3c8d02fe] Updating instance_info_cache with network_info: [{"id": "de1f8509-63e1-41ae-ad48-03c4e8b74871", "address": "fa:16:3e:4e:c3:80", "network": {"id": "8fd5dd35-416e-45e2-aea5-8c2c22752ef7", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1819490475-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bae448aa28a84aa6863fffc24a5448fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "489b2441-7132-4942-8b61-49cf0ad4400e", "external-id": "nsx-vlan-transportzone-971", "segmentation_id": 971, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapde1f8509-63", "ovs_interfaceid": "de1f8509-63e1-41ae-ad48-03c4e8b74871", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 986.671294] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a617f197-03a9-4ac7-af76-ff750a350c76 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Lock "c0693e4c-30b2-4eda-be1e-f6186d78038b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 37.155s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 986.717755] env[63028]: DEBUG oslo_vmware.api [None req-029bbebf-b712-4d44-b93d-a9ad42e63e3e tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2735999, 'name': RemoveSnapshot_Task} progress is 100%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.806256] env[63028]: DEBUG nova.compute.manager [req-5ceb210f-c214-4270-8128-37511426c2c6 req-4f7c294d-90e3-4fd6-9f40-0ff99a317b18 service nova] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Received event network-vif-unplugged-2e2d8403-826c-4e24-ba3c-123d444d1fdc {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 986.807046] env[63028]: DEBUG oslo_concurrency.lockutils [req-5ceb210f-c214-4270-8128-37511426c2c6 req-4f7c294d-90e3-4fd6-9f40-0ff99a317b18 service nova] Acquiring lock "85aafadb-81d6-4687-aed1-fbe829e5f95f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 986.807046] env[63028]: DEBUG oslo_concurrency.lockutils [req-5ceb210f-c214-4270-8128-37511426c2c6 req-4f7c294d-90e3-4fd6-9f40-0ff99a317b18 service nova] Lock "85aafadb-81d6-4687-aed1-fbe829e5f95f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 986.807046] env[63028]: DEBUG oslo_concurrency.lockutils [req-5ceb210f-c214-4270-8128-37511426c2c6 req-4f7c294d-90e3-4fd6-9f40-0ff99a317b18 service nova] Lock "85aafadb-81d6-4687-aed1-fbe829e5f95f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 986.807351] env[63028]: DEBUG nova.compute.manager [req-5ceb210f-c214-4270-8128-37511426c2c6 req-4f7c294d-90e3-4fd6-9f40-0ff99a317b18 service nova] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] No waiting events found dispatching network-vif-unplugged-2e2d8403-826c-4e24-ba3c-123d444d1fdc {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 986.807351] env[63028]: WARNING nova.compute.manager [req-5ceb210f-c214-4270-8128-37511426c2c6 req-4f7c294d-90e3-4fd6-9f40-0ff99a317b18 service nova] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Received unexpected event network-vif-unplugged-2e2d8403-826c-4e24-ba3c-123d444d1fdc for instance with vm_state shelved_offloaded and task_state None. [ 986.891222] env[63028]: DEBUG oslo_vmware.api [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2736000, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.911052] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2736001, 'name': CreateVM_Task, 'duration_secs': 0.465013} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.911225] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 986.912011] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2021c84d-1019-4fb3-b96c-749c278c13a7 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 986.912192] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2021c84d-1019-4fb3-b96c-749c278c13a7 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 986.912556] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2021c84d-1019-4fb3-b96c-749c278c13a7 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 986.913198] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6c08bcf8-49ba-426a-a734-e9c9ba723f32 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.919885] env[63028]: DEBUG oslo_vmware.api [None req-2021c84d-1019-4fb3-b96c-749c278c13a7 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Waiting for the task: (returnval){ [ 986.919885] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]522078d3-77e4-8566-8250-5c1eaf4b860d" [ 986.919885] env[63028]: _type = "Task" [ 986.919885] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.928048] env[63028]: DEBUG oslo_vmware.api [None req-2021c84d-1019-4fb3-b96c-749c278c13a7 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]522078d3-77e4-8566-8250-5c1eaf4b860d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.010784] env[63028]: DEBUG oslo_concurrency.lockutils [None req-512f154c-941d-4be2-85d8-fd73b2fb3370 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.455s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 987.011332] env[63028]: DEBUG nova.compute.manager [None req-512f154c-941d-4be2-85d8-fd73b2fb3370 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] [instance: 3fb46d02-7914-4d08-b63b-f3447ba1b81a] Start building networks asynchronously for instance. 
{{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 987.014066] env[63028]: DEBUG oslo_concurrency.lockutils [None req-6c9f5b6d-d6b4-4ed0-994c-55d12d9c1a85 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 18.654s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 987.014273] env[63028]: DEBUG oslo_concurrency.lockutils [None req-6c9f5b6d-d6b4-4ed0-994c-55d12d9c1a85 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 987.016302] env[63028]: DEBUG oslo_concurrency.lockutils [None req-217f2845-2515-4130-b1d0-de641f56c44e tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 17.076s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 987.016453] env[63028]: DEBUG oslo_concurrency.lockutils [None req-217f2845-2515-4130-b1d0-de641f56c44e tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 987.018174] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8914f4fd-23d9-41ba-bde1-faf65d9122a3 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.619s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 987.018364] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8914f4fd-23d9-41ba-bde1-faf65d9122a3 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 987.020511] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5304d7dd-e594-479a-82a5-046691037310 tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.308s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 987.021921] env[63028]: INFO nova.compute.claims [None req-5304d7dd-e594-479a-82a5-046691037310 tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] [instance: 53b0cf02-f1c8-4219-a58c-f7b5ffbb1ae7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 987.051155] env[63028]: INFO nova.scheduler.client.report [None req-217f2845-2515-4130-b1d0-de641f56c44e tempest-ListServersNegativeTestJSON-712341660 
tempest-ListServersNegativeTestJSON-712341660-project-member] Deleted allocations for instance bb2b405e-6207-4718-9485-0271d26c160f [ 987.061047] env[63028]: INFO nova.scheduler.client.report [None req-6c9f5b6d-d6b4-4ed0-994c-55d12d9c1a85 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Deleted allocations for instance ba57ed92-aaef-460c-bd45-d0cbe09e4615 [ 987.078512] env[63028]: INFO nova.scheduler.client.report [None req-8914f4fd-23d9-41ba-bde1-faf65d9122a3 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] Deleted allocations for instance 7e914e49-0d70-4024-940b-ad2a15e9dff7 [ 987.140923] env[63028]: DEBUG oslo_concurrency.lockutils [req-0a8a2ab4-ec8f-4d07-a35d-cbff333d2790 req-3fdb61ab-06f4-4c37-a6c4-39d84325b9a6 service nova] Releasing lock "refresh_cache-56d6982d-9f76-4952-8c8b-f64b3c8d02fe" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 987.141236] env[63028]: DEBUG nova.compute.manager [req-0a8a2ab4-ec8f-4d07-a35d-cbff333d2790 req-3fdb61ab-06f4-4c37-a6c4-39d84325b9a6 service nova] [instance: b77ba7d6-305e-4b60-a4b7-9353c12c3920] Received event network-vif-plugged-98722f90-f2d2-4a4a-9e68-ad5c32b18435 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 987.141431] env[63028]: DEBUG oslo_concurrency.lockutils [req-0a8a2ab4-ec8f-4d07-a35d-cbff333d2790 req-3fdb61ab-06f4-4c37-a6c4-39d84325b9a6 service nova] Acquiring lock "b77ba7d6-305e-4b60-a4b7-9353c12c3920-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 987.141699] env[63028]: DEBUG oslo_concurrency.lockutils [req-0a8a2ab4-ec8f-4d07-a35d-cbff333d2790 req-3fdb61ab-06f4-4c37-a6c4-39d84325b9a6 service nova] Lock "b77ba7d6-305e-4b60-a4b7-9353c12c3920-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 987.141894] env[63028]: DEBUG oslo_concurrency.lockutils [req-0a8a2ab4-ec8f-4d07-a35d-cbff333d2790 req-3fdb61ab-06f4-4c37-a6c4-39d84325b9a6 service nova] Lock "b77ba7d6-305e-4b60-a4b7-9353c12c3920-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 987.142080] env[63028]: DEBUG nova.compute.manager [req-0a8a2ab4-ec8f-4d07-a35d-cbff333d2790 req-3fdb61ab-06f4-4c37-a6c4-39d84325b9a6 service nova] [instance: b77ba7d6-305e-4b60-a4b7-9353c12c3920] No waiting events found dispatching network-vif-plugged-98722f90-f2d2-4a4a-9e68-ad5c32b18435 {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 987.142258] env[63028]: WARNING nova.compute.manager [req-0a8a2ab4-ec8f-4d07-a35d-cbff333d2790 req-3fdb61ab-06f4-4c37-a6c4-39d84325b9a6 service nova] [instance: b77ba7d6-305e-4b60-a4b7-9353c12c3920] Received unexpected event network-vif-plugged-98722f90-f2d2-4a4a-9e68-ad5c32b18435 for instance with vm_state active and task_state None. 
[ 987.142425] env[63028]: DEBUG nova.compute.manager [req-0a8a2ab4-ec8f-4d07-a35d-cbff333d2790 req-3fdb61ab-06f4-4c37-a6c4-39d84325b9a6 service nova] [instance: b77ba7d6-305e-4b60-a4b7-9353c12c3920] Received event network-changed-98722f90-f2d2-4a4a-9e68-ad5c32b18435 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 987.142586] env[63028]: DEBUG nova.compute.manager [req-0a8a2ab4-ec8f-4d07-a35d-cbff333d2790 req-3fdb61ab-06f4-4c37-a6c4-39d84325b9a6 service nova] [instance: b77ba7d6-305e-4b60-a4b7-9353c12c3920] Refreshing instance network info cache due to event network-changed-98722f90-f2d2-4a4a-9e68-ad5c32b18435. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 987.142812] env[63028]: DEBUG oslo_concurrency.lockutils [req-0a8a2ab4-ec8f-4d07-a35d-cbff333d2790 req-3fdb61ab-06f4-4c37-a6c4-39d84325b9a6 service nova] Acquiring lock "refresh_cache-b77ba7d6-305e-4b60-a4b7-9353c12c3920" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 987.142957] env[63028]: DEBUG oslo_concurrency.lockutils [req-0a8a2ab4-ec8f-4d07-a35d-cbff333d2790 req-3fdb61ab-06f4-4c37-a6c4-39d84325b9a6 service nova] Acquired lock "refresh_cache-b77ba7d6-305e-4b60-a4b7-9353c12c3920" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 987.143130] env[63028]: DEBUG nova.network.neutron [req-0a8a2ab4-ec8f-4d07-a35d-cbff333d2790 req-3fdb61ab-06f4-4c37-a6c4-39d84325b9a6 service nova] [instance: b77ba7d6-305e-4b60-a4b7-9353c12c3920] Refreshing network info cache for port 98722f90-f2d2-4a4a-9e68-ad5c32b18435 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 987.215227] env[63028]: DEBUG oslo_vmware.api [None req-029bbebf-b712-4d44-b93d-a9ad42e63e3e tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2735999, 'name': RemoveSnapshot_Task, 'duration_secs': 0.648818} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 987.215523] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-029bbebf-b712-4d44-b93d-a9ad42e63e3e tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19] Deleted Snapshot of the VM instance {{(pid=63028) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 987.216083] env[63028]: DEBUG nova.compute.manager [None req-029bbebf-b712-4d44-b93d-a9ad42e63e3e tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 987.216639] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-726b43b1-3eb1-4367-8e98-37022cba7cf4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.390961] env[63028]: DEBUG oslo_vmware.api [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2736000, 'name': ReconfigVM_Task, 'duration_secs': 0.52421} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 987.395366] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Reconfigured VM instance instance-0000004e to attach disk [datastore1] 46dc76bc-854f-46ad-9db5-21cf6f40fb21/46dc76bc-854f-46ad-9db5-21cf6f40fb21.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 987.397812] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Block device information present: {'root_device_name': '/dev/sda', 'image': [{'encryption_secret_uuid': None, 'guest_format': None, 'size': 0, 'disk_bus': None, 'encryption_format': None, 'boot_index': 0, 'device_name': '/dev/sda', 'device_type': 'disk', 'encrypted': False, 'encryption_options': None, 'image_id': 'f2ba2026-3f4b-431c-97c1-c4ba582a9907'}], 'ephemerals': [], 'block_device_mapping': [{'delete_on_termination': False, 'guest_format': None, 'disk_bus': None, 'mount_device': '/dev/sdb', 'attachment_id': '08e66ea2-3cb4-4b0c-a6b3-9699ddfc4390', 'boot_index': None, 'device_type': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-550816', 'volume_id': '15326c33-7e0b-41be-bf2e-5b82153cea0d', 'name': 'volume-15326c33-7e0b-41be-bf2e-5b82153cea0d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '46dc76bc-854f-46ad-9db5-21cf6f40fb21', 'attached_at': '', 'detached_at': '', 'volume_id': '15326c33-7e0b-41be-bf2e-5b82153cea0d', 'serial': '15326c33-7e0b-41be-bf2e-5b82153cea0d'}, 'volume_type': None}], 'swap': None} {{(pid=63028) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 987.398041] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Volume attach. 
Driver type: vmdk {{(pid=63028) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 987.398243] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-550816', 'volume_id': '15326c33-7e0b-41be-bf2e-5b82153cea0d', 'name': 'volume-15326c33-7e0b-41be-bf2e-5b82153cea0d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '46dc76bc-854f-46ad-9db5-21cf6f40fb21', 'attached_at': '', 'detached_at': '', 'volume_id': '15326c33-7e0b-41be-bf2e-5b82153cea0d', 'serial': '15326c33-7e0b-41be-bf2e-5b82153cea0d'} {{(pid=63028) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 987.399552] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48252608-157d-4ec1-8c14-3faa7514e179 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.421794] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2192d0dc-57d3-4839-b9e3-5b578c93af77 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.432951] env[63028]: DEBUG oslo_vmware.api [None req-2021c84d-1019-4fb3-b96c-749c278c13a7 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]522078d3-77e4-8566-8250-5c1eaf4b860d, 'name': SearchDatastore_Task, 'duration_secs': 0.011908} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 987.446794] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2021c84d-1019-4fb3-b96c-749c278c13a7 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 987.447214] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-2021c84d-1019-4fb3-b96c-749c278c13a7 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 987.447450] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2021c84d-1019-4fb3-b96c-749c278c13a7 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 987.447598] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2021c84d-1019-4fb3-b96c-749c278c13a7 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 987.447779] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-2021c84d-1019-4fb3-b96c-749c278c13a7 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 987.455242] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Reconfiguring VM instance instance-0000004e to attach disk [datastore2] volume-15326c33-7e0b-41be-bf2e-5b82153cea0d/volume-15326c33-7e0b-41be-bf2e-5b82153cea0d.vmdk or device None with type thin {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 987.455528] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-582274ee-1f54-4d07-bfd0-866be1f7dbcb {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.457428] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-110f0ee5-30b3-4a25-b2a7-eb74bd3bf063 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.476218] env[63028]: DEBUG oslo_vmware.api [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Waiting for the task: (returnval){ [ 987.476218] env[63028]: value = "task-2736002" [ 987.476218] env[63028]: _type = "Task" [ 987.476218] env[63028]: } to 
complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.483134] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-2021c84d-1019-4fb3-b96c-749c278c13a7 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 987.483410] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-2021c84d-1019-4fb3-b96c-749c278c13a7 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 987.484879] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b03a2ff0-669d-4ecd-af83-ab0bd3248af6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.491055] env[63028]: DEBUG oslo_vmware.api [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2736002, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.494312] env[63028]: DEBUG oslo_vmware.api [None req-2021c84d-1019-4fb3-b96c-749c278c13a7 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Waiting for the task: (returnval){ [ 987.494312] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52625eb9-a3b7-f6fd-03ae-4c8571b627fe" [ 987.494312] env[63028]: _type = "Task" [ 987.494312] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.502512] env[63028]: DEBUG oslo_vmware.api [None req-2021c84d-1019-4fb3-b96c-749c278c13a7 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52625eb9-a3b7-f6fd-03ae-4c8571b627fe, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.521345] env[63028]: DEBUG nova.network.neutron [None req-d36731fd-6b14-4a17-8896-8c5be755220d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7] Updating instance_info_cache with network_info: [{"id": "6ecb125b-389c-4dce-8446-368a7298e497", "address": "fa:16:3e:f4:06:c4", "network": {"id": "ec7daf90-9857-4c67-8588-1d8404499409", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-574413795-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.163", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e85128c5c889438bbb1df571b7756c6a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e1a5c1-4ae7-409b-8de7-d401684ef60d", "external-id": "nsx-vlan-transportzone-740", "segmentation_id": 740, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6ecb125b-38", "ovs_interfaceid": "6ecb125b-389c-4dce-8446-368a7298e497", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 987.527155] env[63028]: DEBUG nova.compute.utils [None req-512f154c-941d-4be2-85d8-fd73b2fb3370 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 987.530933] env[63028]: DEBUG nova.compute.manager [None req-512f154c-941d-4be2-85d8-fd73b2fb3370 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] [instance: 3fb46d02-7914-4d08-b63b-f3447ba1b81a] Allocating IP information in the background. 
{{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 987.531070] env[63028]: DEBUG nova.network.neutron [None req-512f154c-941d-4be2-85d8-fd73b2fb3370 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] [instance: 3fb46d02-7914-4d08-b63b-f3447ba1b81a] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 987.559369] env[63028]: DEBUG oslo_concurrency.lockutils [None req-217f2845-2515-4130-b1d0-de641f56c44e tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Lock "bb2b405e-6207-4718-9485-0271d26c160f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.319s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 987.569911] env[63028]: DEBUG oslo_concurrency.lockutils [None req-6c9f5b6d-d6b4-4ed0-994c-55d12d9c1a85 tempest-ServersAdminTestJSON-462896532 tempest-ServersAdminTestJSON-462896532-project-member] Lock "ba57ed92-aaef-460c-bd45-d0cbe09e4615" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.682s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 987.586832] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8914f4fd-23d9-41ba-bde1-faf65d9122a3 tempest-ServersTestJSON-831221344 tempest-ServersTestJSON-831221344-project-member] Lock "7e914e49-0d70-4024-940b-ad2a15e9dff7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.982s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 987.612589] env[63028]: DEBUG nova.policy [None req-512f154c-941d-4be2-85d8-fd73b2fb3370 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2dfde114aa1c4d7bbccbe92eabfdd811', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b7b16d4124ad42aeb35b3ec3ebe1b92b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 987.731375] env[63028]: INFO nova.compute.manager [None req-029bbebf-b712-4d44-b93d-a9ad42e63e3e tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19] Shelve offloading [ 987.990306] env[63028]: DEBUG oslo_vmware.api [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2736002, 'name': ReconfigVM_Task, 'duration_secs': 0.309451} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 987.990681] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Reconfigured VM instance instance-0000004e to attach disk [datastore2] volume-15326c33-7e0b-41be-bf2e-5b82153cea0d/volume-15326c33-7e0b-41be-bf2e-5b82153cea0d.vmdk or device None with type thin {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 987.997626] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0d126a0a-b845-4d17-94ff-9248298e9d61 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.020193] env[63028]: DEBUG oslo_vmware.api [None req-2021c84d-1019-4fb3-b96c-749c278c13a7 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52625eb9-a3b7-f6fd-03ae-4c8571b627fe, 'name': SearchDatastore_Task, 'duration_secs': 0.013411} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.022269] env[63028]: DEBUG oslo_vmware.api [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Waiting for the task: (returnval){ [ 988.022269] env[63028]: value = "task-2736003" [ 988.022269] env[63028]: _type = "Task" [ 988.022269] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.022541] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-589db325-48b7-43bc-b841-e8ab27ca3daf {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.025926] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d36731fd-6b14-4a17-8896-8c5be755220d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Releasing lock "refresh_cache-c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 988.031147] env[63028]: DEBUG nova.compute.manager [None req-512f154c-941d-4be2-85d8-fd73b2fb3370 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] [instance: 3fb46d02-7914-4d08-b63b-f3447ba1b81a] Start building block device mappings for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 988.041273] env[63028]: DEBUG oslo_vmware.api [None req-2021c84d-1019-4fb3-b96c-749c278c13a7 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Waiting for the task: (returnval){ [ 988.041273] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52710fa4-ff68-e9f0-2bce-977fcab84d5d" [ 988.041273] env[63028]: _type = "Task" [ 988.041273] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.046476] env[63028]: DEBUG oslo_vmware.api [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2736003, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.060402] env[63028]: DEBUG oslo_vmware.api [None req-2021c84d-1019-4fb3-b96c-749c278c13a7 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52710fa4-ff68-e9f0-2bce-977fcab84d5d, 'name': SearchDatastore_Task, 'duration_secs': 0.013847} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.060402] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2021c84d-1019-4fb3-b96c-749c278c13a7 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 988.060402] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-2021c84d-1019-4fb3-b96c-749c278c13a7 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] d41a1eae-bb89-4222-9466-d86af891c654/d41a1eae-bb89-4222-9466-d86af891c654.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 988.060402] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bceab654-9f7d-4f30-b3ae-3f00ab88ebc4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.067830] env[63028]: DEBUG oslo_vmware.api [None req-2021c84d-1019-4fb3-b96c-749c278c13a7 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Waiting for the task: (returnval){ [ 988.067830] env[63028]: value = "task-2736004" [ 988.067830] env[63028]: _type = "Task" [ 988.067830] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.078466] env[63028]: DEBUG oslo_vmware.api [None req-2021c84d-1019-4fb3-b96c-749c278c13a7 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2736004, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.153226] env[63028]: DEBUG nova.network.neutron [req-0a8a2ab4-ec8f-4d07-a35d-cbff333d2790 req-3fdb61ab-06f4-4c37-a6c4-39d84325b9a6 service nova] [instance: b77ba7d6-305e-4b60-a4b7-9353c12c3920] Updated VIF entry in instance network info cache for port 98722f90-f2d2-4a4a-9e68-ad5c32b18435. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 988.153226] env[63028]: DEBUG nova.network.neutron [req-0a8a2ab4-ec8f-4d07-a35d-cbff333d2790 req-3fdb61ab-06f4-4c37-a6c4-39d84325b9a6 service nova] [instance: b77ba7d6-305e-4b60-a4b7-9353c12c3920] Updating instance_info_cache with network_info: [{"id": "9efd2ef2-d319-4038-ab28-44a46bd597d8", "address": "fa:16:3e:90:60:0c", "network": {"id": "c2f1496c-e3fd-43db-a032-12cdacdb4e46", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1287620143-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.220", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "25a6457f62d149629c09589feb1a550c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9efd2ef2-d3", "ovs_interfaceid": "9efd2ef2-d319-4038-ab28-44a46bd597d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "98722f90-f2d2-4a4a-9e68-ad5c32b18435", "address": "fa:16:3e:ab:1c:75", "network": {"id": "c2f1496c-e3fd-43db-a032-12cdacdb4e46", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1287620143-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "25a6457f62d149629c09589feb1a550c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98722f90-f2", "ovs_interfaceid": "98722f90-f2d2-4a4a-9e68-ad5c32b18435", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 988.197109] env[63028]: INFO nova.compute.manager [None req-b1acdeee-bc7b-42e4-acde-7e3ebee51b4f tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] [instance: da88308f-ce62-40af-adae-e38aa506bdd9] Get console output [ 988.197664] env[63028]: WARNING nova.virt.vmwareapi.driver [None req-b1acdeee-bc7b-42e4-acde-7e3ebee51b4f tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] [instance: da88308f-ce62-40af-adae-e38aa506bdd9] The console log is missing. 
Check your VSPC configuration [ 988.239018] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-029bbebf-b712-4d44-b93d-a9ad42e63e3e tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 988.239833] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8ff9a53b-752f-4427-b827-a8afa4a464e2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.247229] env[63028]: DEBUG oslo_vmware.api [None req-029bbebf-b712-4d44-b93d-a9ad42e63e3e tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Waiting for the task: (returnval){ [ 988.247229] env[63028]: value = "task-2736006" [ 988.247229] env[63028]: _type = "Task" [ 988.247229] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.258574] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-029bbebf-b712-4d44-b93d-a9ad42e63e3e tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19] VM already powered off {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 988.258704] env[63028]: DEBUG nova.compute.manager [None req-029bbebf-b712-4d44-b93d-a9ad42e63e3e tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 988.259760] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2582693e-44e7-41f5-bfe8-1fdb2cf4cf4c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.267160] env[63028]: DEBUG oslo_concurrency.lockutils [None req-029bbebf-b712-4d44-b93d-a9ad42e63e3e tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Acquiring lock "refresh_cache-ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 988.267160] env[63028]: DEBUG oslo_concurrency.lockutils [None req-029bbebf-b712-4d44-b93d-a9ad42e63e3e tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Acquired lock "refresh_cache-ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 988.267370] env[63028]: DEBUG nova.network.neutron [None req-029bbebf-b712-4d44-b93d-a9ad42e63e3e tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 988.393655] env[63028]: DEBUG nova.network.neutron [None req-512f154c-941d-4be2-85d8-fd73b2fb3370 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] [instance: 3fb46d02-7914-4d08-b63b-f3447ba1b81a] Successfully created port: eb3a6303-8ef2-4d1d-bcbc-58838b430d84 {{(pid=63028) 
_create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 988.476126] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9079c8c2-b569-4e87-bc06-f3d8e91deb05 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] Acquiring lock "455578fa-7468-40dc-8c0a-37ac35e5c0a0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 988.476126] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9079c8c2-b569-4e87-bc06-f3d8e91deb05 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] Lock "455578fa-7468-40dc-8c0a-37ac35e5c0a0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 988.476126] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9079c8c2-b569-4e87-bc06-f3d8e91deb05 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] Acquiring lock "455578fa-7468-40dc-8c0a-37ac35e5c0a0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 988.476126] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9079c8c2-b569-4e87-bc06-f3d8e91deb05 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] Lock "455578fa-7468-40dc-8c0a-37ac35e5c0a0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 988.476126] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9079c8c2-b569-4e87-bc06-f3d8e91deb05 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] Lock "455578fa-7468-40dc-8c0a-37ac35e5c0a0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 988.479873] env[63028]: INFO nova.compute.manager [None req-9079c8c2-b569-4e87-bc06-f3d8e91deb05 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] [instance: 455578fa-7468-40dc-8c0a-37ac35e5c0a0] Terminating instance [ 988.486879] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff175684-3734-4491-84f7-7ad8a7476d97 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.495705] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81ae98c2-08e6-4057-bbcd-be0e988921f9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.531949] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4792f56-8802-43f9-ac14-54b61c77db4c 
{{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.541019] env[63028]: DEBUG nova.compute.manager [None req-d36731fd-6b14-4a17-8896-8c5be755220d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 988.543162] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbb04a96-6e51-463e-9bc0-4b1a5c90ff39 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.550805] env[63028]: DEBUG oslo_vmware.api [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2736003, 'name': ReconfigVM_Task, 'duration_secs': 0.204637} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.554138] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-550816', 'volume_id': '15326c33-7e0b-41be-bf2e-5b82153cea0d', 'name': 'volume-15326c33-7e0b-41be-bf2e-5b82153cea0d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '46dc76bc-854f-46ad-9db5-21cf6f40fb21', 'attached_at': '', 'detached_at': '', 'volume_id': '15326c33-7e0b-41be-bf2e-5b82153cea0d', 'serial': '15326c33-7e0b-41be-bf2e-5b82153cea0d'} {{(pid=63028) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 988.554872] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9452f162-61ac-404c-b994-f9a1f5c30e98 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.557689] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f47f9c4-a582-4119-9f28-2f45e62d2ef9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.577056] env[63028]: DEBUG nova.compute.provider_tree [None req-5304d7dd-e594-479a-82a5-046691037310 tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 988.584146] env[63028]: DEBUG oslo_vmware.api [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Waiting for the task: (returnval){ [ 988.584146] env[63028]: value = "task-2736007" [ 988.584146] env[63028]: _type = "Task" [ 988.584146] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.592902] env[63028]: DEBUG oslo_vmware.api [None req-2021c84d-1019-4fb3-b96c-749c278c13a7 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2736004, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.597862] env[63028]: DEBUG oslo_vmware.api [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2736007, 'name': Rename_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.657544] env[63028]: DEBUG oslo_concurrency.lockutils [req-0a8a2ab4-ec8f-4d07-a35d-cbff333d2790 req-3fdb61ab-06f4-4c37-a6c4-39d84325b9a6 service nova] Releasing lock "refresh_cache-b77ba7d6-305e-4b60-a4b7-9353c12c3920" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 988.733304] env[63028]: DEBUG nova.compute.manager [req-616cbaa6-7b0f-4870-be56-306eabd3c567 req-b5c92046-7d0f-4f24-a27d-84a49230f048 service nova] [instance: 56d6982d-9f76-4952-8c8b-f64b3c8d02fe] Received event network-changed-de1f8509-63e1-41ae-ad48-03c4e8b74871 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 988.733304] env[63028]: DEBUG nova.compute.manager [req-616cbaa6-7b0f-4870-be56-306eabd3c567 req-b5c92046-7d0f-4f24-a27d-84a49230f048 service nova] [instance: 56d6982d-9f76-4952-8c8b-f64b3c8d02fe] Refreshing instance network info cache due to event network-changed-de1f8509-63e1-41ae-ad48-03c4e8b74871. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 988.733304] env[63028]: DEBUG oslo_concurrency.lockutils [req-616cbaa6-7b0f-4870-be56-306eabd3c567 req-b5c92046-7d0f-4f24-a27d-84a49230f048 service nova] Acquiring lock "refresh_cache-56d6982d-9f76-4952-8c8b-f64b3c8d02fe" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 988.733304] env[63028]: DEBUG oslo_concurrency.lockutils [req-616cbaa6-7b0f-4870-be56-306eabd3c567 req-b5c92046-7d0f-4f24-a27d-84a49230f048 service nova] Acquired lock "refresh_cache-56d6982d-9f76-4952-8c8b-f64b3c8d02fe" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 988.733304] env[63028]: DEBUG nova.network.neutron [req-616cbaa6-7b0f-4870-be56-306eabd3c567 req-b5c92046-7d0f-4f24-a27d-84a49230f048 service nova] [instance: 56d6982d-9f76-4952-8c8b-f64b3c8d02fe] Refreshing network info cache for port de1f8509-63e1-41ae-ad48-03c4e8b74871 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 988.986835] env[63028]: DEBUG nova.compute.manager [None req-9079c8c2-b569-4e87-bc06-f3d8e91deb05 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] [instance: 455578fa-7468-40dc-8c0a-37ac35e5c0a0] Start destroying the instance on the hypervisor. 
{{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 988.988932] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-9079c8c2-b569-4e87-bc06-f3d8e91deb05 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] [instance: 455578fa-7468-40dc-8c0a-37ac35e5c0a0] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 988.990119] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8109fb6-a578-4889-bf19-4d17211692e7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.998419] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-9079c8c2-b569-4e87-bc06-f3d8e91deb05 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] [instance: 455578fa-7468-40dc-8c0a-37ac35e5c0a0] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 988.998691] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-27b4cdb7-7893-470c-b846-a688c1d38db7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.007108] env[63028]: DEBUG oslo_vmware.api [None req-9079c8c2-b569-4e87-bc06-f3d8e91deb05 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] Waiting for the task: (returnval){ [ 989.007108] env[63028]: value = "task-2736008" [ 989.007108] env[63028]: _type = "Task" [ 989.007108] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.015282] env[63028]: DEBUG oslo_vmware.api [None req-9079c8c2-b569-4e87-bc06-f3d8e91deb05 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] Task: {'id': task-2736008, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.051712] env[63028]: DEBUG nova.compute.manager [None req-512f154c-941d-4be2-85d8-fd73b2fb3370 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] [instance: 3fb46d02-7914-4d08-b63b-f3447ba1b81a] Start spawning the instance on the hypervisor. 
{{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 989.095847] env[63028]: DEBUG nova.virt.hardware [None req-512f154c-941d-4be2-85d8-fd73b2fb3370 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 989.095847] env[63028]: DEBUG nova.virt.hardware [None req-512f154c-941d-4be2-85d8-fd73b2fb3370 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 989.095847] env[63028]: DEBUG nova.virt.hardware [None req-512f154c-941d-4be2-85d8-fd73b2fb3370 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 989.095847] env[63028]: DEBUG nova.virt.hardware [None req-512f154c-941d-4be2-85d8-fd73b2fb3370 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 989.095847] env[63028]: DEBUG nova.virt.hardware [None req-512f154c-941d-4be2-85d8-fd73b2fb3370 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 989.095847] env[63028]: DEBUG nova.virt.hardware [None req-512f154c-941d-4be2-85d8-fd73b2fb3370 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 989.096601] env[63028]: DEBUG nova.virt.hardware [None req-512f154c-941d-4be2-85d8-fd73b2fb3370 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 989.096601] env[63028]: DEBUG nova.virt.hardware [None req-512f154c-941d-4be2-85d8-fd73b2fb3370 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 989.096601] env[63028]: DEBUG nova.virt.hardware [None 
req-512f154c-941d-4be2-85d8-fd73b2fb3370 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 989.096763] env[63028]: DEBUG nova.virt.hardware [None req-512f154c-941d-4be2-85d8-fd73b2fb3370 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 989.097065] env[63028]: DEBUG nova.virt.hardware [None req-512f154c-941d-4be2-85d8-fd73b2fb3370 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 989.098086] env[63028]: DEBUG nova.scheduler.client.report [None req-5304d7dd-e594-479a-82a5-046691037310 tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 989.102848] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-999fa7aa-f5fc-4a6b-99dd-5eb8c4ad593c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.125182] env[63028]: DEBUG oslo_vmware.api [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2736007, 'name': Rename_Task, 'duration_secs': 0.249497} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.125438] env[63028]: DEBUG oslo_vmware.api [None req-2021c84d-1019-4fb3-b96c-749c278c13a7 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2736004, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.632075} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.132020] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 989.132020] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-2021c84d-1019-4fb3-b96c-749c278c13a7 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] d41a1eae-bb89-4222-9466-d86af891c654/d41a1eae-bb89-4222-9466-d86af891c654.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 989.132020] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-2021c84d-1019-4fb3-b96c-749c278c13a7 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 989.133083] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3792cbe3-1a61-4df9-aa1e-824f7c5303de {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.134878] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-08015834-5cb4-47aa-8d40-ebc4f5095674 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.138817] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc0528f1-69f3-4a5e-a90b-d4146f152cd7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.157051] env[63028]: DEBUG oslo_vmware.api [None req-2021c84d-1019-4fb3-b96c-749c278c13a7 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Waiting for the task: (returnval){ [ 989.157051] env[63028]: value = "task-2736010" [ 989.157051] env[63028]: _type = "Task" [ 989.157051] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.157316] env[63028]: DEBUG oslo_vmware.api [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Waiting for the task: (returnval){ [ 989.157316] env[63028]: value = "task-2736009" [ 989.157316] env[63028]: _type = "Task" [ 989.157316] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.169204] env[63028]: DEBUG oslo_vmware.api [None req-2021c84d-1019-4fb3-b96c-749c278c13a7 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2736010, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.172432] env[63028]: DEBUG oslo_vmware.api [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2736009, 'name': PowerOnVM_Task} progress is 33%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.263301] env[63028]: DEBUG oslo_concurrency.lockutils [None req-98d4fdad-5b7e-4f6c-913e-53467e1954bd tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Acquiring lock "c386c117-e255-4c3b-9a37-011e517277de" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 989.263301] env[63028]: DEBUG oslo_concurrency.lockutils [None req-98d4fdad-5b7e-4f6c-913e-53467e1954bd tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Lock "c386c117-e255-4c3b-9a37-011e517277de" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 989.263466] env[63028]: DEBUG oslo_concurrency.lockutils [None req-98d4fdad-5b7e-4f6c-913e-53467e1954bd tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Acquiring lock "c386c117-e255-4c3b-9a37-011e517277de-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 989.263517] env[63028]: DEBUG oslo_concurrency.lockutils [None req-98d4fdad-5b7e-4f6c-913e-53467e1954bd tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Lock "c386c117-e255-4c3b-9a37-011e517277de-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 989.263701] env[63028]: DEBUG oslo_concurrency.lockutils [None req-98d4fdad-5b7e-4f6c-913e-53467e1954bd tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Lock "c386c117-e255-4c3b-9a37-011e517277de-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 989.265951] env[63028]: INFO nova.compute.manager [None req-98d4fdad-5b7e-4f6c-913e-53467e1954bd tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: c386c117-e255-4c3b-9a37-011e517277de] Terminating instance [ 989.333308] env[63028]: DEBUG nova.network.neutron [None req-029bbebf-b712-4d44-b93d-a9ad42e63e3e tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19] Updating instance_info_cache with network_info: [{"id": "db82c13e-74f6-431e-9184-2375c4a0bbbc", "address": "fa:16:3e:1d:fe:e0", "network": {"id": "9c32515c-66bf-4a5f-9011-975fe2f6b264", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1101331936-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", 
"type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8efc6d89903c454eb39136a76e0adef5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92fe29b3-0907-453d-aabb-5559c4bd7c0f", "external-id": "nsx-vlan-transportzone-482", "segmentation_id": 482, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdb82c13e-74", "ovs_interfaceid": "db82c13e-74f6-431e-9184-2375c4a0bbbc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 989.519886] env[63028]: DEBUG oslo_vmware.api [None req-9079c8c2-b569-4e87-bc06-f3d8e91deb05 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] Task: {'id': task-2736008, 'name': PowerOffVM_Task, 'duration_secs': 0.446746} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.521122] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-9079c8c2-b569-4e87-bc06-f3d8e91deb05 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] [instance: 455578fa-7468-40dc-8c0a-37ac35e5c0a0] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 989.521232] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-9079c8c2-b569-4e87-bc06-f3d8e91deb05 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] [instance: 455578fa-7468-40dc-8c0a-37ac35e5c0a0] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 989.526764] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-974a66d3-e211-4eca-9e32-e7122b3180a5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.579910] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df5031da-013b-4d47-b985-f3511cca4882 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.588133] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-d36731fd-6b14-4a17-8896-8c5be755220d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7] Doing hard reboot of VM {{(pid=63028) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 989.591682] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-b9091d44-f9fe-43e1-83d1-bf7603843be6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.597464] env[63028]: DEBUG oslo_vmware.api [None req-d36731fd-6b14-4a17-8896-8c5be755220d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Waiting for the task: (returnval){ [ 
989.597464] env[63028]: value = "task-2736012" [ 989.597464] env[63028]: _type = "Task" [ 989.597464] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.608022] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Acquiring lock "85aafadb-81d6-4687-aed1-fbe829e5f95f" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 989.608022] env[63028]: DEBUG oslo_vmware.api [None req-d36731fd-6b14-4a17-8896-8c5be755220d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736012, 'name': ResetVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.608282] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5304d7dd-e594-479a-82a5-046691037310 tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.588s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 989.609214] env[63028]: DEBUG nova.compute.manager [None req-5304d7dd-e594-479a-82a5-046691037310 tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] [instance: 53b0cf02-f1c8-4219-a58c-f7b5ffbb1ae7] Start building networks asynchronously for instance. 
{{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 989.611987] env[63028]: DEBUG oslo_concurrency.lockutils [None req-459fd01e-3304-4c20-a3b6-e4be4925e19b tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.256s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 989.612264] env[63028]: DEBUG nova.objects.instance [None req-459fd01e-3304-4c20-a3b6-e4be4925e19b tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Lazy-loading 'resources' on Instance uuid 85aafadb-81d6-4687-aed1-fbe829e5f95f {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 989.621387] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-9079c8c2-b569-4e87-bc06-f3d8e91deb05 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] [instance: 455578fa-7468-40dc-8c0a-37ac35e5c0a0] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 989.621785] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-9079c8c2-b569-4e87-bc06-f3d8e91deb05 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] [instance: 455578fa-7468-40dc-8c0a-37ac35e5c0a0] Deleting contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 989.621785] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-9079c8c2-b569-4e87-bc06-f3d8e91deb05 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] Deleting the datastore file [datastore2] 455578fa-7468-40dc-8c0a-37ac35e5c0a0 {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 989.622294] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ac3cedb0-6be4-4e83-9252-34684a3f60d3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.630752] env[63028]: DEBUG oslo_vmware.api [None req-9079c8c2-b569-4e87-bc06-f3d8e91deb05 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] Waiting for the task: (returnval){ [ 989.630752] env[63028]: value = "task-2736013" [ 989.630752] env[63028]: _type = "Task" [ 989.630752] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.642100] env[63028]: DEBUG oslo_vmware.api [None req-9079c8c2-b569-4e87-bc06-f3d8e91deb05 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] Task: {'id': task-2736013, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.674894] env[63028]: DEBUG oslo_vmware.api [None req-2021c84d-1019-4fb3-b96c-749c278c13a7 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2736010, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074686} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.679181] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-2021c84d-1019-4fb3-b96c-749c278c13a7 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 989.679469] env[63028]: DEBUG oslo_vmware.api [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2736009, 'name': PowerOnVM_Task, 'duration_secs': 0.518882} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.680906] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-377817e6-b97d-4d34-b31e-cc38548ca22c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.683970] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 989.684333] env[63028]: DEBUG nova.compute.manager [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 989.685261] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fbcc531-1297-43e0-9d42-0f59944e263d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.711813] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-2021c84d-1019-4fb3-b96c-749c278c13a7 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Reconfiguring VM instance instance-0000005a to attach disk [datastore1] d41a1eae-bb89-4222-9466-d86af891c654/d41a1eae-bb89-4222-9466-d86af891c654.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 989.714036] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b49a9a04-4b59-4948-aaca-33d7bef67204 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.733247] env[63028]: DEBUG nova.network.neutron [req-616cbaa6-7b0f-4870-be56-306eabd3c567 req-b5c92046-7d0f-4f24-a27d-84a49230f048 service nova] [instance: 56d6982d-9f76-4952-8c8b-f64b3c8d02fe] Updated VIF entry in instance network info cache 
for port de1f8509-63e1-41ae-ad48-03c4e8b74871. {{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 989.733247] env[63028]: DEBUG nova.network.neutron [req-616cbaa6-7b0f-4870-be56-306eabd3c567 req-b5c92046-7d0f-4f24-a27d-84a49230f048 service nova] [instance: 56d6982d-9f76-4952-8c8b-f64b3c8d02fe] Updating instance_info_cache with network_info: [{"id": "de1f8509-63e1-41ae-ad48-03c4e8b74871", "address": "fa:16:3e:4e:c3:80", "network": {"id": "8fd5dd35-416e-45e2-aea5-8c2c22752ef7", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1819490475-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bae448aa28a84aa6863fffc24a5448fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "489b2441-7132-4942-8b61-49cf0ad4400e", "external-id": "nsx-vlan-transportzone-971", "segmentation_id": 971, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapde1f8509-63", "ovs_interfaceid": "de1f8509-63e1-41ae-ad48-03c4e8b74871", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 989.738756] env[63028]: DEBUG oslo_vmware.api [None req-2021c84d-1019-4fb3-b96c-749c278c13a7 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Waiting for the task: (returnval){ [ 989.738756] env[63028]: value = "task-2736014" [ 989.738756] env[63028]: _type = "Task" [ 989.738756] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.747752] env[63028]: DEBUG oslo_vmware.api [None req-2021c84d-1019-4fb3-b96c-749c278c13a7 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2736014, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.772485] env[63028]: DEBUG nova.compute.manager [None req-98d4fdad-5b7e-4f6c-913e-53467e1954bd tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: c386c117-e255-4c3b-9a37-011e517277de] Start destroying the instance on the hypervisor. 
{{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 989.772485] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-98d4fdad-5b7e-4f6c-913e-53467e1954bd tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: c386c117-e255-4c3b-9a37-011e517277de] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 989.772631] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c8345b5-8317-4a5a-9df0-66d7c26dd62c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.781537] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-98d4fdad-5b7e-4f6c-913e-53467e1954bd tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: c386c117-e255-4c3b-9a37-011e517277de] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 989.781795] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-894863cb-a2d0-4453-8fc7-fd3224375e96 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.787647] env[63028]: DEBUG oslo_vmware.api [None req-98d4fdad-5b7e-4f6c-913e-53467e1954bd tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Waiting for the task: (returnval){ [ 989.787647] env[63028]: value = "task-2736015" [ 989.787647] env[63028]: _type = "Task" [ 989.787647] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.796365] env[63028]: DEBUG oslo_vmware.api [None req-98d4fdad-5b7e-4f6c-913e-53467e1954bd tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2736015, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.837731] env[63028]: DEBUG oslo_concurrency.lockutils [None req-029bbebf-b712-4d44-b93d-a9ad42e63e3e tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Releasing lock "refresh_cache-ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 989.895025] env[63028]: DEBUG oslo_concurrency.lockutils [None req-599839db-ace5-459e-85d1-da82c2874637 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Acquiring lock "interface-b77ba7d6-305e-4b60-a4b7-9353c12c3920-d0308a48-57ab-41f7-bbab-6871ed89c5f2" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 989.895025] env[63028]: DEBUG oslo_concurrency.lockutils [None req-599839db-ace5-459e-85d1-da82c2874637 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Lock "interface-b77ba7d6-305e-4b60-a4b7-9353c12c3920-d0308a48-57ab-41f7-bbab-6871ed89c5f2" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 989.895025] env[63028]: DEBUG nova.objects.instance [None req-599839db-ace5-459e-85d1-da82c2874637 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Lazy-loading 'flavor' on Instance uuid b77ba7d6-305e-4b60-a4b7-9353c12c3920 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 990.107724] env[63028]: DEBUG oslo_vmware.api [None req-d36731fd-6b14-4a17-8896-8c5be755220d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736012, 'name': ResetVM_Task} progress is 100%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.115832] env[63028]: DEBUG nova.compute.utils [None req-5304d7dd-e594-479a-82a5-046691037310 tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 990.117324] env[63028]: DEBUG nova.compute.manager [None req-5304d7dd-e594-479a-82a5-046691037310 tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] [instance: 53b0cf02-f1c8-4219-a58c-f7b5ffbb1ae7] Allocating IP information in the background. 
{{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 990.117540] env[63028]: DEBUG nova.network.neutron [None req-5304d7dd-e594-479a-82a5-046691037310 tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] [instance: 53b0cf02-f1c8-4219-a58c-f7b5ffbb1ae7] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 990.119721] env[63028]: DEBUG nova.objects.instance [None req-459fd01e-3304-4c20-a3b6-e4be4925e19b tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Lazy-loading 'numa_topology' on Instance uuid 85aafadb-81d6-4687-aed1-fbe829e5f95f {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 990.142167] env[63028]: DEBUG oslo_vmware.api [None req-9079c8c2-b569-4e87-bc06-f3d8e91deb05 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] Task: {'id': task-2736013, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.374754} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.142472] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-9079c8c2-b569-4e87-bc06-f3d8e91deb05 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 990.142659] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-9079c8c2-b569-4e87-bc06-f3d8e91deb05 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] [instance: 455578fa-7468-40dc-8c0a-37ac35e5c0a0] Deleted contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 990.142833] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-9079c8c2-b569-4e87-bc06-f3d8e91deb05 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] [instance: 455578fa-7468-40dc-8c0a-37ac35e5c0a0] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 990.143338] env[63028]: INFO nova.compute.manager [None req-9079c8c2-b569-4e87-bc06-f3d8e91deb05 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] [instance: 455578fa-7468-40dc-8c0a-37ac35e5c0a0] Took 1.16 seconds to destroy the instance on the hypervisor. [ 990.143338] env[63028]: DEBUG oslo.service.loopingcall [None req-9079c8c2-b569-4e87-bc06-f3d8e91deb05 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 990.143470] env[63028]: DEBUG nova.compute.manager [-] [instance: 455578fa-7468-40dc-8c0a-37ac35e5c0a0] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 990.143521] env[63028]: DEBUG nova.network.neutron [-] [instance: 455578fa-7468-40dc-8c0a-37ac35e5c0a0] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 990.219174] env[63028]: DEBUG nova.compute.manager [req-40efe21b-5a1e-4188-be84-a9d1d17820e2 req-0e3e3a0c-312f-4c79-a378-faf4deb2b418 service nova] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Received event network-changed-2e2d8403-826c-4e24-ba3c-123d444d1fdc {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 990.219174] env[63028]: DEBUG nova.compute.manager [req-40efe21b-5a1e-4188-be84-a9d1d17820e2 req-0e3e3a0c-312f-4c79-a378-faf4deb2b418 service nova] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Refreshing instance network info cache due to event network-changed-2e2d8403-826c-4e24-ba3c-123d444d1fdc. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 990.219527] env[63028]: DEBUG oslo_concurrency.lockutils [req-40efe21b-5a1e-4188-be84-a9d1d17820e2 req-0e3e3a0c-312f-4c79-a378-faf4deb2b418 service nova] Acquiring lock "refresh_cache-85aafadb-81d6-4687-aed1-fbe829e5f95f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 990.219527] env[63028]: DEBUG oslo_concurrency.lockutils [req-40efe21b-5a1e-4188-be84-a9d1d17820e2 req-0e3e3a0c-312f-4c79-a378-faf4deb2b418 service nova] Acquired lock "refresh_cache-85aafadb-81d6-4687-aed1-fbe829e5f95f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 990.219527] env[63028]: DEBUG nova.network.neutron [req-40efe21b-5a1e-4188-be84-a9d1d17820e2 req-0e3e3a0c-312f-4c79-a378-faf4deb2b418 service nova] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Refreshing network info cache for port 2e2d8403-826c-4e24-ba3c-123d444d1fdc {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 990.227804] env[63028]: DEBUG nova.policy [None req-5304d7dd-e594-479a-82a5-046691037310 tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '348f1d2a0e8449118ba19c654d5290a2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e09fff22285147c29cb2bb096bde3921', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 990.240273] env[63028]: DEBUG oslo_concurrency.lockutils [req-616cbaa6-7b0f-4870-be56-306eabd3c567 req-b5c92046-7d0f-4f24-a27d-84a49230f048 service nova] Releasing lock "refresh_cache-56d6982d-9f76-4952-8c8b-f64b3c8d02fe" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 990.243926] env[63028]: DEBUG oslo_concurrency.lockutils [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 
tempest-ServerActionsTestOtherA-431602134-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 990.259681] env[63028]: DEBUG oslo_vmware.api [None req-2021c84d-1019-4fb3-b96c-749c278c13a7 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2736014, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.299650] env[63028]: DEBUG oslo_vmware.api [None req-98d4fdad-5b7e-4f6c-913e-53467e1954bd tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2736015, 'name': PowerOffVM_Task, 'duration_secs': 0.248711} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.299808] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-98d4fdad-5b7e-4f6c-913e-53467e1954bd tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: c386c117-e255-4c3b-9a37-011e517277de] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 990.299881] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-98d4fdad-5b7e-4f6c-913e-53467e1954bd tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: c386c117-e255-4c3b-9a37-011e517277de] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 990.300164] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c86b1475-0a52-48de-bcd7-6e6159d6da9e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.348428] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9f9e38f4-3d16-4f1d-b6e1-f2fbbef67673 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Acquiring lock "da88308f-ce62-40af-adae-e38aa506bdd9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 990.348705] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9f9e38f4-3d16-4f1d-b6e1-f2fbbef67673 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Lock "da88308f-ce62-40af-adae-e38aa506bdd9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 990.348912] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9f9e38f4-3d16-4f1d-b6e1-f2fbbef67673 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Acquiring lock "da88308f-ce62-40af-adae-e38aa506bdd9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 990.349184] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9f9e38f4-3d16-4f1d-b6e1-f2fbbef67673 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Lock 
"da88308f-ce62-40af-adae-e38aa506bdd9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 990.349396] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9f9e38f4-3d16-4f1d-b6e1-f2fbbef67673 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Lock "da88308f-ce62-40af-adae-e38aa506bdd9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 990.352660] env[63028]: INFO nova.compute.manager [None req-9f9e38f4-3d16-4f1d-b6e1-f2fbbef67673 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] [instance: da88308f-ce62-40af-adae-e38aa506bdd9] Terminating instance [ 990.377013] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-98d4fdad-5b7e-4f6c-913e-53467e1954bd tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: c386c117-e255-4c3b-9a37-011e517277de] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 990.377252] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-98d4fdad-5b7e-4f6c-913e-53467e1954bd tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: c386c117-e255-4c3b-9a37-011e517277de] Deleting contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 990.377432] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-98d4fdad-5b7e-4f6c-913e-53467e1954bd tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Deleting the datastore file [datastore2] c386c117-e255-4c3b-9a37-011e517277de {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 990.377874] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-baaaa1e8-f0bd-46a8-b6e8-78eb74722bae {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.388368] env[63028]: DEBUG oslo_vmware.api [None req-98d4fdad-5b7e-4f6c-913e-53467e1954bd tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Waiting for the task: (returnval){ [ 990.388368] env[63028]: value = "task-2736017" [ 990.388368] env[63028]: _type = "Task" [ 990.388368] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.397943] env[63028]: DEBUG oslo_vmware.api [None req-98d4fdad-5b7e-4f6c-913e-53467e1954bd tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2736017, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.434872] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-a5eede3f-5eda-4dbb-8c63-064af2d03d0d tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] Volume attach. 
Driver type: vmdk {{(pid=63028) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 990.435140] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-a5eede3f-5eda-4dbb-8c63-064af2d03d0d tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-550831', 'volume_id': 'fff41433-1dbe-4075-9b8b-6bae1342802a', 'name': 'volume-fff41433-1dbe-4075-9b8b-6bae1342802a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '8bb61bfa-d44e-4e06-867a-445d9b3db660', 'attached_at': '', 'detached_at': '', 'volume_id': 'fff41433-1dbe-4075-9b8b-6bae1342802a', 'serial': 'fff41433-1dbe-4075-9b8b-6bae1342802a'} {{(pid=63028) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 990.436016] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16964b8f-3c1e-41bb-a314-722d70b4936a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.454881] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-747c4cc2-5158-4806-96e6-c618ad8cd864 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.487395] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-a5eede3f-5eda-4dbb-8c63-064af2d03d0d tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] Reconfiguring VM instance instance-00000050 to attach disk [datastore2] volume-fff41433-1dbe-4075-9b8b-6bae1342802a/volume-fff41433-1dbe-4075-9b8b-6bae1342802a.vmdk or device None with type thin {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 990.490976] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-05735825-e0a8-44de-a903-17e787627fcd {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.510396] env[63028]: DEBUG oslo_vmware.api [None req-a5eede3f-5eda-4dbb-8c63-064af2d03d0d tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Waiting for the task: (returnval){ [ 990.510396] env[63028]: value = "task-2736018" [ 990.510396] env[63028]: _type = "Task" [ 990.510396] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.520705] env[63028]: DEBUG oslo_vmware.api [None req-a5eede3f-5eda-4dbb-8c63-064af2d03d0d tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2736018, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.611366] env[63028]: DEBUG oslo_vmware.api [None req-d36731fd-6b14-4a17-8896-8c5be755220d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736012, 'name': ResetVM_Task} progress is 100%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.622036] env[63028]: DEBUG nova.compute.manager [None req-5304d7dd-e594-479a-82a5-046691037310 tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] [instance: 53b0cf02-f1c8-4219-a58c-f7b5ffbb1ae7] Start building block device mappings for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 990.625940] env[63028]: DEBUG nova.objects.base [None req-459fd01e-3304-4c20-a3b6-e4be4925e19b tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Object Instance<85aafadb-81d6-4687-aed1-fbe829e5f95f> lazy-loaded attributes: resources,numa_topology {{(pid=63028) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 990.683258] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-029bbebf-b712-4d44-b93d-a9ad42e63e3e tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 990.684214] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3fcef70-71ef-4ac3-afdd-6b22afac3c63 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.692387] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-029bbebf-b712-4d44-b93d-a9ad42e63e3e tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 990.695262] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-83fda98a-4339-4751-9620-2b7e014556ad {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.728493] env[63028]: DEBUG nova.network.neutron [None req-512f154c-941d-4be2-85d8-fd73b2fb3370 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] [instance: 3fb46d02-7914-4d08-b63b-f3447ba1b81a] Successfully updated port: eb3a6303-8ef2-4d1d-bcbc-58838b430d84 {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 990.759611] env[63028]: DEBUG oslo_vmware.api [None req-2021c84d-1019-4fb3-b96c-749c278c13a7 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2736014, 'name': ReconfigVM_Task, 'duration_secs': 0.646499} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.760146] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-2021c84d-1019-4fb3-b96c-749c278c13a7 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Reconfigured VM instance instance-0000005a to attach disk [datastore1] d41a1eae-bb89-4222-9466-d86af891c654/d41a1eae-bb89-4222-9466-d86af891c654.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 990.760796] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e5f30b0d-ef0b-42a8-92ec-58c93cc90004 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.767234] env[63028]: DEBUG nova.objects.instance [None req-599839db-ace5-459e-85d1-da82c2874637 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Lazy-loading 'pci_requests' on Instance uuid b77ba7d6-305e-4b60-a4b7-9353c12c3920 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 990.769700] env[63028]: DEBUG oslo_vmware.api [None req-2021c84d-1019-4fb3-b96c-749c278c13a7 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Waiting for the task: (returnval){ [ 990.769700] env[63028]: value = "task-2736020" [ 990.769700] env[63028]: _type = "Task" [ 990.769700] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.775213] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-029bbebf-b712-4d44-b93d-a9ad42e63e3e tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 990.775497] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-029bbebf-b712-4d44-b93d-a9ad42e63e3e tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19] Deleting contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 990.776238] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-029bbebf-b712-4d44-b93d-a9ad42e63e3e tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Deleting the datastore file [datastore2] ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19 {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 990.777118] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-48354357-48c0-4d7a-b6ba-a9f0273946dc {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.783832] env[63028]: DEBUG oslo_vmware.api [None req-2021c84d-1019-4fb3-b96c-749c278c13a7 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2736020, 'name': Rename_Task} progress is 5%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.790574] env[63028]: DEBUG oslo_vmware.api [None req-029bbebf-b712-4d44-b93d-a9ad42e63e3e tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Waiting for the task: (returnval){ [ 990.790574] env[63028]: value = "task-2736021" [ 990.790574] env[63028]: _type = "Task" [ 990.790574] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.803188] env[63028]: DEBUG oslo_vmware.api [None req-029bbebf-b712-4d44-b93d-a9ad42e63e3e tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2736021, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.865584] env[63028]: DEBUG nova.compute.manager [None req-9f9e38f4-3d16-4f1d-b6e1-f2fbbef67673 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] [instance: da88308f-ce62-40af-adae-e38aa506bdd9] Start destroying the instance on the hypervisor. {{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 990.865866] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-9f9e38f4-3d16-4f1d-b6e1-f2fbbef67673 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] [instance: da88308f-ce62-40af-adae-e38aa506bdd9] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 990.872838] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-518a94e9-5503-416a-a5ae-1c9a38bd22af {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.888671] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f9e38f4-3d16-4f1d-b6e1-f2fbbef67673 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] [instance: da88308f-ce62-40af-adae-e38aa506bdd9] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 990.893134] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bc7b0275-303f-4a5f-af73-c946da99bf5a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.907560] env[63028]: DEBUG oslo_vmware.api [None req-98d4fdad-5b7e-4f6c-913e-53467e1954bd tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2736017, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.347718} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.910238] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-98d4fdad-5b7e-4f6c-913e-53467e1954bd tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 990.910622] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-98d4fdad-5b7e-4f6c-913e-53467e1954bd tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: c386c117-e255-4c3b-9a37-011e517277de] Deleted contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 990.910970] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-98d4fdad-5b7e-4f6c-913e-53467e1954bd tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: c386c117-e255-4c3b-9a37-011e517277de] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 990.911334] env[63028]: INFO nova.compute.manager [None req-98d4fdad-5b7e-4f6c-913e-53467e1954bd tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: c386c117-e255-4c3b-9a37-011e517277de] Took 1.14 seconds to destroy the instance on the hypervisor. [ 990.911764] env[63028]: DEBUG oslo.service.loopingcall [None req-98d4fdad-5b7e-4f6c-913e-53467e1954bd tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 990.912078] env[63028]: DEBUG oslo_vmware.api [None req-9f9e38f4-3d16-4f1d-b6e1-f2fbbef67673 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Waiting for the task: (returnval){ [ 990.912078] env[63028]: value = "task-2736022" [ 990.912078] env[63028]: _type = "Task" [ 990.912078] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.912978] env[63028]: DEBUG nova.compute.manager [-] [instance: c386c117-e255-4c3b-9a37-011e517277de] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 990.912978] env[63028]: DEBUG nova.network.neutron [-] [instance: c386c117-e255-4c3b-9a37-011e517277de] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 991.028139] env[63028]: DEBUG oslo_vmware.api [None req-a5eede3f-5eda-4dbb-8c63-064af2d03d0d tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2736018, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.068338] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39510968-416b-4fd4-b7ba-860f39929616 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.075705] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70b313dd-6d56-48cc-9ccb-321b90af2cfe {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.112022] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdbf53db-690f-444f-8432-e392eb9df47d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.117410] env[63028]: DEBUG nova.network.neutron [None req-5304d7dd-e594-479a-82a5-046691037310 tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] [instance: 53b0cf02-f1c8-4219-a58c-f7b5ffbb1ae7] Successfully created port: 5dc30fb0-c128-49d6-a5d1-cd0f53cc9958 {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 991.127525] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c379caa-a2a5-468e-8a20-f8536bc3df12 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.132259] env[63028]: DEBUG oslo_vmware.api [None req-d36731fd-6b14-4a17-8896-8c5be755220d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736012, 'name': ResetVM_Task, 'duration_secs': 1.117747} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.136066] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-d36731fd-6b14-4a17-8896-8c5be755220d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7] Did hard reboot of VM {{(pid=63028) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 991.136528] env[63028]: DEBUG nova.compute.manager [None req-d36731fd-6b14-4a17-8896-8c5be755220d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 991.138116] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b5c4360-9ca0-4492-a0cf-ae1401f77707 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.154120] env[63028]: DEBUG nova.compute.provider_tree [None req-459fd01e-3304-4c20-a3b6-e4be4925e19b tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 991.231898] env[63028]: DEBUG oslo_concurrency.lockutils [None req-512f154c-941d-4be2-85d8-fd73b2fb3370 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] Acquiring lock "refresh_cache-3fb46d02-7914-4d08-b63b-f3447ba1b81a" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 991.232067] env[63028]: DEBUG oslo_concurrency.lockutils [None req-512f154c-941d-4be2-85d8-fd73b2fb3370 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] Acquired lock "refresh_cache-3fb46d02-7914-4d08-b63b-f3447ba1b81a" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 991.232395] env[63028]: DEBUG nova.network.neutron [None req-512f154c-941d-4be2-85d8-fd73b2fb3370 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] [instance: 3fb46d02-7914-4d08-b63b-f3447ba1b81a] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 991.271076] env[63028]: DEBUG nova.objects.base [None req-599839db-ace5-459e-85d1-da82c2874637 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=63028) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 991.271309] env[63028]: DEBUG nova.network.neutron [None req-599839db-ace5-459e-85d1-da82c2874637 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: b77ba7d6-305e-4b60-a4b7-9353c12c3920] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 991.282207] env[63028]: DEBUG oslo_vmware.api [None req-2021c84d-1019-4fb3-b96c-749c278c13a7 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2736020, 'name': Rename_Task, 'duration_secs': 0.18397} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.282483] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-2021c84d-1019-4fb3-b96c-749c278c13a7 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 991.282727] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6a996e6c-9834-412f-b54c-f633171bd15e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.288927] env[63028]: DEBUG oslo_vmware.api [None req-2021c84d-1019-4fb3-b96c-749c278c13a7 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Waiting for the task: (returnval){ [ 991.288927] env[63028]: value = "task-2736023" [ 991.288927] env[63028]: _type = "Task" [ 991.288927] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.306142] env[63028]: DEBUG oslo_vmware.api [None req-029bbebf-b712-4d44-b93d-a9ad42e63e3e tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2736021, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.195615} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.310418] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-029bbebf-b712-4d44-b93d-a9ad42e63e3e tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 991.310819] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-029bbebf-b712-4d44-b93d-a9ad42e63e3e tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19] Deleted contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 991.311062] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-029bbebf-b712-4d44-b93d-a9ad42e63e3e tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 991.313362] env[63028]: DEBUG oslo_vmware.api [None req-2021c84d-1019-4fb3-b96c-749c278c13a7 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2736023, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.341244] env[63028]: INFO nova.scheduler.client.report [None req-029bbebf-b712-4d44-b93d-a9ad42e63e3e tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Deleted allocations for instance ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19 [ 991.344923] env[63028]: DEBUG nova.network.neutron [req-40efe21b-5a1e-4188-be84-a9d1d17820e2 req-0e3e3a0c-312f-4c79-a378-faf4deb2b418 service nova] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Updated VIF entry in instance network info cache for port 2e2d8403-826c-4e24-ba3c-123d444d1fdc. {{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 991.345276] env[63028]: DEBUG nova.network.neutron [req-40efe21b-5a1e-4188-be84-a9d1d17820e2 req-0e3e3a0c-312f-4c79-a378-faf4deb2b418 service nova] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Updating instance_info_cache with network_info: [{"id": "2e2d8403-826c-4e24-ba3c-123d444d1fdc", "address": "fa:16:3e:09:d0:a2", "network": {"id": "2b335013-49f6-4f84-b6b6-33d71818b2b9", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-2106845332-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.232", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "11332c2adbdc41928d4bf084435e2037", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap2e2d8403-82", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 991.367306] env[63028]: DEBUG nova.network.neutron [-] [instance: 455578fa-7468-40dc-8c0a-37ac35e5c0a0] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 991.383445] env[63028]: DEBUG nova.policy [None req-599839db-ace5-459e-85d1-da82c2874637 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1b48f3f2a85945379bdb33bf153bde9c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '25a6457f62d149629c09589feb1a550c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 991.425477] env[63028]: DEBUG oslo_vmware.api [None req-9f9e38f4-3d16-4f1d-b6e1-f2fbbef67673 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Task: {'id': task-2736022, 'name': PowerOffVM_Task, 'duration_secs': 0.351473} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.425728] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f9e38f4-3d16-4f1d-b6e1-f2fbbef67673 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] [instance: da88308f-ce62-40af-adae-e38aa506bdd9] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 991.425937] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-9f9e38f4-3d16-4f1d-b6e1-f2fbbef67673 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] [instance: da88308f-ce62-40af-adae-e38aa506bdd9] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 991.427262] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ff5f4784-5e4b-4595-bff6-eef9e4ddabb5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.486637] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-9f9e38f4-3d16-4f1d-b6e1-f2fbbef67673 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] [instance: da88308f-ce62-40af-adae-e38aa506bdd9] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 991.486911] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-9f9e38f4-3d16-4f1d-b6e1-f2fbbef67673 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] [instance: da88308f-ce62-40af-adae-e38aa506bdd9] Deleting contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 991.487130] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f9e38f4-3d16-4f1d-b6e1-f2fbbef67673 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Deleting the datastore file [datastore1] da88308f-ce62-40af-adae-e38aa506bdd9 {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 991.487411] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dd8c9e09-524f-4c68-8551-eb8c9bc43bef {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.494282] env[63028]: DEBUG oslo_vmware.api [None req-9f9e38f4-3d16-4f1d-b6e1-f2fbbef67673 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Waiting for the task: (returnval){ [ 991.494282] env[63028]: value = "task-2736025" [ 991.494282] env[63028]: _type = "Task" [ 991.494282] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.505058] env[63028]: DEBUG oslo_vmware.api [None req-9f9e38f4-3d16-4f1d-b6e1-f2fbbef67673 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Task: {'id': task-2736025, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.519869] env[63028]: DEBUG oslo_vmware.api [None req-a5eede3f-5eda-4dbb-8c63-064af2d03d0d tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2736018, 'name': ReconfigVM_Task, 'duration_secs': 0.591195} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.520293] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-a5eede3f-5eda-4dbb-8c63-064af2d03d0d tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] Reconfigured VM instance instance-00000050 to attach disk [datastore2] volume-fff41433-1dbe-4075-9b8b-6bae1342802a/volume-fff41433-1dbe-4075-9b8b-6bae1342802a.vmdk or device None with type thin {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 991.526197] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e9721f54-2a27-437d-8097-348ceb52cb7f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.544260] env[63028]: DEBUG oslo_vmware.api [None req-a5eede3f-5eda-4dbb-8c63-064af2d03d0d tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Waiting for the task: (returnval){ [ 991.544260] env[63028]: value = "task-2736026" [ 991.544260] env[63028]: _type = "Task" [ 991.544260] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.555070] env[63028]: DEBUG oslo_vmware.api [None req-a5eede3f-5eda-4dbb-8c63-064af2d03d0d tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2736026, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.640931] env[63028]: DEBUG nova.compute.manager [None req-5304d7dd-e594-479a-82a5-046691037310 tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] [instance: 53b0cf02-f1c8-4219-a58c-f7b5ffbb1ae7] Start spawning the instance on the hypervisor. 
{{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 991.655489] env[63028]: DEBUG nova.scheduler.client.report [None req-459fd01e-3304-4c20-a3b6-e4be4925e19b tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 991.672523] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d36731fd-6b14-4a17-8896-8c5be755220d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Lock "c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 5.733s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 991.685249] env[63028]: DEBUG nova.virt.hardware [None req-5304d7dd-e594-479a-82a5-046691037310 tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 991.685552] env[63028]: DEBUG nova.virt.hardware [None req-5304d7dd-e594-479a-82a5-046691037310 tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 991.685708] env[63028]: DEBUG nova.virt.hardware [None req-5304d7dd-e594-479a-82a5-046691037310 tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 991.685883] env[63028]: DEBUG nova.virt.hardware [None req-5304d7dd-e594-479a-82a5-046691037310 tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 991.686062] env[63028]: DEBUG nova.virt.hardware [None req-5304d7dd-e594-479a-82a5-046691037310 tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] Image pref 0:0:0 
{{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 991.686262] env[63028]: DEBUG nova.virt.hardware [None req-5304d7dd-e594-479a-82a5-046691037310 tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 991.686489] env[63028]: DEBUG nova.virt.hardware [None req-5304d7dd-e594-479a-82a5-046691037310 tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 991.687896] env[63028]: DEBUG nova.virt.hardware [None req-5304d7dd-e594-479a-82a5-046691037310 tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 991.687896] env[63028]: DEBUG nova.virt.hardware [None req-5304d7dd-e594-479a-82a5-046691037310 tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 991.687896] env[63028]: DEBUG nova.virt.hardware [None req-5304d7dd-e594-479a-82a5-046691037310 tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 991.687896] env[63028]: DEBUG nova.virt.hardware [None req-5304d7dd-e594-479a-82a5-046691037310 tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 991.688397] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34cb877e-053a-453c-8ddf-60e032dac438 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.697333] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7000cded-0e0f-429a-91a1-90fabf09d94b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.793503] env[63028]: DEBUG nova.network.neutron [-] [instance: c386c117-e255-4c3b-9a37-011e517277de] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 991.798890] env[63028]: DEBUG oslo_vmware.api [None req-2021c84d-1019-4fb3-b96c-749c278c13a7 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2736023, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.812573] env[63028]: DEBUG nova.network.neutron [None req-512f154c-941d-4be2-85d8-fd73b2fb3370 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] [instance: 3fb46d02-7914-4d08-b63b-f3447ba1b81a] Instance cache missing network info. {{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 991.848550] env[63028]: DEBUG oslo_concurrency.lockutils [None req-029bbebf-b712-4d44-b93d-a9ad42e63e3e tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 991.852086] env[63028]: DEBUG oslo_concurrency.lockutils [req-40efe21b-5a1e-4188-be84-a9d1d17820e2 req-0e3e3a0c-312f-4c79-a378-faf4deb2b418 service nova] Releasing lock "refresh_cache-85aafadb-81d6-4687-aed1-fbe829e5f95f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 991.852426] env[63028]: DEBUG nova.compute.manager [req-40efe21b-5a1e-4188-be84-a9d1d17820e2 req-0e3e3a0c-312f-4c79-a378-faf4deb2b418 service nova] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Received event network-vif-plugged-c5f1d585-d624-4525-a5b2-132b18bf9378 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 991.852659] env[63028]: DEBUG oslo_concurrency.lockutils [req-40efe21b-5a1e-4188-be84-a9d1d17820e2 req-0e3e3a0c-312f-4c79-a378-faf4deb2b418 service nova] Acquiring lock "d41a1eae-bb89-4222-9466-d86af891c654-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 991.852894] env[63028]: DEBUG oslo_concurrency.lockutils [req-40efe21b-5a1e-4188-be84-a9d1d17820e2 req-0e3e3a0c-312f-4c79-a378-faf4deb2b418 service nova] Lock "d41a1eae-bb89-4222-9466-d86af891c654-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 991.853105] env[63028]: DEBUG oslo_concurrency.lockutils [req-40efe21b-5a1e-4188-be84-a9d1d17820e2 req-0e3e3a0c-312f-4c79-a378-faf4deb2b418 service nova] Lock "d41a1eae-bb89-4222-9466-d86af891c654-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 991.853299] env[63028]: DEBUG nova.compute.manager [req-40efe21b-5a1e-4188-be84-a9d1d17820e2 req-0e3e3a0c-312f-4c79-a378-faf4deb2b418 service nova] [instance: d41a1eae-bb89-4222-9466-d86af891c654] No waiting events found dispatching network-vif-plugged-c5f1d585-d624-4525-a5b2-132b18bf9378 {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 991.853474] env[63028]: WARNING nova.compute.manager [req-40efe21b-5a1e-4188-be84-a9d1d17820e2 req-0e3e3a0c-312f-4c79-a378-faf4deb2b418 service nova] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Received unexpected event network-vif-plugged-c5f1d585-d624-4525-a5b2-132b18bf9378 for instance with vm_state building and task_state spawning. 
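The lock entries in this stretch (the per-instance "-events" lock, the "refresh_cache-..." locks, and the "compute_resources" lock) come from oslo.concurrency's lockutils, which logs the acquire, the time waited, and the time held around each critical section. A minimal sketch of the two usage patterns that produce such lines follows; the function names are invented for illustration and the exact debug wording depends on the oslo.concurrency version in use.

from oslo_concurrency import lockutils

def refresh_network_cache(instance_uuid):
    # lockutils.lock() is a context manager: entering it emits the
    # "Acquiring lock ..." / "acquired ... waited Ns" debug lines and leaving
    # it emits "released ... held Ns", as seen in the entries above.
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        pass  # rebuild and store the instance's network_info here

@lockutils.synchronized('compute_resources')
def update_usage():
    # The decorator form serializes every call on the named lock, matching the
    # "compute_resources" acquisitions logged by the resource tracker.
    pass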
[ 991.853641] env[63028]: DEBUG nova.compute.manager [req-40efe21b-5a1e-4188-be84-a9d1d17820e2 req-0e3e3a0c-312f-4c79-a378-faf4deb2b418 service nova] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Received event network-changed-c5f1d585-d624-4525-a5b2-132b18bf9378 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 991.853849] env[63028]: DEBUG nova.compute.manager [req-40efe21b-5a1e-4188-be84-a9d1d17820e2 req-0e3e3a0c-312f-4c79-a378-faf4deb2b418 service nova] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Refreshing instance network info cache due to event network-changed-c5f1d585-d624-4525-a5b2-132b18bf9378. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 991.855259] env[63028]: DEBUG oslo_concurrency.lockutils [req-40efe21b-5a1e-4188-be84-a9d1d17820e2 req-0e3e3a0c-312f-4c79-a378-faf4deb2b418 service nova] Acquiring lock "refresh_cache-d41a1eae-bb89-4222-9466-d86af891c654" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 991.855561] env[63028]: DEBUG oslo_concurrency.lockutils [req-40efe21b-5a1e-4188-be84-a9d1d17820e2 req-0e3e3a0c-312f-4c79-a378-faf4deb2b418 service nova] Acquired lock "refresh_cache-d41a1eae-bb89-4222-9466-d86af891c654" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 991.855829] env[63028]: DEBUG nova.network.neutron [req-40efe21b-5a1e-4188-be84-a9d1d17820e2 req-0e3e3a0c-312f-4c79-a378-faf4deb2b418 service nova] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Refreshing network info cache for port c5f1d585-d624-4525-a5b2-132b18bf9378 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 991.870364] env[63028]: INFO nova.compute.manager [-] [instance: 455578fa-7468-40dc-8c0a-37ac35e5c0a0] Took 1.73 seconds to deallocate network for instance. [ 992.004959] env[63028]: DEBUG oslo_vmware.api [None req-9f9e38f4-3d16-4f1d-b6e1-f2fbbef67673 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Task: {'id': task-2736025, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.172833} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.005248] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f9e38f4-3d16-4f1d-b6e1-f2fbbef67673 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 992.005441] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-9f9e38f4-3d16-4f1d-b6e1-f2fbbef67673 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] [instance: da88308f-ce62-40af-adae-e38aa506bdd9] Deleted contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 992.005622] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-9f9e38f4-3d16-4f1d-b6e1-f2fbbef67673 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] [instance: da88308f-ce62-40af-adae-e38aa506bdd9] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 992.005799] env[63028]: INFO nova.compute.manager [None req-9f9e38f4-3d16-4f1d-b6e1-f2fbbef67673 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] [instance: da88308f-ce62-40af-adae-e38aa506bdd9] Took 1.14 seconds to destroy the instance on the hypervisor. [ 992.006061] env[63028]: DEBUG oslo.service.loopingcall [None req-9f9e38f4-3d16-4f1d-b6e1-f2fbbef67673 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 992.006256] env[63028]: DEBUG nova.compute.manager [-] [instance: da88308f-ce62-40af-adae-e38aa506bdd9] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 992.006350] env[63028]: DEBUG nova.network.neutron [-] [instance: da88308f-ce62-40af-adae-e38aa506bdd9] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 992.056136] env[63028]: DEBUG oslo_vmware.api [None req-a5eede3f-5eda-4dbb-8c63-064af2d03d0d tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2736026, 'name': ReconfigVM_Task, 'duration_secs': 0.17088} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.056136] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-a5eede3f-5eda-4dbb-8c63-064af2d03d0d tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-550831', 'volume_id': 'fff41433-1dbe-4075-9b8b-6bae1342802a', 'name': 'volume-fff41433-1dbe-4075-9b8b-6bae1342802a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '8bb61bfa-d44e-4e06-867a-445d9b3db660', 'attached_at': '', 'detached_at': '', 'volume_id': 'fff41433-1dbe-4075-9b8b-6bae1342802a', 'serial': 'fff41433-1dbe-4075-9b8b-6bae1342802a'} {{(pid=63028) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 992.289040] env[63028]: DEBUG oslo_concurrency.lockutils [None req-459fd01e-3304-4c20-a3b6-e4be4925e19b tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.551s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 992.289040] env[63028]: DEBUG oslo_concurrency.lockutils [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.072s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 992.289040] env[63028]: DEBUG nova.objects.instance [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Lazy-loading 'pci_requests' on Instance uuid 63524cd8-81de-419f-bb07-0326f3cb393f {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 992.302248] env[63028]: INFO nova.compute.manager [-] [instance: c386c117-e255-4c3b-9a37-011e517277de] Took 1.39 seconds to deallocate network for instance. [ 992.306229] env[63028]: DEBUG oslo_vmware.api [None req-2021c84d-1019-4fb3-b96c-749c278c13a7 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2736023, 'name': PowerOnVM_Task, 'duration_secs': 0.708876} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.310813] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-2021c84d-1019-4fb3-b96c-749c278c13a7 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 992.310813] env[63028]: INFO nova.compute.manager [None req-2021c84d-1019-4fb3-b96c-749c278c13a7 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Took 8.33 seconds to spawn the instance on the hypervisor. 
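The PowerOnVM_Task entries above (progress 33% then 66%, then "completed successfully" with a duration_secs value) are the trace of oslo.vmware's task polling: the SOAP method returns a task reference and wait_for_task() polls it until it reaches a terminal state. A rough sketch of that call pattern; here session is assumed to be an oslo_vmware.api.VMwareAPISession and vm_ref a VirtualMachine managed-object reference obtained elsewhere, neither taken from this log.

def power_on(session, vm_ref):
    """Power on a VM and block until the vCenter task completes."""
    # invoke_api() issues the SOAP call ("Invoking VirtualMachine.PowerOnVM_Task ...")
    # and returns a task reference; wait_for_task() then polls it, producing the
    # "progress is N%" lines until the task finishes or raising if it fails.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    return session.wait_for_task(task)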
[ 992.310813] env[63028]: DEBUG nova.compute.manager [None req-2021c84d-1019-4fb3-b96c-749c278c13a7 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 992.312033] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9129e1ee-20fe-493b-9ce4-b9dbe66e02d1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.370141] env[63028]: DEBUG nova.network.neutron [None req-512f154c-941d-4be2-85d8-fd73b2fb3370 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] [instance: 3fb46d02-7914-4d08-b63b-f3447ba1b81a] Updating instance_info_cache with network_info: [{"id": "eb3a6303-8ef2-4d1d-bcbc-58838b430d84", "address": "fa:16:3e:1e:e4:b6", "network": {"id": "47c482bc-2ff1-431d-8910-0bf36def79a2", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.247", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "96661ac8d4f04d6e97eea4809b444133", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56136ef6-99d7-4562-9a9f-d66fec951c5c", "external-id": "nsx-vlan-transportzone-32", "segmentation_id": 32, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeb3a6303-8e", "ovs_interfaceid": "eb3a6303-8ef2-4d1d-bcbc-58838b430d84", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 992.381123] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9079c8c2-b569-4e87-bc06-f3d8e91deb05 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 992.704674] env[63028]: DEBUG nova.objects.instance [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Lazy-loading 'numa_topology' on Instance uuid 63524cd8-81de-419f-bb07-0326f3cb393f {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 992.704674] env[63028]: DEBUG oslo_concurrency.lockutils [None req-459fd01e-3304-4c20-a3b6-e4be4925e19b tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Lock "85aafadb-81d6-4687-aed1-fbe829e5f95f" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 30.280s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 992.704674] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee 
tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Lock "85aafadb-81d6-4687-aed1-fbe829e5f95f" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 3.075s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 992.704674] env[63028]: INFO nova.compute.manager [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Unshelving [ 992.806789] env[63028]: DEBUG nova.network.neutron [req-40efe21b-5a1e-4188-be84-a9d1d17820e2 req-0e3e3a0c-312f-4c79-a378-faf4deb2b418 service nova] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Updated VIF entry in instance network info cache for port c5f1d585-d624-4525-a5b2-132b18bf9378. {{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 992.806789] env[63028]: DEBUG nova.network.neutron [req-40efe21b-5a1e-4188-be84-a9d1d17820e2 req-0e3e3a0c-312f-4c79-a378-faf4deb2b418 service nova] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Updating instance_info_cache with network_info: [{"id": "c5f1d585-d624-4525-a5b2-132b18bf9378", "address": "fa:16:3e:93:da:98", "network": {"id": "49670a42-3caa-4492-8b32-f16f3fe77f4b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1158317332-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b4dcaef840f940bda057d0371cdc5adb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4df917f7-847a-4c0e-b0e3-69a52e4a1554", "external-id": "cl2-zone-457", "segmentation_id": 457, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc5f1d585-d6", "ovs_interfaceid": "c5f1d585-d624-4525-a5b2-132b18bf9378", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 992.811965] env[63028]: DEBUG oslo_concurrency.lockutils [None req-98d4fdad-5b7e-4f6c-913e-53467e1954bd tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 992.837046] env[63028]: INFO nova.compute.manager [None req-2021c84d-1019-4fb3-b96c-749c278c13a7 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Took 38.59 seconds to build instance. 
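The cache updates above write a list of VIF dicts into the instance info cache (id, MAC address, and a nested network -> subnets -> ips structure). Purely as an illustration of that structure as it appears in the log, a small helper that pulls the fixed and floating addresses back out of such an entry:

def addresses_from_network_info(network_info):
    """Return {vif_id: {'fixed': [...], 'floating': [...]}} for a cache entry."""
    result = {}
    for vif in network_info:
        fixed, floating = [], []
        for subnet in vif.get('network', {}).get('subnets', []):
            for ip in subnet.get('ips', []):
                fixed.append(ip['address'])
                floating.extend(f['address'] for f in ip.get('floating_ips', []))
        result[vif['id']] = {'fixed': fixed, 'floating': floating}
    return result

# For the c5f1d585-... VIF cached just above, this yields
# {'c5f1d585-...': {'fixed': ['192.168.128.9'], 'floating': []}}.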
[ 992.875122] env[63028]: DEBUG oslo_concurrency.lockutils [None req-512f154c-941d-4be2-85d8-fd73b2fb3370 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] Releasing lock "refresh_cache-3fb46d02-7914-4d08-b63b-f3447ba1b81a" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 992.875122] env[63028]: DEBUG nova.compute.manager [None req-512f154c-941d-4be2-85d8-fd73b2fb3370 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] [instance: 3fb46d02-7914-4d08-b63b-f3447ba1b81a] Instance network_info: |[{"id": "eb3a6303-8ef2-4d1d-bcbc-58838b430d84", "address": "fa:16:3e:1e:e4:b6", "network": {"id": "47c482bc-2ff1-431d-8910-0bf36def79a2", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.247", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "96661ac8d4f04d6e97eea4809b444133", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56136ef6-99d7-4562-9a9f-d66fec951c5c", "external-id": "nsx-vlan-transportzone-32", "segmentation_id": 32, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeb3a6303-8e", "ovs_interfaceid": "eb3a6303-8ef2-4d1d-bcbc-58838b430d84", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 992.875122] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-512f154c-941d-4be2-85d8-fd73b2fb3370 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] [instance: 3fb46d02-7914-4d08-b63b-f3447ba1b81a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1e:e4:b6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '56136ef6-99d7-4562-9a9f-d66fec951c5c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'eb3a6303-8ef2-4d1d-bcbc-58838b430d84', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 992.885294] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-512f154c-941d-4be2-85d8-fd73b2fb3370 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] Creating folder: Project (b7b16d4124ad42aeb35b3ec3ebe1b92b). Parent ref: group-v550570. {{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 992.885973] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6db70a0f-42c2-4596-8882-3d055b13b263 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.898385] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-512f154c-941d-4be2-85d8-fd73b2fb3370 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] Created folder: Project (b7b16d4124ad42aeb35b3ec3ebe1b92b) in parent group-v550570. 
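The Folder.CreateFolder invocation here builds the per-project folder, and the following entries repeat the same pattern for an Instances folder beneath it. A sketch of how such a call is typically issued through an oslo.vmware session; the helper name is invented and Nova's actual vm_util handling (e.g. of duplicate-name faults) may differ.

def ensure_child_folder(session, parent_ref, name):
    # Logged as "Invoking Folder.CreateFolder with opID=..."; the vSphere
    # CreateFolder method is called on the parent folder reference and returns
    # the new child folder's managed-object reference.
    return session.invoke_api(session.vim, 'CreateFolder', parent_ref, name=name)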
[ 992.898597] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-512f154c-941d-4be2-85d8-fd73b2fb3370 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] Creating folder: Instances. Parent ref: group-v550833. {{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 992.899959] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d35f7a9b-4a99-4451-9460-cc560af2cc5c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.909702] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-512f154c-941d-4be2-85d8-fd73b2fb3370 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] Created folder: Instances in parent group-v550833. [ 992.911097] env[63028]: DEBUG oslo.service.loopingcall [None req-512f154c-941d-4be2-85d8-fd73b2fb3370 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 992.911097] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3fb46d02-7914-4d08-b63b-f3447ba1b81a] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 992.911097] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-443a62a2-59bc-4211-8a87-fa1fe5c684f1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.932681] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 992.932681] env[63028]: value = "task-2736029" [ 992.932681] env[63028]: _type = "Task" [ 992.932681] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.941818] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2736029, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.117733] env[63028]: DEBUG nova.objects.instance [None req-a5eede3f-5eda-4dbb-8c63-064af2d03d0d tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Lazy-loading 'flavor' on Instance uuid 8bb61bfa-d44e-4e06-867a-445d9b3db660 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 993.138133] env[63028]: DEBUG nova.network.neutron [-] [instance: da88308f-ce62-40af-adae-e38aa506bdd9] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 993.182372] env[63028]: INFO nova.compute.claims [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 993.233509] env[63028]: DEBUG nova.compute.manager [req-f61e0e23-5664-4e28-9b7a-c7069bfe9b96 req-a1ed1437-e624-43b0-b688-f1ac56871967 service nova] [instance: c0693e4c-30b2-4eda-be1e-f6186d78038b] Received event network-changed-7c42e931-e162-4201-8483-8606a86e0dff {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 993.233509] env[63028]: DEBUG nova.compute.manager [req-f61e0e23-5664-4e28-9b7a-c7069bfe9b96 req-a1ed1437-e624-43b0-b688-f1ac56871967 service nova] [instance: c0693e4c-30b2-4eda-be1e-f6186d78038b] Refreshing instance network info cache due to event network-changed-7c42e931-e162-4201-8483-8606a86e0dff. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 993.233679] env[63028]: DEBUG oslo_concurrency.lockutils [req-f61e0e23-5664-4e28-9b7a-c7069bfe9b96 req-a1ed1437-e624-43b0-b688-f1ac56871967 service nova] Acquiring lock "refresh_cache-c0693e4c-30b2-4eda-be1e-f6186d78038b" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 993.233860] env[63028]: DEBUG oslo_concurrency.lockutils [req-f61e0e23-5664-4e28-9b7a-c7069bfe9b96 req-a1ed1437-e624-43b0-b688-f1ac56871967 service nova] Acquired lock "refresh_cache-c0693e4c-30b2-4eda-be1e-f6186d78038b" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 993.236549] env[63028]: DEBUG nova.network.neutron [req-f61e0e23-5664-4e28-9b7a-c7069bfe9b96 req-a1ed1437-e624-43b0-b688-f1ac56871967 service nova] [instance: c0693e4c-30b2-4eda-be1e-f6186d78038b] Refreshing network info cache for port 7c42e931-e162-4201-8483-8606a86e0dff {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 993.309243] env[63028]: DEBUG oslo_concurrency.lockutils [req-40efe21b-5a1e-4188-be84-a9d1d17820e2 req-0e3e3a0c-312f-4c79-a378-faf4deb2b418 service nova] Releasing lock "refresh_cache-d41a1eae-bb89-4222-9466-d86af891c654" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 993.338541] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2021c84d-1019-4fb3-b96c-749c278c13a7 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Lock "d41a1eae-bb89-4222-9466-d86af891c654" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 40.112s {{(pid=63028) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 993.451435] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2736029, 'name': CreateVM_Task} progress is 25%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.452476] env[63028]: DEBUG nova.network.neutron [None req-5304d7dd-e594-479a-82a5-046691037310 tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] [instance: 53b0cf02-f1c8-4219-a58c-f7b5ffbb1ae7] Successfully updated port: 5dc30fb0-c128-49d6-a5d1-cd0f53cc9958 {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 993.590120] env[63028]: DEBUG nova.network.neutron [None req-599839db-ace5-459e-85d1-da82c2874637 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: b77ba7d6-305e-4b60-a4b7-9353c12c3920] Successfully updated port: d0308a48-57ab-41f7-bbab-6871ed89c5f2 {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 993.622600] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a5eede3f-5eda-4dbb-8c63-064af2d03d0d tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Lock "8bb61bfa-d44e-4e06-867a-445d9b3db660" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.806s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 993.640832] env[63028]: INFO nova.compute.manager [-] [instance: da88308f-ce62-40af-adae-e38aa506bdd9] Took 1.63 seconds to deallocate network for instance. [ 993.704642] env[63028]: DEBUG nova.compute.utils [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 993.944273] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2736029, 'name': CreateVM_Task, 'duration_secs': 0.718612} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.947396] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3fb46d02-7914-4d08-b63b-f3447ba1b81a] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 993.948368] env[63028]: DEBUG oslo_concurrency.lockutils [None req-512f154c-941d-4be2-85d8-fd73b2fb3370 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 993.948550] env[63028]: DEBUG oslo_concurrency.lockutils [None req-512f154c-941d-4be2-85d8-fd73b2fb3370 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 993.949277] env[63028]: DEBUG oslo_concurrency.lockutils [None req-512f154c-941d-4be2-85d8-fd73b2fb3370 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 993.949277] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bd2d7ad0-6aa7-4239-8c45-cd541dd9e5d9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.957158] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5304d7dd-e594-479a-82a5-046691037310 tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] Acquiring lock "refresh_cache-53b0cf02-f1c8-4219-a58c-f7b5ffbb1ae7" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 993.957158] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5304d7dd-e594-479a-82a5-046691037310 tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] Acquired lock "refresh_cache-53b0cf02-f1c8-4219-a58c-f7b5ffbb1ae7" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 993.957663] env[63028]: DEBUG nova.network.neutron [None req-5304d7dd-e594-479a-82a5-046691037310 tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] [instance: 53b0cf02-f1c8-4219-a58c-f7b5ffbb1ae7] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 993.962548] env[63028]: DEBUG oslo_vmware.api [None req-512f154c-941d-4be2-85d8-fd73b2fb3370 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] Waiting for the task: (returnval){ [ 993.962548] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52433779-f833-db49-bcd0-ea2292f0058e" [ 993.962548] env[63028]: _type = "Task" [ 993.962548] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 993.978450] env[63028]: DEBUG oslo_vmware.api [None req-512f154c-941d-4be2-85d8-fd73b2fb3370 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52433779-f833-db49-bcd0-ea2292f0058e, 'name': SearchDatastore_Task, 'duration_secs': 0.015295} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.979415] env[63028]: DEBUG oslo_concurrency.lockutils [None req-512f154c-941d-4be2-85d8-fd73b2fb3370 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 993.980233] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-512f154c-941d-4be2-85d8-fd73b2fb3370 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] [instance: 3fb46d02-7914-4d08-b63b-f3447ba1b81a] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 993.980619] env[63028]: DEBUG oslo_concurrency.lockutils [None req-512f154c-941d-4be2-85d8-fd73b2fb3370 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 993.980959] env[63028]: DEBUG oslo_concurrency.lockutils [None req-512f154c-941d-4be2-85d8-fd73b2fb3370 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 993.981310] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-512f154c-941d-4be2-85d8-fd73b2fb3370 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 993.985044] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8b5e39b5-fddd-4677-8203-cbb0eb231ae9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.001591] env[63028]: DEBUG oslo_concurrency.lockutils [None req-339f9283-11da-4056-ad23-de15d610fd36 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Acquiring lock "ed872f21-c2c4-4597-8c9e-9f8d2202b707" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 994.002111] env[63028]: DEBUG oslo_concurrency.lockutils [None req-339f9283-11da-4056-ad23-de15d610fd36 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Lock 
"ed872f21-c2c4-4597-8c9e-9f8d2202b707" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 994.002442] env[63028]: DEBUG oslo_concurrency.lockutils [None req-339f9283-11da-4056-ad23-de15d610fd36 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Acquiring lock "ed872f21-c2c4-4597-8c9e-9f8d2202b707-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 994.002759] env[63028]: DEBUG oslo_concurrency.lockutils [None req-339f9283-11da-4056-ad23-de15d610fd36 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Lock "ed872f21-c2c4-4597-8c9e-9f8d2202b707-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 994.003173] env[63028]: DEBUG oslo_concurrency.lockutils [None req-339f9283-11da-4056-ad23-de15d610fd36 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Lock "ed872f21-c2c4-4597-8c9e-9f8d2202b707-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 994.006948] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-512f154c-941d-4be2-85d8-fd73b2fb3370 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 994.007282] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-512f154c-941d-4be2-85d8-fd73b2fb3370 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 994.008253] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-43246747-83c4-4967-b6ca-a46eb8f58d9f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.012565] env[63028]: INFO nova.compute.manager [None req-339f9283-11da-4056-ad23-de15d610fd36 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: ed872f21-c2c4-4597-8c9e-9f8d2202b707] Terminating instance [ 994.022108] env[63028]: DEBUG nova.compute.manager [req-dba32073-32c7-4b24-be6f-b812f29002e3 req-340ae715-fdca-499e-8de1-3714db3f0fd4 service nova] [instance: 3fb46d02-7914-4d08-b63b-f3447ba1b81a] Received event network-vif-plugged-eb3a6303-8ef2-4d1d-bcbc-58838b430d84 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 994.022108] env[63028]: DEBUG oslo_concurrency.lockutils [req-dba32073-32c7-4b24-be6f-b812f29002e3 req-340ae715-fdca-499e-8de1-3714db3f0fd4 service nova] Acquiring lock "3fb46d02-7914-4d08-b63b-f3447ba1b81a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 994.022108] env[63028]: DEBUG oslo_concurrency.lockutils [req-dba32073-32c7-4b24-be6f-b812f29002e3 req-340ae715-fdca-499e-8de1-3714db3f0fd4 service nova] Lock "3fb46d02-7914-4d08-b63b-f3447ba1b81a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 994.022108] env[63028]: DEBUG oslo_concurrency.lockutils [req-dba32073-32c7-4b24-be6f-b812f29002e3 req-340ae715-fdca-499e-8de1-3714db3f0fd4 service nova] Lock "3fb46d02-7914-4d08-b63b-f3447ba1b81a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 994.022341] env[63028]: DEBUG nova.compute.manager [req-dba32073-32c7-4b24-be6f-b812f29002e3 req-340ae715-fdca-499e-8de1-3714db3f0fd4 service nova] [instance: 3fb46d02-7914-4d08-b63b-f3447ba1b81a] No waiting events found dispatching network-vif-plugged-eb3a6303-8ef2-4d1d-bcbc-58838b430d84 {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 994.022372] env[63028]: WARNING nova.compute.manager [req-dba32073-32c7-4b24-be6f-b812f29002e3 req-340ae715-fdca-499e-8de1-3714db3f0fd4 service nova] [instance: 3fb46d02-7914-4d08-b63b-f3447ba1b81a] Received unexpected event network-vif-plugged-eb3a6303-8ef2-4d1d-bcbc-58838b430d84 for instance with vm_state building and task_state spawning. 
[ 994.022562] env[63028]: DEBUG nova.compute.manager [req-dba32073-32c7-4b24-be6f-b812f29002e3 req-340ae715-fdca-499e-8de1-3714db3f0fd4 service nova] [instance: 3fb46d02-7914-4d08-b63b-f3447ba1b81a] Received event network-changed-eb3a6303-8ef2-4d1d-bcbc-58838b430d84 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 994.022711] env[63028]: DEBUG nova.compute.manager [req-dba32073-32c7-4b24-be6f-b812f29002e3 req-340ae715-fdca-499e-8de1-3714db3f0fd4 service nova] [instance: 3fb46d02-7914-4d08-b63b-f3447ba1b81a] Refreshing instance network info cache due to event network-changed-eb3a6303-8ef2-4d1d-bcbc-58838b430d84. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 994.022893] env[63028]: DEBUG oslo_concurrency.lockutils [req-dba32073-32c7-4b24-be6f-b812f29002e3 req-340ae715-fdca-499e-8de1-3714db3f0fd4 service nova] Acquiring lock "refresh_cache-3fb46d02-7914-4d08-b63b-f3447ba1b81a" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 994.023042] env[63028]: DEBUG oslo_concurrency.lockutils [req-dba32073-32c7-4b24-be6f-b812f29002e3 req-340ae715-fdca-499e-8de1-3714db3f0fd4 service nova] Acquired lock "refresh_cache-3fb46d02-7914-4d08-b63b-f3447ba1b81a" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 994.023197] env[63028]: DEBUG nova.network.neutron [req-dba32073-32c7-4b24-be6f-b812f29002e3 req-340ae715-fdca-499e-8de1-3714db3f0fd4 service nova] [instance: 3fb46d02-7914-4d08-b63b-f3447ba1b81a] Refreshing network info cache for port eb3a6303-8ef2-4d1d-bcbc-58838b430d84 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 994.034855] env[63028]: DEBUG oslo_vmware.api [None req-512f154c-941d-4be2-85d8-fd73b2fb3370 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] Waiting for the task: (returnval){ [ 994.034855] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]529ce25c-b034-5ca7-cd24-4ea0cbc4d3b7" [ 994.034855] env[63028]: _type = "Task" [ 994.034855] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.045382] env[63028]: DEBUG oslo_vmware.api [None req-512f154c-941d-4be2-85d8-fd73b2fb3370 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]529ce25c-b034-5ca7-cd24-4ea0cbc4d3b7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.082906] env[63028]: DEBUG nova.network.neutron [req-f61e0e23-5664-4e28-9b7a-c7069bfe9b96 req-a1ed1437-e624-43b0-b688-f1ac56871967 service nova] [instance: c0693e4c-30b2-4eda-be1e-f6186d78038b] Updated VIF entry in instance network info cache for port 7c42e931-e162-4201-8483-8606a86e0dff. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 994.083373] env[63028]: DEBUG nova.network.neutron [req-f61e0e23-5664-4e28-9b7a-c7069bfe9b96 req-a1ed1437-e624-43b0-b688-f1ac56871967 service nova] [instance: c0693e4c-30b2-4eda-be1e-f6186d78038b] Updating instance_info_cache with network_info: [{"id": "7c42e931-e162-4201-8483-8606a86e0dff", "address": "fa:16:3e:1a:2a:74", "network": {"id": "063de6d9-43e6-4adb-88dc-d4fe17058488", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-659483097-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.156", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68de7445caeb4381b9e68c685ccb5e0b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b356db78-99c7-4464-822c-fc7e193f7878", "external-id": "nsx-vlan-transportzone-231", "segmentation_id": 231, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c42e931-e1", "ovs_interfaceid": "7c42e931-e162-4201-8483-8606a86e0dff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 994.089443] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0789b53d-2026-4f29-83f4-709668fafec0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.099523] env[63028]: DEBUG oslo_concurrency.lockutils [None req-599839db-ace5-459e-85d1-da82c2874637 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Acquiring lock "refresh_cache-b77ba7d6-305e-4b60-a4b7-9353c12c3920" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 994.099523] env[63028]: DEBUG oslo_concurrency.lockutils [None req-599839db-ace5-459e-85d1-da82c2874637 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Acquired lock "refresh_cache-b77ba7d6-305e-4b60-a4b7-9353c12c3920" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 994.099523] env[63028]: DEBUG nova.network.neutron [None req-599839db-ace5-459e-85d1-da82c2874637 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: b77ba7d6-305e-4b60-a4b7-9353c12c3920] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 994.101494] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8e5dd71-4bd7-47a2-94fe-076c59676750 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.139934] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddf5f70c-ec48-48f2-8870-27cfada0aad6 {{(pid=63028) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.149528] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9540f5a7-7c10-4437-af3e-0fe8c46ac092 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.154909] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9f9e38f4-3d16-4f1d-b6e1-f2fbbef67673 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 994.166266] env[63028]: DEBUG nova.compute.provider_tree [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 994.208219] env[63028]: INFO nova.virt.block_device [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Booting with volume 996ec8ec-5318-4963-9384-330f1e01190d at /dev/sdb [ 994.251109] env[63028]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-59729624-a91c-482e-b57e-39afb21629a9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.260837] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db31b547-a481-482f-93b6-c8cd18ef91ad {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.294759] env[63028]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-39066d72-7113-42e2-bc64-3fa6c5900452 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.304186] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-120a1297-8f54-4186-a041-266418b69d2f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.339941] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61451bf0-675a-4aae-8422-8122356bdadf {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.346608] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5a251b8-a645-4e1a-aefe-07649dc5eca1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.359436] env[63028]: DEBUG nova.virt.block_device [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Updating existing volume attachment record: ec214d8b-a579-41a9-8205-e58ea8474d64 {{(pid=63028) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 994.511774] env[63028]: DEBUG 
nova.network.neutron [None req-5304d7dd-e594-479a-82a5-046691037310 tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] [instance: 53b0cf02-f1c8-4219-a58c-f7b5ffbb1ae7] Instance cache missing network info. {{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 994.535565] env[63028]: DEBUG nova.compute.manager [None req-339f9283-11da-4056-ad23-de15d610fd36 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: ed872f21-c2c4-4597-8c9e-9f8d2202b707] Start destroying the instance on the hypervisor. {{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 994.536037] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-339f9283-11da-4056-ad23-de15d610fd36 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: ed872f21-c2c4-4597-8c9e-9f8d2202b707] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 994.540176] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f663263-3b3b-4fd8-915b-95ceca2bca7f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.555255] env[63028]: DEBUG oslo_vmware.api [None req-512f154c-941d-4be2-85d8-fd73b2fb3370 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]529ce25c-b034-5ca7-cd24-4ea0cbc4d3b7, 'name': SearchDatastore_Task, 'duration_secs': 0.012038} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 994.558514] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-339f9283-11da-4056-ad23-de15d610fd36 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: ed872f21-c2c4-4597-8c9e-9f8d2202b707] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 994.558957] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-417679ac-f621-459b-8b89-50b0fb4186e3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.562532] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-43141483-6308-44a3-941b-aba7cbe2a73e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.567273] env[63028]: DEBUG oslo_vmware.api [None req-512f154c-941d-4be2-85d8-fd73b2fb3370 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] Waiting for the task: (returnval){ [ 994.567273] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52b88de1-98bd-f8fd-5016-d21d01506c5f" [ 994.567273] env[63028]: _type = "Task" [ 994.567273] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.572103] env[63028]: DEBUG oslo_vmware.api [None req-339f9283-11da-4056-ad23-de15d610fd36 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Waiting for the task: (returnval){ [ 994.572103] env[63028]: value = "task-2736031" [ 994.572103] env[63028]: _type = "Task" [ 994.572103] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.581096] env[63028]: DEBUG oslo_vmware.api [None req-512f154c-941d-4be2-85d8-fd73b2fb3370 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52b88de1-98bd-f8fd-5016-d21d01506c5f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.586721] env[63028]: DEBUG oslo_concurrency.lockutils [req-f61e0e23-5664-4e28-9b7a-c7069bfe9b96 req-a1ed1437-e624-43b0-b688-f1ac56871967 service nova] Releasing lock "refresh_cache-c0693e4c-30b2-4eda-be1e-f6186d78038b" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 994.587105] env[63028]: DEBUG nova.compute.manager [req-f61e0e23-5664-4e28-9b7a-c7069bfe9b96 req-a1ed1437-e624-43b0-b688-f1ac56871967 service nova] [instance: ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19] Received event network-vif-unplugged-db82c13e-74f6-431e-9184-2375c4a0bbbc {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 994.587251] env[63028]: DEBUG oslo_concurrency.lockutils [req-f61e0e23-5664-4e28-9b7a-c7069bfe9b96 req-a1ed1437-e624-43b0-b688-f1ac56871967 service nova] Acquiring lock "ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 994.587484] env[63028]: DEBUG oslo_concurrency.lockutils [req-f61e0e23-5664-4e28-9b7a-c7069bfe9b96 req-a1ed1437-e624-43b0-b688-f1ac56871967 service nova] Lock "ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 994.587668] env[63028]: DEBUG oslo_concurrency.lockutils [req-f61e0e23-5664-4e28-9b7a-c7069bfe9b96 req-a1ed1437-e624-43b0-b688-f1ac56871967 service nova] Lock "ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 994.587842] env[63028]: DEBUG nova.compute.manager [req-f61e0e23-5664-4e28-9b7a-c7069bfe9b96 req-a1ed1437-e624-43b0-b688-f1ac56871967 service nova] [instance: ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19] No waiting events found dispatching network-vif-unplugged-db82c13e-74f6-431e-9184-2375c4a0bbbc {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 994.588048] env[63028]: WARNING nova.compute.manager [req-f61e0e23-5664-4e28-9b7a-c7069bfe9b96 req-a1ed1437-e624-43b0-b688-f1ac56871967 service nova] [instance: ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19] Received unexpected event 
network-vif-unplugged-db82c13e-74f6-431e-9184-2375c4a0bbbc for instance with vm_state shelved_offloaded and task_state None. [ 994.588232] env[63028]: DEBUG nova.compute.manager [req-f61e0e23-5664-4e28-9b7a-c7069bfe9b96 req-a1ed1437-e624-43b0-b688-f1ac56871967 service nova] [instance: ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19] Received event network-changed-db82c13e-74f6-431e-9184-2375c4a0bbbc {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 994.588392] env[63028]: DEBUG nova.compute.manager [req-f61e0e23-5664-4e28-9b7a-c7069bfe9b96 req-a1ed1437-e624-43b0-b688-f1ac56871967 service nova] [instance: ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19] Refreshing instance network info cache due to event network-changed-db82c13e-74f6-431e-9184-2375c4a0bbbc. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 994.588598] env[63028]: DEBUG oslo_concurrency.lockutils [req-f61e0e23-5664-4e28-9b7a-c7069bfe9b96 req-a1ed1437-e624-43b0-b688-f1ac56871967 service nova] Acquiring lock "refresh_cache-ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 994.588738] env[63028]: DEBUG oslo_concurrency.lockutils [req-f61e0e23-5664-4e28-9b7a-c7069bfe9b96 req-a1ed1437-e624-43b0-b688-f1ac56871967 service nova] Acquired lock "refresh_cache-ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 994.588912] env[63028]: DEBUG nova.network.neutron [req-f61e0e23-5664-4e28-9b7a-c7069bfe9b96 req-a1ed1437-e624-43b0-b688-f1ac56871967 service nova] [instance: ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19] Refreshing network info cache for port db82c13e-74f6-431e-9184-2375c4a0bbbc {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 994.590210] env[63028]: DEBUG oslo_vmware.api [None req-339f9283-11da-4056-ad23-de15d610fd36 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2736031, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.669548] env[63028]: DEBUG nova.scheduler.client.report [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 994.695891] env[63028]: WARNING nova.network.neutron [None req-599839db-ace5-459e-85d1-da82c2874637 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: b77ba7d6-305e-4b60-a4b7-9353c12c3920] c2f1496c-e3fd-43db-a032-12cdacdb4e46 already exists in list: networks containing: ['c2f1496c-e3fd-43db-a032-12cdacdb4e46']. 
ignoring it [ 994.695891] env[63028]: WARNING nova.network.neutron [None req-599839db-ace5-459e-85d1-da82c2874637 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: b77ba7d6-305e-4b60-a4b7-9353c12c3920] c2f1496c-e3fd-43db-a032-12cdacdb4e46 already exists in list: networks containing: ['c2f1496c-e3fd-43db-a032-12cdacdb4e46']. ignoring it [ 994.908576] env[63028]: INFO nova.compute.manager [None req-32f1703d-6f07-46ed-86d8-68f7aa481601 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] Rescuing [ 994.908855] env[63028]: DEBUG oslo_concurrency.lockutils [None req-32f1703d-6f07-46ed-86d8-68f7aa481601 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Acquiring lock "refresh_cache-8bb61bfa-d44e-4e06-867a-445d9b3db660" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 994.909309] env[63028]: DEBUG oslo_concurrency.lockutils [None req-32f1703d-6f07-46ed-86d8-68f7aa481601 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Acquired lock "refresh_cache-8bb61bfa-d44e-4e06-867a-445d9b3db660" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 994.909309] env[63028]: DEBUG nova.network.neutron [None req-32f1703d-6f07-46ed-86d8-68f7aa481601 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 994.930095] env[63028]: DEBUG nova.network.neutron [None req-5304d7dd-e594-479a-82a5-046691037310 tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] [instance: 53b0cf02-f1c8-4219-a58c-f7b5ffbb1ae7] Updating instance_info_cache with network_info: [{"id": "5dc30fb0-c128-49d6-a5d1-cd0f53cc9958", "address": "fa:16:3e:8c:5e:8c", "network": {"id": "62aa2116-8683-4ddd-a3e5-0b23a102cee0", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-251424440-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e09fff22285147c29cb2bb096bde3921", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6edb8eae-1113-49d0-84f7-9fd9f82b26fb", "external-id": "nsx-vlan-transportzone-493", "segmentation_id": 493, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5dc30fb0-c1", "ovs_interfaceid": "5dc30fb0-c128-49d6-a5d1-cd0f53cc9958", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 995.083318] env[63028]: DEBUG oslo_vmware.api [None req-512f154c-941d-4be2-85d8-fd73b2fb3370 
tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52b88de1-98bd-f8fd-5016-d21d01506c5f, 'name': SearchDatastore_Task, 'duration_secs': 0.011437} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.084161] env[63028]: DEBUG oslo_concurrency.lockutils [None req-512f154c-941d-4be2-85d8-fd73b2fb3370 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 995.084511] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-512f154c-941d-4be2-85d8-fd73b2fb3370 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] 3fb46d02-7914-4d08-b63b-f3447ba1b81a/3fb46d02-7914-4d08-b63b-f3447ba1b81a.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 995.084934] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-07636e43-9f9d-41d1-a6d2-a067ce6d3ff9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.091180] env[63028]: DEBUG oslo_vmware.api [None req-339f9283-11da-4056-ad23-de15d610fd36 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2736031, 'name': PowerOffVM_Task, 'duration_secs': 0.233096} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.094483] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-339f9283-11da-4056-ad23-de15d610fd36 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: ed872f21-c2c4-4597-8c9e-9f8d2202b707] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 995.095718] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-339f9283-11da-4056-ad23-de15d610fd36 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: ed872f21-c2c4-4597-8c9e-9f8d2202b707] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 995.097544] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fe4dddee-95c4-4484-809e-e6b7d00d9b7e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.100491] env[63028]: DEBUG oslo_vmware.api [None req-512f154c-941d-4be2-85d8-fd73b2fb3370 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] Waiting for the task: (returnval){ [ 995.100491] env[63028]: value = "task-2736034" [ 995.100491] env[63028]: _type = "Task" [ 995.100491] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.109870] env[63028]: DEBUG oslo_vmware.api [None req-512f154c-941d-4be2-85d8-fd73b2fb3370 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] Task: {'id': task-2736034, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.116075] env[63028]: DEBUG nova.network.neutron [req-dba32073-32c7-4b24-be6f-b812f29002e3 req-340ae715-fdca-499e-8de1-3714db3f0fd4 service nova] [instance: 3fb46d02-7914-4d08-b63b-f3447ba1b81a] Updated VIF entry in instance network info cache for port eb3a6303-8ef2-4d1d-bcbc-58838b430d84. {{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 995.116576] env[63028]: DEBUG nova.network.neutron [req-dba32073-32c7-4b24-be6f-b812f29002e3 req-340ae715-fdca-499e-8de1-3714db3f0fd4 service nova] [instance: 3fb46d02-7914-4d08-b63b-f3447ba1b81a] Updating instance_info_cache with network_info: [{"id": "eb3a6303-8ef2-4d1d-bcbc-58838b430d84", "address": "fa:16:3e:1e:e4:b6", "network": {"id": "47c482bc-2ff1-431d-8910-0bf36def79a2", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.247", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "96661ac8d4f04d6e97eea4809b444133", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56136ef6-99d7-4562-9a9f-d66fec951c5c", "external-id": "nsx-vlan-transportzone-32", "segmentation_id": 32, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeb3a6303-8e", "ovs_interfaceid": "eb3a6303-8ef2-4d1d-bcbc-58838b430d84", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 995.169332] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-339f9283-11da-4056-ad23-de15d610fd36 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: ed872f21-c2c4-4597-8c9e-9f8d2202b707] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 995.169655] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-339f9283-11da-4056-ad23-de15d610fd36 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: ed872f21-c2c4-4597-8c9e-9f8d2202b707] Deleting contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 995.169813] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-339f9283-11da-4056-ad23-de15d610fd36 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Deleting the datastore file [datastore2] ed872f21-c2c4-4597-8c9e-9f8d2202b707 {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 995.170102] env[63028]: DEBUG oslo_vmware.service [-] Invoking 
FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-84d2e17b-7e6b-4033-9f8e-bbe71e071353 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.177960] env[63028]: DEBUG oslo_concurrency.lockutils [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.010s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 995.180544] env[63028]: DEBUG oslo_vmware.api [None req-339f9283-11da-4056-ad23-de15d610fd36 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Waiting for the task: (returnval){ [ 995.180544] env[63028]: value = "task-2736036" [ 995.180544] env[63028]: _type = "Task" [ 995.180544] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.181077] env[63028]: DEBUG oslo_concurrency.lockutils [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 4.937s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 995.181077] env[63028]: DEBUG nova.objects.instance [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63028) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 995.193401] env[63028]: DEBUG oslo_vmware.api [None req-339f9283-11da-4056-ad23-de15d610fd36 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2736036, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.255241] env[63028]: INFO nova.network.neutron [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Updating port 296dfd9e-84e1-4ea8-bd17-28920a6a048b with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 995.435588] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5304d7dd-e594-479a-82a5-046691037310 tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] Releasing lock "refresh_cache-53b0cf02-f1c8-4219-a58c-f7b5ffbb1ae7" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 995.435911] env[63028]: DEBUG nova.compute.manager [None req-5304d7dd-e594-479a-82a5-046691037310 tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] [instance: 53b0cf02-f1c8-4219-a58c-f7b5ffbb1ae7] Instance network_info: |[{"id": "5dc30fb0-c128-49d6-a5d1-cd0f53cc9958", "address": "fa:16:3e:8c:5e:8c", "network": {"id": "62aa2116-8683-4ddd-a3e5-0b23a102cee0", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-251424440-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e09fff22285147c29cb2bb096bde3921", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6edb8eae-1113-49d0-84f7-9fd9f82b26fb", "external-id": "nsx-vlan-transportzone-493", "segmentation_id": 493, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5dc30fb0-c1", "ovs_interfaceid": "5dc30fb0-c128-49d6-a5d1-cd0f53cc9958", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 995.436580] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-5304d7dd-e594-479a-82a5-046691037310 tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] [instance: 53b0cf02-f1c8-4219-a58c-f7b5ffbb1ae7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8c:5e:8c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6edb8eae-1113-49d0-84f7-9fd9f82b26fb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5dc30fb0-c128-49d6-a5d1-cd0f53cc9958', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 995.444699] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-5304d7dd-e594-479a-82a5-046691037310 tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] Creating folder: Project (e09fff22285147c29cb2bb096bde3921). Parent ref: group-v550570. 
{{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 995.445602] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c5ca04ca-04e5-40c1-b1a8-27f09df46a13 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.458419] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-5304d7dd-e594-479a-82a5-046691037310 tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] Created folder: Project (e09fff22285147c29cb2bb096bde3921) in parent group-v550570. [ 995.458657] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-5304d7dd-e594-479a-82a5-046691037310 tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] Creating folder: Instances. Parent ref: group-v550838. {{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 995.459565] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d8075816-f4a8-403e-af8d-3c96e0456ea4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.470270] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-5304d7dd-e594-479a-82a5-046691037310 tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] Created folder: Instances in parent group-v550838. [ 995.470270] env[63028]: DEBUG oslo.service.loopingcall [None req-5304d7dd-e594-479a-82a5-046691037310 tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 995.470270] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 53b0cf02-f1c8-4219-a58c-f7b5ffbb1ae7] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 995.470270] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-792dfca5-9dc5-487f-9584-8d0c3c3bb140 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.494300] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 995.494300] env[63028]: value = "task-2736039" [ 995.494300] env[63028]: _type = "Task" [ 995.494300] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.501683] env[63028]: DEBUG oslo_concurrency.lockutils [None req-56e62f6c-517c-4ef7-810a-239a4e51c428 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Acquiring lock "ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 995.512271] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2736039, 'name': CreateVM_Task} progress is 5%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.611566] env[63028]: DEBUG oslo_vmware.api [None req-512f154c-941d-4be2-85d8-fd73b2fb3370 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] Task: {'id': task-2736034, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.620767] env[63028]: DEBUG oslo_concurrency.lockutils [req-dba32073-32c7-4b24-be6f-b812f29002e3 req-340ae715-fdca-499e-8de1-3714db3f0fd4 service nova] Releasing lock "refresh_cache-3fb46d02-7914-4d08-b63b-f3447ba1b81a" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 995.621256] env[63028]: DEBUG nova.compute.manager [req-dba32073-32c7-4b24-be6f-b812f29002e3 req-340ae715-fdca-499e-8de1-3714db3f0fd4 service nova] [instance: 455578fa-7468-40dc-8c0a-37ac35e5c0a0] Received event network-vif-deleted-3cae8ebc-a19e-401f-aa80-28da2e6bcd42 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 995.621566] env[63028]: DEBUG nova.compute.manager [req-dba32073-32c7-4b24-be6f-b812f29002e3 req-340ae715-fdca-499e-8de1-3714db3f0fd4 service nova] [instance: c386c117-e255-4c3b-9a37-011e517277de] Received event network-vif-deleted-0e62de6a-f8ad-4958-81a2-9ff79a6bea03 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 995.643735] env[63028]: DEBUG nova.network.neutron [req-f61e0e23-5664-4e28-9b7a-c7069bfe9b96 req-a1ed1437-e624-43b0-b688-f1ac56871967 service nova] [instance: ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19] Updated VIF entry in instance network info cache for port db82c13e-74f6-431e-9184-2375c4a0bbbc. {{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 995.644189] env[63028]: DEBUG nova.network.neutron [req-f61e0e23-5664-4e28-9b7a-c7069bfe9b96 req-a1ed1437-e624-43b0-b688-f1ac56871967 service nova] [instance: ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19] Updating instance_info_cache with network_info: [{"id": "db82c13e-74f6-431e-9184-2375c4a0bbbc", "address": "fa:16:3e:1d:fe:e0", "network": {"id": "9c32515c-66bf-4a5f-9011-975fe2f6b264", "bridge": null, "label": "tempest-DeleteServersTestJSON-1101331936-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8efc6d89903c454eb39136a76e0adef5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tapdb82c13e-74", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 995.698797] env[63028]: DEBUG oslo_vmware.api [None req-339f9283-11da-4056-ad23-de15d610fd36 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2736036, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.480248} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.702140] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-339f9283-11da-4056-ad23-de15d610fd36 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 995.702403] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-339f9283-11da-4056-ad23-de15d610fd36 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: ed872f21-c2c4-4597-8c9e-9f8d2202b707] Deleted contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 995.702585] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-339f9283-11da-4056-ad23-de15d610fd36 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: ed872f21-c2c4-4597-8c9e-9f8d2202b707] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 995.702757] env[63028]: INFO nova.compute.manager [None req-339f9283-11da-4056-ad23-de15d610fd36 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: ed872f21-c2c4-4597-8c9e-9f8d2202b707] Took 1.17 seconds to destroy the instance on the hypervisor. [ 995.703000] env[63028]: DEBUG oslo.service.loopingcall [None req-339f9283-11da-4056-ad23-de15d610fd36 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 995.703209] env[63028]: DEBUG nova.compute.manager [-] [instance: ed872f21-c2c4-4597-8c9e-9f8d2202b707] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 995.703300] env[63028]: DEBUG nova.network.neutron [-] [instance: ed872f21-c2c4-4597-8c9e-9f8d2202b707] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 995.837677] env[63028]: DEBUG nova.network.neutron [None req-599839db-ace5-459e-85d1-da82c2874637 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: b77ba7d6-305e-4b60-a4b7-9353c12c3920] Updating instance_info_cache with network_info: [{"id": "9efd2ef2-d319-4038-ab28-44a46bd597d8", "address": "fa:16:3e:90:60:0c", "network": {"id": "c2f1496c-e3fd-43db-a032-12cdacdb4e46", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1287620143-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.220", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "25a6457f62d149629c09589feb1a550c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9efd2ef2-d3", "ovs_interfaceid": "9efd2ef2-d319-4038-ab28-44a46bd597d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "98722f90-f2d2-4a4a-9e68-ad5c32b18435", "address": "fa:16:3e:ab:1c:75", "network": {"id": "c2f1496c-e3fd-43db-a032-12cdacdb4e46", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1287620143-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "25a6457f62d149629c09589feb1a550c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98722f90-f2", "ovs_interfaceid": "98722f90-f2d2-4a4a-9e68-ad5c32b18435", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "d0308a48-57ab-41f7-bbab-6871ed89c5f2", "address": "fa:16:3e:c8:52:ef", "network": {"id": "c2f1496c-e3fd-43db-a032-12cdacdb4e46", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1287620143-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": 
[], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "25a6457f62d149629c09589feb1a550c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd0308a48-57", "ovs_interfaceid": "d0308a48-57ab-41f7-bbab-6871ed89c5f2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 996.004544] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2736039, 'name': CreateVM_Task, 'duration_secs': 0.415014} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.004743] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 53b0cf02-f1c8-4219-a58c-f7b5ffbb1ae7] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 996.005480] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5304d7dd-e594-479a-82a5-046691037310 tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 996.005666] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5304d7dd-e594-479a-82a5-046691037310 tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 996.006013] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5304d7dd-e594-479a-82a5-046691037310 tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 996.006313] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a94388c4-407a-4ce2-b1f0-acec1d757a9b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.011153] env[63028]: DEBUG oslo_vmware.api [None req-5304d7dd-e594-479a-82a5-046691037310 tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] Waiting for the task: (returnval){ [ 996.011153] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52ae7300-468d-a73d-e7b7-d20a1aaf7948" [ 996.011153] env[63028]: _type = "Task" [ 996.011153] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.020849] env[63028]: DEBUG oslo_vmware.api [None req-5304d7dd-e594-479a-82a5-046691037310 tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52ae7300-468d-a73d-e7b7-d20a1aaf7948, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.038979] env[63028]: DEBUG oslo_concurrency.lockutils [None req-618d358e-08e2-4af1-b725-9b296f03d89a tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Acquiring lock "56e6ade9-893b-4c85-b0b8-e9f7b12cbad6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 996.039228] env[63028]: DEBUG oslo_concurrency.lockutils [None req-618d358e-08e2-4af1-b725-9b296f03d89a tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Lock "56e6ade9-893b-4c85-b0b8-e9f7b12cbad6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 996.041219] env[63028]: DEBUG nova.network.neutron [None req-32f1703d-6f07-46ed-86d8-68f7aa481601 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] Updating instance_info_cache with network_info: [{"id": "b0d7c3ce-e883-4ccc-80ac-87d06acc7bb4", "address": "fa:16:3e:cb:aa:63", "network": {"id": "53257b54-f197-4a4e-94bb-23ad6c6b7353", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-575661432-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.146", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e2304ce21bf141cab94fb6c342653812", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0954fad3-d24d-496c-83e6-a09d3cb556fc", "external-id": "nsx-vlan-transportzone-216", "segmentation_id": 216, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb0d7c3ce-e8", "ovs_interfaceid": "b0d7c3ce-e883-4ccc-80ac-87d06acc7bb4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 996.116821] env[63028]: DEBUG oslo_vmware.api [None req-512f154c-941d-4be2-85d8-fd73b2fb3370 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] Task: {'id': task-2736034, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.55147} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.117109] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-512f154c-941d-4be2-85d8-fd73b2fb3370 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] 3fb46d02-7914-4d08-b63b-f3447ba1b81a/3fb46d02-7914-4d08-b63b-f3447ba1b81a.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 996.117380] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-512f154c-941d-4be2-85d8-fd73b2fb3370 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] [instance: 3fb46d02-7914-4d08-b63b-f3447ba1b81a] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 996.117660] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f60a3a80-a600-4017-b118-4781c7392e75 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.127885] env[63028]: DEBUG oslo_vmware.api [None req-512f154c-941d-4be2-85d8-fd73b2fb3370 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] Waiting for the task: (returnval){ [ 996.127885] env[63028]: value = "task-2736040" [ 996.127885] env[63028]: _type = "Task" [ 996.127885] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.136497] env[63028]: DEBUG oslo_vmware.api [None req-512f154c-941d-4be2-85d8-fd73b2fb3370 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] Task: {'id': task-2736040, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.149422] env[63028]: DEBUG oslo_concurrency.lockutils [req-f61e0e23-5664-4e28-9b7a-c7069bfe9b96 req-a1ed1437-e624-43b0-b688-f1ac56871967 service nova] Releasing lock "refresh_cache-ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 996.193109] env[63028]: DEBUG oslo_concurrency.lockutils [None req-925c90aa-25f7-4adb-b869-1178457c8d8e tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.012s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 996.195131] env[63028]: DEBUG oslo_concurrency.lockutils [None req-029bbebf-b712-4d44-b93d-a9ad42e63e3e tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.346s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 996.195131] env[63028]: DEBUG nova.objects.instance [None req-029bbebf-b712-4d44-b93d-a9ad42e63e3e tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Lazy-loading 'resources' on Instance uuid ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 996.341227] env[63028]: DEBUG oslo_concurrency.lockutils [None req-599839db-ace5-459e-85d1-da82c2874637 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Releasing lock "refresh_cache-b77ba7d6-305e-4b60-a4b7-9353c12c3920" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 996.341914] env[63028]: DEBUG oslo_concurrency.lockutils [None req-599839db-ace5-459e-85d1-da82c2874637 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Acquiring lock "b77ba7d6-305e-4b60-a4b7-9353c12c3920" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 996.342103] env[63028]: DEBUG oslo_concurrency.lockutils [None req-599839db-ace5-459e-85d1-da82c2874637 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Acquired lock "b77ba7d6-305e-4b60-a4b7-9353c12c3920" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 996.342947] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fe2a695-d0ce-4dcb-bf0b-f5a834de6388 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.360795] env[63028]: DEBUG nova.virt.hardware [None req-599839db-ace5-459e-85d1-da82c2874637 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Getting desirable topologies for flavor 
Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 996.361119] env[63028]: DEBUG nova.virt.hardware [None req-599839db-ace5-459e-85d1-da82c2874637 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 996.361288] env[63028]: DEBUG nova.virt.hardware [None req-599839db-ace5-459e-85d1-da82c2874637 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 996.361482] env[63028]: DEBUG nova.virt.hardware [None req-599839db-ace5-459e-85d1-da82c2874637 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 996.361626] env[63028]: DEBUG nova.virt.hardware [None req-599839db-ace5-459e-85d1-da82c2874637 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 996.361770] env[63028]: DEBUG nova.virt.hardware [None req-599839db-ace5-459e-85d1-da82c2874637 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 996.361983] env[63028]: DEBUG nova.virt.hardware [None req-599839db-ace5-459e-85d1-da82c2874637 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 996.362160] env[63028]: DEBUG nova.virt.hardware [None req-599839db-ace5-459e-85d1-da82c2874637 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 996.362330] env[63028]: DEBUG nova.virt.hardware [None req-599839db-ace5-459e-85d1-da82c2874637 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 996.362491] env[63028]: DEBUG nova.virt.hardware [None req-599839db-ace5-459e-85d1-da82c2874637 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Possible topologies 
[VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 996.362664] env[63028]: DEBUG nova.virt.hardware [None req-599839db-ace5-459e-85d1-da82c2874637 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 996.369513] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-599839db-ace5-459e-85d1-da82c2874637 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: b77ba7d6-305e-4b60-a4b7-9353c12c3920] Reconfiguring VM to attach interface {{(pid=63028) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 996.369910] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cd2695a4-9ce1-4c2d-a697-4a90129427ba {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.388091] env[63028]: DEBUG oslo_vmware.api [None req-599839db-ace5-459e-85d1-da82c2874637 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Waiting for the task: (returnval){ [ 996.388091] env[63028]: value = "task-2736041" [ 996.388091] env[63028]: _type = "Task" [ 996.388091] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.396173] env[63028]: DEBUG oslo_vmware.api [None req-599839db-ace5-459e-85d1-da82c2874637 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2736041, 'name': ReconfigVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.521654] env[63028]: DEBUG oslo_vmware.api [None req-5304d7dd-e594-479a-82a5-046691037310 tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52ae7300-468d-a73d-e7b7-d20a1aaf7948, 'name': SearchDatastore_Task, 'duration_secs': 0.010567} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.521946] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5304d7dd-e594-479a-82a5-046691037310 tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 996.522198] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-5304d7dd-e594-479a-82a5-046691037310 tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] [instance: 53b0cf02-f1c8-4219-a58c-f7b5ffbb1ae7] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 996.522434] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5304d7dd-e594-479a-82a5-046691037310 tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 996.522579] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5304d7dd-e594-479a-82a5-046691037310 tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 996.522756] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-5304d7dd-e594-479a-82a5-046691037310 tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 996.523080] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-03344d79-58de-4827-b7b0-15c3c44f2bc7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.531424] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-5304d7dd-e594-479a-82a5-046691037310 tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 996.531598] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-5304d7dd-e594-479a-82a5-046691037310 tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 996.532373] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c023da7d-4481-408e-acd1-6948a730b43d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.537410] env[63028]: DEBUG oslo_vmware.api [None req-5304d7dd-e594-479a-82a5-046691037310 tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] Waiting for the task: (returnval){ [ 996.537410] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]525e0770-abe1-acb7-18d2-c3a2c8a0f236" [ 996.537410] env[63028]: _type = "Task" [ 996.537410] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.543539] env[63028]: DEBUG nova.compute.manager [None req-618d358e-08e2-4af1-b725-9b296f03d89a tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 56e6ade9-893b-4c85-b0b8-e9f7b12cbad6] Starting instance... {{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 996.546157] env[63028]: DEBUG oslo_concurrency.lockutils [None req-32f1703d-6f07-46ed-86d8-68f7aa481601 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Releasing lock "refresh_cache-8bb61bfa-d44e-4e06-867a-445d9b3db660" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 996.551393] env[63028]: DEBUG oslo_vmware.api [None req-5304d7dd-e594-479a-82a5-046691037310 tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]525e0770-abe1-acb7-18d2-c3a2c8a0f236, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.638098] env[63028]: DEBUG oslo_vmware.api [None req-512f154c-941d-4be2-85d8-fd73b2fb3370 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] Task: {'id': task-2736040, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.256039} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.638386] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-512f154c-941d-4be2-85d8-fd73b2fb3370 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] [instance: 3fb46d02-7914-4d08-b63b-f3447ba1b81a] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 996.639176] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ffd49ad-f0b7-487e-b94b-02fe303d45fd {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.655406] env[63028]: DEBUG nova.network.neutron [-] [instance: ed872f21-c2c4-4597-8c9e-9f8d2202b707] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 996.665214] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-512f154c-941d-4be2-85d8-fd73b2fb3370 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] [instance: 3fb46d02-7914-4d08-b63b-f3447ba1b81a] Reconfiguring VM instance instance-0000005b to attach disk [datastore2] 3fb46d02-7914-4d08-b63b-f3447ba1b81a/3fb46d02-7914-4d08-b63b-f3447ba1b81a.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 996.665761] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4baad7ff-46d1-4f07-a6fa-1de52d6f755b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.686836] env[63028]: DEBUG oslo_vmware.api [None req-512f154c-941d-4be2-85d8-fd73b2fb3370 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] Waiting for the task: (returnval){ [ 996.686836] env[63028]: value = "task-2736042" [ 996.686836] env[63028]: _type = "Task" [ 996.686836] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.695821] env[63028]: DEBUG oslo_vmware.api [None req-512f154c-941d-4be2-85d8-fd73b2fb3370 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] Task: {'id': task-2736042, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.698794] env[63028]: DEBUG nova.objects.instance [None req-029bbebf-b712-4d44-b93d-a9ad42e63e3e tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Lazy-loading 'numa_topology' on Instance uuid ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 996.814661] env[63028]: DEBUG nova.compute.manager [req-f87dfe65-655a-4feb-bb84-375d0ac28c6e req-49616ce6-ee16-4cf6-9ab2-c4bd8ea300a9 service nova] [instance: da88308f-ce62-40af-adae-e38aa506bdd9] Received event network-vif-deleted-63f3891b-02ff-4d0f-9fb9-6fff4d3b71b5 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 996.816037] env[63028]: DEBUG nova.compute.manager [req-f87dfe65-655a-4feb-bb84-375d0ac28c6e req-49616ce6-ee16-4cf6-9ab2-c4bd8ea300a9 service nova] [instance: 53b0cf02-f1c8-4219-a58c-f7b5ffbb1ae7] Received event network-vif-plugged-5dc30fb0-c128-49d6-a5d1-cd0f53cc9958 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 996.816037] env[63028]: DEBUG oslo_concurrency.lockutils [req-f87dfe65-655a-4feb-bb84-375d0ac28c6e req-49616ce6-ee16-4cf6-9ab2-c4bd8ea300a9 service nova] Acquiring lock "53b0cf02-f1c8-4219-a58c-f7b5ffbb1ae7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 996.816037] env[63028]: DEBUG oslo_concurrency.lockutils [req-f87dfe65-655a-4feb-bb84-375d0ac28c6e req-49616ce6-ee16-4cf6-9ab2-c4bd8ea300a9 service nova] Lock "53b0cf02-f1c8-4219-a58c-f7b5ffbb1ae7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 996.816037] env[63028]: DEBUG oslo_concurrency.lockutils [req-f87dfe65-655a-4feb-bb84-375d0ac28c6e req-49616ce6-ee16-4cf6-9ab2-c4bd8ea300a9 service nova] Lock "53b0cf02-f1c8-4219-a58c-f7b5ffbb1ae7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 996.816037] env[63028]: DEBUG nova.compute.manager [req-f87dfe65-655a-4feb-bb84-375d0ac28c6e req-49616ce6-ee16-4cf6-9ab2-c4bd8ea300a9 service nova] [instance: 53b0cf02-f1c8-4219-a58c-f7b5ffbb1ae7] No waiting events found dispatching network-vif-plugged-5dc30fb0-c128-49d6-a5d1-cd0f53cc9958 {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 996.816037] env[63028]: WARNING nova.compute.manager [req-f87dfe65-655a-4feb-bb84-375d0ac28c6e req-49616ce6-ee16-4cf6-9ab2-c4bd8ea300a9 service nova] [instance: 53b0cf02-f1c8-4219-a58c-f7b5ffbb1ae7] Received unexpected event network-vif-plugged-5dc30fb0-c128-49d6-a5d1-cd0f53cc9958 for instance with vm_state building and task_state spawning. 
[ 996.816037] env[63028]: DEBUG nova.compute.manager [req-f87dfe65-655a-4feb-bb84-375d0ac28c6e req-49616ce6-ee16-4cf6-9ab2-c4bd8ea300a9 service nova] [instance: b77ba7d6-305e-4b60-a4b7-9353c12c3920] Received event network-vif-plugged-d0308a48-57ab-41f7-bbab-6871ed89c5f2 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 996.816037] env[63028]: DEBUG oslo_concurrency.lockutils [req-f87dfe65-655a-4feb-bb84-375d0ac28c6e req-49616ce6-ee16-4cf6-9ab2-c4bd8ea300a9 service nova] Acquiring lock "b77ba7d6-305e-4b60-a4b7-9353c12c3920-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 996.816351] env[63028]: DEBUG oslo_concurrency.lockutils [req-f87dfe65-655a-4feb-bb84-375d0ac28c6e req-49616ce6-ee16-4cf6-9ab2-c4bd8ea300a9 service nova] Lock "b77ba7d6-305e-4b60-a4b7-9353c12c3920-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 996.816351] env[63028]: DEBUG oslo_concurrency.lockutils [req-f87dfe65-655a-4feb-bb84-375d0ac28c6e req-49616ce6-ee16-4cf6-9ab2-c4bd8ea300a9 service nova] Lock "b77ba7d6-305e-4b60-a4b7-9353c12c3920-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 996.816438] env[63028]: DEBUG nova.compute.manager [req-f87dfe65-655a-4feb-bb84-375d0ac28c6e req-49616ce6-ee16-4cf6-9ab2-c4bd8ea300a9 service nova] [instance: b77ba7d6-305e-4b60-a4b7-9353c12c3920] No waiting events found dispatching network-vif-plugged-d0308a48-57ab-41f7-bbab-6871ed89c5f2 {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 996.816909] env[63028]: WARNING nova.compute.manager [req-f87dfe65-655a-4feb-bb84-375d0ac28c6e req-49616ce6-ee16-4cf6-9ab2-c4bd8ea300a9 service nova] [instance: b77ba7d6-305e-4b60-a4b7-9353c12c3920] Received unexpected event network-vif-plugged-d0308a48-57ab-41f7-bbab-6871ed89c5f2 for instance with vm_state active and task_state None. [ 996.816909] env[63028]: DEBUG nova.compute.manager [req-f87dfe65-655a-4feb-bb84-375d0ac28c6e req-49616ce6-ee16-4cf6-9ab2-c4bd8ea300a9 service nova] [instance: 53b0cf02-f1c8-4219-a58c-f7b5ffbb1ae7] Received event network-changed-5dc30fb0-c128-49d6-a5d1-cd0f53cc9958 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 996.816909] env[63028]: DEBUG nova.compute.manager [req-f87dfe65-655a-4feb-bb84-375d0ac28c6e req-49616ce6-ee16-4cf6-9ab2-c4bd8ea300a9 service nova] [instance: 53b0cf02-f1c8-4219-a58c-f7b5ffbb1ae7] Refreshing instance network info cache due to event network-changed-5dc30fb0-c128-49d6-a5d1-cd0f53cc9958. 
{{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 996.817076] env[63028]: DEBUG oslo_concurrency.lockutils [req-f87dfe65-655a-4feb-bb84-375d0ac28c6e req-49616ce6-ee16-4cf6-9ab2-c4bd8ea300a9 service nova] Acquiring lock "refresh_cache-53b0cf02-f1c8-4219-a58c-f7b5ffbb1ae7" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 996.817211] env[63028]: DEBUG oslo_concurrency.lockutils [req-f87dfe65-655a-4feb-bb84-375d0ac28c6e req-49616ce6-ee16-4cf6-9ab2-c4bd8ea300a9 service nova] Acquired lock "refresh_cache-53b0cf02-f1c8-4219-a58c-f7b5ffbb1ae7" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 996.817363] env[63028]: DEBUG nova.network.neutron [req-f87dfe65-655a-4feb-bb84-375d0ac28c6e req-49616ce6-ee16-4cf6-9ab2-c4bd8ea300a9 service nova] [instance: 53b0cf02-f1c8-4219-a58c-f7b5ffbb1ae7] Refreshing network info cache for port 5dc30fb0-c128-49d6-a5d1-cd0f53cc9958 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 996.833404] env[63028]: DEBUG nova.compute.manager [req-bef516f7-3038-4ad4-ad8b-139f265520f2 req-11c89e50-add8-4ef9-88c8-647f84a11eed service nova] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Received event network-changed-c5f1d585-d624-4525-a5b2-132b18bf9378 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 996.833671] env[63028]: DEBUG nova.compute.manager [req-bef516f7-3038-4ad4-ad8b-139f265520f2 req-11c89e50-add8-4ef9-88c8-647f84a11eed service nova] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Refreshing instance network info cache due to event network-changed-c5f1d585-d624-4525-a5b2-132b18bf9378. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 996.833972] env[63028]: DEBUG oslo_concurrency.lockutils [req-bef516f7-3038-4ad4-ad8b-139f265520f2 req-11c89e50-add8-4ef9-88c8-647f84a11eed service nova] Acquiring lock "refresh_cache-d41a1eae-bb89-4222-9466-d86af891c654" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 996.834184] env[63028]: DEBUG oslo_concurrency.lockutils [req-bef516f7-3038-4ad4-ad8b-139f265520f2 req-11c89e50-add8-4ef9-88c8-647f84a11eed service nova] Acquired lock "refresh_cache-d41a1eae-bb89-4222-9466-d86af891c654" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 996.834369] env[63028]: DEBUG nova.network.neutron [req-bef516f7-3038-4ad4-ad8b-139f265520f2 req-11c89e50-add8-4ef9-88c8-647f84a11eed service nova] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Refreshing network info cache for port c5f1d585-d624-4525-a5b2-132b18bf9378 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 996.901588] env[63028]: DEBUG oslo_vmware.api [None req-599839db-ace5-459e-85d1-da82c2874637 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2736041, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.047566] env[63028]: DEBUG oslo_vmware.api [None req-5304d7dd-e594-479a-82a5-046691037310 tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]525e0770-abe1-acb7-18d2-c3a2c8a0f236, 'name': SearchDatastore_Task, 'duration_secs': 0.00818} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.048424] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9c8c6786-de8a-4287-9796-4083bb8e2a74 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.054568] env[63028]: DEBUG oslo_vmware.api [None req-5304d7dd-e594-479a-82a5-046691037310 tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] Waiting for the task: (returnval){ [ 997.054568] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52c2b821-80d4-b084-a199-65502045f9fd" [ 997.054568] env[63028]: _type = "Task" [ 997.054568] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.064789] env[63028]: DEBUG oslo_vmware.api [None req-5304d7dd-e594-479a-82a5-046691037310 tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52c2b821-80d4-b084-a199-65502045f9fd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.074445] env[63028]: DEBUG oslo_concurrency.lockutils [None req-618d358e-08e2-4af1-b725-9b296f03d89a tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 997.166470] env[63028]: INFO nova.compute.manager [-] [instance: ed872f21-c2c4-4597-8c9e-9f8d2202b707] Took 1.46 seconds to deallocate network for instance. [ 997.201530] env[63028]: DEBUG nova.objects.base [None req-029bbebf-b712-4d44-b93d-a9ad42e63e3e tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Object Instance lazy-loaded attributes: resources,numa_topology {{(pid=63028) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 997.203552] env[63028]: DEBUG oslo_vmware.api [None req-512f154c-941d-4be2-85d8-fd73b2fb3370 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] Task: {'id': task-2736042, 'name': ReconfigVM_Task, 'duration_secs': 0.477984} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.204204] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-512f154c-941d-4be2-85d8-fd73b2fb3370 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] [instance: 3fb46d02-7914-4d08-b63b-f3447ba1b81a] Reconfigured VM instance instance-0000005b to attach disk [datastore2] 3fb46d02-7914-4d08-b63b-f3447ba1b81a/3fb46d02-7914-4d08-b63b-f3447ba1b81a.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 997.205636] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-60e9f14c-8012-4aa2-a5b2-acb76c0b3bdf {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.213458] env[63028]: DEBUG oslo_vmware.api [None req-512f154c-941d-4be2-85d8-fd73b2fb3370 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] Waiting for the task: (returnval){ [ 997.213458] env[63028]: value = "task-2736044" [ 997.213458] env[63028]: _type = "Task" [ 997.213458] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.226526] env[63028]: DEBUG oslo_vmware.api [None req-512f154c-941d-4be2-85d8-fd73b2fb3370 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] Task: {'id': task-2736044, 'name': Rename_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.296719] env[63028]: DEBUG oslo_concurrency.lockutils [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Acquiring lock "refresh_cache-63524cd8-81de-419f-bb07-0326f3cb393f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 997.296823] env[63028]: DEBUG oslo_concurrency.lockutils [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Acquired lock "refresh_cache-63524cd8-81de-419f-bb07-0326f3cb393f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 997.296967] env[63028]: DEBUG nova.network.neutron [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 997.402247] env[63028]: DEBUG oslo_vmware.api [None req-599839db-ace5-459e-85d1-da82c2874637 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2736041, 'name': ReconfigVM_Task, 'duration_secs': 0.628728} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.404641] env[63028]: DEBUG oslo_concurrency.lockutils [None req-599839db-ace5-459e-85d1-da82c2874637 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Releasing lock "b77ba7d6-305e-4b60-a4b7-9353c12c3920" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 997.404842] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-599839db-ace5-459e-85d1-da82c2874637 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: b77ba7d6-305e-4b60-a4b7-9353c12c3920] Reconfigured VM to attach interface {{(pid=63028) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 997.564182] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de07be12-cb69-4ae9-8ee8-55cddd102d51 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.574839] env[63028]: DEBUG oslo_vmware.api [None req-5304d7dd-e594-479a-82a5-046691037310 tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52c2b821-80d4-b084-a199-65502045f9fd, 'name': SearchDatastore_Task, 'duration_secs': 0.009881} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.579246] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5304d7dd-e594-479a-82a5-046691037310 tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 997.579526] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-5304d7dd-e594-479a-82a5-046691037310 tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] 53b0cf02-f1c8-4219-a58c-f7b5ffbb1ae7/53b0cf02-f1c8-4219-a58c-f7b5ffbb1ae7.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 997.579837] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bc965626-6665-41dd-8fba-2e7c4e74a3d2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.583576] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d879cd51-3e14-4b56-bb57-355fecfa10c9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.642273] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f81ce7f-11fb-4211-84ad-b4db0b4db03c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.644856] env[63028]: DEBUG oslo_vmware.api [None req-5304d7dd-e594-479a-82a5-046691037310 tempest-ServersV294TestFqdnHostnames-76037487 
tempest-ServersV294TestFqdnHostnames-76037487-project-member] Waiting for the task: (returnval){ [ 997.644856] env[63028]: value = "task-2736045" [ 997.644856] env[63028]: _type = "Task" [ 997.644856] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.654133] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d538d9c2-cdcc-4f00-b2b1-3b0550879cc0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.662288] env[63028]: DEBUG oslo_vmware.api [None req-5304d7dd-e594-479a-82a5-046691037310 tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] Task: {'id': task-2736045, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.676997] env[63028]: DEBUG oslo_concurrency.lockutils [None req-339f9283-11da-4056-ad23-de15d610fd36 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 997.677538] env[63028]: DEBUG nova.compute.provider_tree [None req-029bbebf-b712-4d44-b93d-a9ad42e63e3e tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 997.699415] env[63028]: DEBUG nova.network.neutron [req-f87dfe65-655a-4feb-bb84-375d0ac28c6e req-49616ce6-ee16-4cf6-9ab2-c4bd8ea300a9 service nova] [instance: 53b0cf02-f1c8-4219-a58c-f7b5ffbb1ae7] Updated VIF entry in instance network info cache for port 5dc30fb0-c128-49d6-a5d1-cd0f53cc9958. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 997.699850] env[63028]: DEBUG nova.network.neutron [req-f87dfe65-655a-4feb-bb84-375d0ac28c6e req-49616ce6-ee16-4cf6-9ab2-c4bd8ea300a9 service nova] [instance: 53b0cf02-f1c8-4219-a58c-f7b5ffbb1ae7] Updating instance_info_cache with network_info: [{"id": "5dc30fb0-c128-49d6-a5d1-cd0f53cc9958", "address": "fa:16:3e:8c:5e:8c", "network": {"id": "62aa2116-8683-4ddd-a3e5-0b23a102cee0", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-251424440-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e09fff22285147c29cb2bb096bde3921", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6edb8eae-1113-49d0-84f7-9fd9f82b26fb", "external-id": "nsx-vlan-transportzone-493", "segmentation_id": 493, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5dc30fb0-c1", "ovs_interfaceid": "5dc30fb0-c128-49d6-a5d1-cd0f53cc9958", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 997.702301] env[63028]: DEBUG nova.network.neutron [req-bef516f7-3038-4ad4-ad8b-139f265520f2 req-11c89e50-add8-4ef9-88c8-647f84a11eed service nova] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Updated VIF entry in instance network info cache for port c5f1d585-d624-4525-a5b2-132b18bf9378. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 997.702650] env[63028]: DEBUG nova.network.neutron [req-bef516f7-3038-4ad4-ad8b-139f265520f2 req-11c89e50-add8-4ef9-88c8-647f84a11eed service nova] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Updating instance_info_cache with network_info: [{"id": "c5f1d585-d624-4525-a5b2-132b18bf9378", "address": "fa:16:3e:93:da:98", "network": {"id": "49670a42-3caa-4492-8b32-f16f3fe77f4b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1158317332-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.229", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b4dcaef840f940bda057d0371cdc5adb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4df917f7-847a-4c0e-b0e3-69a52e4a1554", "external-id": "cl2-zone-457", "segmentation_id": 457, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc5f1d585-d6", "ovs_interfaceid": "c5f1d585-d624-4525-a5b2-132b18bf9378", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 997.724671] env[63028]: DEBUG oslo_vmware.api [None req-512f154c-941d-4be2-85d8-fd73b2fb3370 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] Task: {'id': task-2736044, 'name': Rename_Task, 'duration_secs': 0.335742} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.724975] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-512f154c-941d-4be2-85d8-fd73b2fb3370 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] [instance: 3fb46d02-7914-4d08-b63b-f3447ba1b81a] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 997.725274] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9163294a-bfa6-4b3e-a193-ccb5eeeb5cfa {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.732241] env[63028]: DEBUG oslo_vmware.api [None req-512f154c-941d-4be2-85d8-fd73b2fb3370 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] Waiting for the task: (returnval){ [ 997.732241] env[63028]: value = "task-2736046" [ 997.732241] env[63028]: _type = "Task" [ 997.732241] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.742258] env[63028]: DEBUG oslo_vmware.api [None req-512f154c-941d-4be2-85d8-fd73b2fb3370 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] Task: {'id': task-2736046, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.911471] env[63028]: DEBUG oslo_concurrency.lockutils [None req-599839db-ace5-459e-85d1-da82c2874637 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Lock "interface-b77ba7d6-305e-4b60-a4b7-9353c12c3920-d0308a48-57ab-41f7-bbab-6871ed89c5f2" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 8.019s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 998.029639] env[63028]: DEBUG nova.network.neutron [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Updating instance_info_cache with network_info: [{"id": "296dfd9e-84e1-4ea8-bd17-28920a6a048b", "address": "fa:16:3e:bc:87:07", "network": {"id": "37013470-5bda-41a6-826c-03ac0d423c85", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1214578902-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "98d3fdfda1694b2f9f5985831ea77a21", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8868dc2-7767-49c0-a2ed-e611fcbf8414", "external-id": "nsx-vlan-transportzone-158", "segmentation_id": 158, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap296dfd9e-84", "ovs_interfaceid": "296dfd9e-84e1-4ea8-bd17-28920a6a048b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 998.136438] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-32f1703d-6f07-46ed-86d8-68f7aa481601 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 998.136782] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-08219cbc-ac8d-409f-aac6-d4236ec6ded6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.144575] env[63028]: DEBUG oslo_vmware.api [None req-32f1703d-6f07-46ed-86d8-68f7aa481601 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Waiting for the task: (returnval){ [ 998.144575] env[63028]: value = "task-2736047" [ 998.144575] env[63028]: _type = "Task" [ 998.144575] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.156590] env[63028]: DEBUG oslo_vmware.api [None req-32f1703d-6f07-46ed-86d8-68f7aa481601 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2736047, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.160419] env[63028]: DEBUG oslo_vmware.api [None req-5304d7dd-e594-479a-82a5-046691037310 tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] Task: {'id': task-2736045, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.501137} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.160760] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-5304d7dd-e594-479a-82a5-046691037310 tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] 53b0cf02-f1c8-4219-a58c-f7b5ffbb1ae7/53b0cf02-f1c8-4219-a58c-f7b5ffbb1ae7.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 998.161040] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-5304d7dd-e594-479a-82a5-046691037310 tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] [instance: 53b0cf02-f1c8-4219-a58c-f7b5ffbb1ae7] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 998.161296] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-90371751-370e-4666-a877-9cab11eb5e9e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.168585] env[63028]: DEBUG oslo_vmware.api [None req-5304d7dd-e594-479a-82a5-046691037310 tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] Waiting for the task: (returnval){ [ 998.168585] env[63028]: value = "task-2736048" [ 998.168585] env[63028]: _type = "Task" [ 998.168585] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.176936] env[63028]: DEBUG oslo_vmware.api [None req-5304d7dd-e594-479a-82a5-046691037310 tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] Task: {'id': task-2736048, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.180940] env[63028]: DEBUG nova.scheduler.client.report [None req-029bbebf-b712-4d44-b93d-a9ad42e63e3e tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 998.205115] env[63028]: DEBUG oslo_concurrency.lockutils [req-bef516f7-3038-4ad4-ad8b-139f265520f2 req-11c89e50-add8-4ef9-88c8-647f84a11eed service nova] Releasing lock "refresh_cache-d41a1eae-bb89-4222-9466-d86af891c654" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 998.205756] env[63028]: DEBUG oslo_concurrency.lockutils [req-f87dfe65-655a-4feb-bb84-375d0ac28c6e req-49616ce6-ee16-4cf6-9ab2-c4bd8ea300a9 service nova] Releasing lock "refresh_cache-53b0cf02-f1c8-4219-a58c-f7b5ffbb1ae7" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 998.205993] env[63028]: DEBUG nova.compute.manager [req-f87dfe65-655a-4feb-bb84-375d0ac28c6e req-49616ce6-ee16-4cf6-9ab2-c4bd8ea300a9 service nova] [instance: b77ba7d6-305e-4b60-a4b7-9353c12c3920] Received event network-changed-d0308a48-57ab-41f7-bbab-6871ed89c5f2 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 998.206166] env[63028]: DEBUG nova.compute.manager [req-f87dfe65-655a-4feb-bb84-375d0ac28c6e req-49616ce6-ee16-4cf6-9ab2-c4bd8ea300a9 service nova] [instance: b77ba7d6-305e-4b60-a4b7-9353c12c3920] Refreshing instance network info cache due to event network-changed-d0308a48-57ab-41f7-bbab-6871ed89c5f2. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 998.206402] env[63028]: DEBUG oslo_concurrency.lockutils [req-f87dfe65-655a-4feb-bb84-375d0ac28c6e req-49616ce6-ee16-4cf6-9ab2-c4bd8ea300a9 service nova] Acquiring lock "refresh_cache-b77ba7d6-305e-4b60-a4b7-9353c12c3920" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 998.206550] env[63028]: DEBUG oslo_concurrency.lockutils [req-f87dfe65-655a-4feb-bb84-375d0ac28c6e req-49616ce6-ee16-4cf6-9ab2-c4bd8ea300a9 service nova] Acquired lock "refresh_cache-b77ba7d6-305e-4b60-a4b7-9353c12c3920" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 998.206736] env[63028]: DEBUG nova.network.neutron [req-f87dfe65-655a-4feb-bb84-375d0ac28c6e req-49616ce6-ee16-4cf6-9ab2-c4bd8ea300a9 service nova] [instance: b77ba7d6-305e-4b60-a4b7-9353c12c3920] Refreshing network info cache for port d0308a48-57ab-41f7-bbab-6871ed89c5f2 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 998.243385] env[63028]: DEBUG oslo_vmware.api [None req-512f154c-941d-4be2-85d8-fd73b2fb3370 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] Task: {'id': task-2736046, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.534748] env[63028]: DEBUG oslo_concurrency.lockutils [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Releasing lock "refresh_cache-63524cd8-81de-419f-bb07-0326f3cb393f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 998.568953] env[63028]: DEBUG nova.virt.hardware [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='77db19751e0e54346d98515d7d64aedc',container_format='bare',created_at=2025-02-20T18:00:44Z,direct_url=,disk_format='vmdk',id=398e3010-a42d-4c4b-8604-11f5a3e99cff,min_disk=1,min_ram=0,name='tempest-ServersNegativeTestJSON-server-1375224806-shelved',owner='98d3fdfda1694b2f9f5985831ea77a21',properties=ImageMetaProps,protected=,size=31667712,status='active',tags=,updated_at=2025-02-20T18:01:00Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 998.569295] env[63028]: DEBUG nova.virt.hardware [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 998.569508] env[63028]: DEBUG nova.virt.hardware [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 998.569873] env[63028]: DEBUG nova.virt.hardware [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 998.570104] env[63028]: DEBUG nova.virt.hardware [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 998.570406] env[63028]: DEBUG nova.virt.hardware [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 998.570609] env[63028]: DEBUG nova.virt.hardware [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:573}} [ 998.570926] env[63028]: DEBUG nova.virt.hardware [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 998.571199] env[63028]: DEBUG nova.virt.hardware [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 998.571526] env[63028]: DEBUG nova.virt.hardware [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 998.571730] env[63028]: DEBUG nova.virt.hardware [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 998.573043] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53050748-580a-4033-b324-4db57e41ea12 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.583145] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-315686db-5060-4e04-b26b-c31e7e8b6fff {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.603997] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bc:87:07', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c8868dc2-7767-49c0-a2ed-e611fcbf8414', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '296dfd9e-84e1-4ea8-bd17-28920a6a048b', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 998.616107] env[63028]: DEBUG oslo.service.loopingcall [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 998.616107] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 998.616107] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e27e846a-f791-4474-8bd1-093da02084d7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.636307] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 998.636307] env[63028]: value = "task-2736049" [ 998.636307] env[63028]: _type = "Task" [ 998.636307] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.645556] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2736049, 'name': CreateVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.659036] env[63028]: DEBUG oslo_vmware.api [None req-32f1703d-6f07-46ed-86d8-68f7aa481601 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2736047, 'name': PowerOffVM_Task, 'duration_secs': 0.208753} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.660020] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-32f1703d-6f07-46ed-86d8-68f7aa481601 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 998.661137] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b7b4647-0fe2-4251-be33-45c7384837a6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.698762] env[63028]: DEBUG oslo_concurrency.lockutils [None req-029bbebf-b712-4d44-b93d-a9ad42e63e3e tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.504s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 998.708767] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9079c8c2-b569-4e87-bc06-f3d8e91deb05 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.328s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 998.709275] env[63028]: DEBUG nova.objects.instance [None req-9079c8c2-b569-4e87-bc06-f3d8e91deb05 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] Lazy-loading 'resources' on Instance uuid 455578fa-7468-40dc-8c0a-37ac35e5c0a0 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 998.712683] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-2dc9c76d-856b-4303-b3ad-936e0efc1e25 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.731151] env[63028]: DEBUG oslo_vmware.api [None req-5304d7dd-e594-479a-82a5-046691037310 tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] Task: {'id': task-2736048, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.086743} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.731448] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-5304d7dd-e594-479a-82a5-046691037310 tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] [instance: 53b0cf02-f1c8-4219-a58c-f7b5ffbb1ae7] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 998.732256] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89efa0ef-5678-4483-bd9a-01662c3e09bd {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.743341] env[63028]: DEBUG oslo_vmware.api [None req-512f154c-941d-4be2-85d8-fd73b2fb3370 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] Task: {'id': task-2736046, 'name': PowerOnVM_Task, 'duration_secs': 0.982579} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.752395] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-512f154c-941d-4be2-85d8-fd73b2fb3370 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] [instance: 3fb46d02-7914-4d08-b63b-f3447ba1b81a] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 998.752784] env[63028]: INFO nova.compute.manager [None req-512f154c-941d-4be2-85d8-fd73b2fb3370 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] [instance: 3fb46d02-7914-4d08-b63b-f3447ba1b81a] Took 9.70 seconds to spawn the instance on the hypervisor. 
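The scheduler report-client entries in this stretch (at 998.180940 and again at 1000.154298) re-send an unchanged inventory for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2. A minimal sketch of how such an inventory record translates into schedulable capacity, assuming the conventional placement formula (total - reserved) * allocation_ratio; the dict below is copied from the logged data, and the helper is an illustrative, hypothetical name rather than Nova or placement source code:

# Illustrative sketch only -- not Nova/placement code. Inventory values are
# taken verbatim from the log entries for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

def usable_capacity(record):
    # Assumed capacity formula: (total - reserved) scaled by the overcommit ratio.
    return int((record['total'] - record['reserved']) * record['allocation_ratio'])

for resource_class, record in inventory.items():
    print(resource_class, usable_capacity(record))
# Under that assumption this prints:
#   VCPU 192
#   MEMORY_MB 196078
#   DISK_GB 400

If that formula holds, the provider advertises 192 vCPUs of allocatable capacity on 48 physical cores (allocation_ratio 4.0), 196078 MB of RAM, and 400 GB of disk, alongside the per-request ceilings (max_unit 16 VCPU, 65530 MB, 110 GB) shown in the same entries.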
[ 998.752842] env[63028]: DEBUG nova.compute.manager [None req-512f154c-941d-4be2-85d8-fd73b2fb3370 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] [instance: 3fb46d02-7914-4d08-b63b-f3447ba1b81a] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 998.761391] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-5304d7dd-e594-479a-82a5-046691037310 tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] [instance: 53b0cf02-f1c8-4219-a58c-f7b5ffbb1ae7] Reconfiguring VM instance instance-0000005c to attach disk [datastore2] 53b0cf02-f1c8-4219-a58c-f7b5ffbb1ae7/53b0cf02-f1c8-4219-a58c-f7b5ffbb1ae7.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 998.764760] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7975950-da53-4c98-b907-470b10bc9dc1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.767208] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0d512983-cae0-4811-bf84-24e2b985e253 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.788855] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-32f1703d-6f07-46ed-86d8-68f7aa481601 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 998.788855] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c370c266-456f-4f1f-847a-4088d2b6d1f5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.792553] env[63028]: DEBUG oslo_vmware.api [None req-5304d7dd-e594-479a-82a5-046691037310 tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] Waiting for the task: (returnval){ [ 998.792553] env[63028]: value = "task-2736050" [ 998.792553] env[63028]: _type = "Task" [ 998.792553] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.801874] env[63028]: DEBUG oslo_vmware.api [None req-32f1703d-6f07-46ed-86d8-68f7aa481601 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Waiting for the task: (returnval){ [ 998.801874] env[63028]: value = "task-2736051" [ 998.801874] env[63028]: _type = "Task" [ 998.801874] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.805081] env[63028]: DEBUG oslo_vmware.api [None req-5304d7dd-e594-479a-82a5-046691037310 tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] Task: {'id': task-2736050, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.813533] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-32f1703d-6f07-46ed-86d8-68f7aa481601 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] VM already powered off {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 998.813783] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-32f1703d-6f07-46ed-86d8-68f7aa481601 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 998.814082] env[63028]: DEBUG oslo_concurrency.lockutils [None req-32f1703d-6f07-46ed-86d8-68f7aa481601 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 998.814275] env[63028]: DEBUG oslo_concurrency.lockutils [None req-32f1703d-6f07-46ed-86d8-68f7aa481601 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 998.814428] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-32f1703d-6f07-46ed-86d8-68f7aa481601 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 998.814679] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f363cfe4-c50c-4564-a7c7-a8baf72cae13 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.823935] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-32f1703d-6f07-46ed-86d8-68f7aa481601 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 998.823935] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-32f1703d-6f07-46ed-86d8-68f7aa481601 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 998.825135] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-562fb47f-a0a8-4932-9700-d1cc00ee09bb {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.834918] env[63028]: DEBUG oslo_vmware.api [None req-32f1703d-6f07-46ed-86d8-68f7aa481601 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Waiting for the task: (returnval){ [ 998.834918] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52ea0280-f81c-f039-9322-da701437479a" [ 998.834918] env[63028]: _type = "Task" [ 998.834918] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.845641] env[63028]: DEBUG oslo_vmware.api [None req-32f1703d-6f07-46ed-86d8-68f7aa481601 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52ea0280-f81c-f039-9322-da701437479a, 'name': SearchDatastore_Task} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.846852] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-68d0404b-1831-48fe-900c-bb743ef8ec2f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.854442] env[63028]: DEBUG oslo_vmware.api [None req-32f1703d-6f07-46ed-86d8-68f7aa481601 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Waiting for the task: (returnval){ [ 998.854442] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5235a8a2-f7ef-f313-e1aa-e4143871972c" [ 998.854442] env[63028]: _type = "Task" [ 998.854442] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.862388] env[63028]: DEBUG oslo_vmware.api [None req-32f1703d-6f07-46ed-86d8-68f7aa481601 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5235a8a2-f7ef-f313-e1aa-e4143871972c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.004042] env[63028]: DEBUG nova.network.neutron [req-f87dfe65-655a-4feb-bb84-375d0ac28c6e req-49616ce6-ee16-4cf6-9ab2-c4bd8ea300a9 service nova] [instance: b77ba7d6-305e-4b60-a4b7-9353c12c3920] Updated VIF entry in instance network info cache for port d0308a48-57ab-41f7-bbab-6871ed89c5f2. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 999.004744] env[63028]: DEBUG nova.network.neutron [req-f87dfe65-655a-4feb-bb84-375d0ac28c6e req-49616ce6-ee16-4cf6-9ab2-c4bd8ea300a9 service nova] [instance: b77ba7d6-305e-4b60-a4b7-9353c12c3920] Updating instance_info_cache with network_info: [{"id": "9efd2ef2-d319-4038-ab28-44a46bd597d8", "address": "fa:16:3e:90:60:0c", "network": {"id": "c2f1496c-e3fd-43db-a032-12cdacdb4e46", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1287620143-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.220", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "25a6457f62d149629c09589feb1a550c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9efd2ef2-d3", "ovs_interfaceid": "9efd2ef2-d319-4038-ab28-44a46bd597d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "98722f90-f2d2-4a4a-9e68-ad5c32b18435", "address": "fa:16:3e:ab:1c:75", "network": {"id": "c2f1496c-e3fd-43db-a032-12cdacdb4e46", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1287620143-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "25a6457f62d149629c09589feb1a550c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98722f90-f2", "ovs_interfaceid": "98722f90-f2d2-4a4a-9e68-ad5c32b18435", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "d0308a48-57ab-41f7-bbab-6871ed89c5f2", "address": "fa:16:3e:c8:52:ef", "network": {"id": "c2f1496c-e3fd-43db-a032-12cdacdb4e46", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1287620143-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "25a6457f62d149629c09589feb1a550c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd0308a48-57", "ovs_interfaceid": "d0308a48-57ab-41f7-bbab-6871ed89c5f2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 999.146855] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2736049, 'name': CreateVM_Task, 'duration_secs': 0.358794} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.147161] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 999.147835] env[63028]: DEBUG oslo_concurrency.lockutils [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/398e3010-a42d-4c4b-8604-11f5a3e99cff" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 999.147996] env[63028]: DEBUG oslo_concurrency.lockutils [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Acquired lock "[datastore2] devstack-image-cache_base/398e3010-a42d-4c4b-8604-11f5a3e99cff" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 999.148517] env[63028]: DEBUG oslo_concurrency.lockutils [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/398e3010-a42d-4c4b-8604-11f5a3e99cff" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 999.148780] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3c914f0a-e8fe-41ec-b47f-eba109832e9d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.153766] env[63028]: DEBUG oslo_vmware.api [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Waiting for the task: (returnval){ [ 999.153766] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52ea0683-60b3-e74b-b970-1715c7511b75" [ 999.153766] env[63028]: _type = "Task" [ 999.153766] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.165665] env[63028]: DEBUG oslo_vmware.api [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52ea0683-60b3-e74b-b970-1715c7511b75, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.181923] env[63028]: DEBUG nova.compute.manager [req-a5805b5c-93bb-4f1b-a109-e6c993eb3cdc req-0809ca55-19ef-4122-a75a-18f892b3a0a3 service nova] [instance: ed872f21-c2c4-4597-8c9e-9f8d2202b707] Received event network-vif-deleted-cd8436f9-6412-468e-bd24-f9d845d3ca21 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 999.181923] env[63028]: DEBUG nova.compute.manager [req-a5805b5c-93bb-4f1b-a109-e6c993eb3cdc req-0809ca55-19ef-4122-a75a-18f892b3a0a3 service nova] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Received event network-vif-plugged-296dfd9e-84e1-4ea8-bd17-28920a6a048b {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 999.183155] env[63028]: DEBUG oslo_concurrency.lockutils [req-a5805b5c-93bb-4f1b-a109-e6c993eb3cdc req-0809ca55-19ef-4122-a75a-18f892b3a0a3 service nova] Acquiring lock "63524cd8-81de-419f-bb07-0326f3cb393f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 999.183336] env[63028]: DEBUG oslo_concurrency.lockutils [req-a5805b5c-93bb-4f1b-a109-e6c993eb3cdc req-0809ca55-19ef-4122-a75a-18f892b3a0a3 service nova] Lock "63524cd8-81de-419f-bb07-0326f3cb393f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 999.183762] env[63028]: DEBUG oslo_concurrency.lockutils [req-a5805b5c-93bb-4f1b-a109-e6c993eb3cdc req-0809ca55-19ef-4122-a75a-18f892b3a0a3 service nova] Lock "63524cd8-81de-419f-bb07-0326f3cb393f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 999.184040] env[63028]: DEBUG nova.compute.manager [req-a5805b5c-93bb-4f1b-a109-e6c993eb3cdc req-0809ca55-19ef-4122-a75a-18f892b3a0a3 service nova] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] No waiting events found dispatching network-vif-plugged-296dfd9e-84e1-4ea8-bd17-28920a6a048b {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 999.184236] env[63028]: WARNING nova.compute.manager [req-a5805b5c-93bb-4f1b-a109-e6c993eb3cdc req-0809ca55-19ef-4122-a75a-18f892b3a0a3 service nova] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Received unexpected event network-vif-plugged-296dfd9e-84e1-4ea8-bd17-28920a6a048b for instance with vm_state shelved_offloaded and task_state spawning. [ 999.187989] env[63028]: DEBUG nova.compute.manager [req-a5805b5c-93bb-4f1b-a109-e6c993eb3cdc req-0809ca55-19ef-4122-a75a-18f892b3a0a3 service nova] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Received event network-changed-296dfd9e-84e1-4ea8-bd17-28920a6a048b {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 999.187989] env[63028]: DEBUG nova.compute.manager [req-a5805b5c-93bb-4f1b-a109-e6c993eb3cdc req-0809ca55-19ef-4122-a75a-18f892b3a0a3 service nova] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Refreshing instance network info cache due to event network-changed-296dfd9e-84e1-4ea8-bd17-28920a6a048b. 
{{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 999.187989] env[63028]: DEBUG oslo_concurrency.lockutils [req-a5805b5c-93bb-4f1b-a109-e6c993eb3cdc req-0809ca55-19ef-4122-a75a-18f892b3a0a3 service nova] Acquiring lock "refresh_cache-63524cd8-81de-419f-bb07-0326f3cb393f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 999.187989] env[63028]: DEBUG oslo_concurrency.lockutils [req-a5805b5c-93bb-4f1b-a109-e6c993eb3cdc req-0809ca55-19ef-4122-a75a-18f892b3a0a3 service nova] Acquired lock "refresh_cache-63524cd8-81de-419f-bb07-0326f3cb393f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 999.187989] env[63028]: DEBUG nova.network.neutron [req-a5805b5c-93bb-4f1b-a109-e6c993eb3cdc req-0809ca55-19ef-4122-a75a-18f892b3a0a3 service nova] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Refreshing network info cache for port 296dfd9e-84e1-4ea8-bd17-28920a6a048b {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 999.222909] env[63028]: DEBUG oslo_concurrency.lockutils [None req-029bbebf-b712-4d44-b93d-a9ad42e63e3e tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Lock "ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 29.313s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 999.224116] env[63028]: DEBUG oslo_concurrency.lockutils [None req-56e62f6c-517c-4ef7-810a-239a4e51c428 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Lock "ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 3.722s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 999.224437] env[63028]: DEBUG oslo_concurrency.lockutils [None req-56e62f6c-517c-4ef7-810a-239a4e51c428 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Acquiring lock "ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 999.224937] env[63028]: DEBUG oslo_concurrency.lockutils [None req-56e62f6c-517c-4ef7-810a-239a4e51c428 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Lock "ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 999.224937] env[63028]: DEBUG oslo_concurrency.lockutils [None req-56e62f6c-517c-4ef7-810a-239a4e51c428 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Lock "ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 999.229911] env[63028]: INFO nova.compute.manager [None req-56e62f6c-517c-4ef7-810a-239a4e51c428 tempest-DeleteServersTestJSON-1541253569 
tempest-DeleteServersTestJSON-1541253569-project-member] [instance: ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19] Terminating instance [ 999.312131] env[63028]: INFO nova.compute.manager [None req-512f154c-941d-4be2-85d8-fd73b2fb3370 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] [instance: 3fb46d02-7914-4d08-b63b-f3447ba1b81a] Took 34.25 seconds to build instance. [ 999.318591] env[63028]: DEBUG oslo_vmware.api [None req-5304d7dd-e594-479a-82a5-046691037310 tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] Task: {'id': task-2736050, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.375289] env[63028]: DEBUG oslo_vmware.api [None req-32f1703d-6f07-46ed-86d8-68f7aa481601 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5235a8a2-f7ef-f313-e1aa-e4143871972c, 'name': SearchDatastore_Task, 'duration_secs': 0.010434} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.375289] env[63028]: DEBUG oslo_concurrency.lockutils [None req-32f1703d-6f07-46ed-86d8-68f7aa481601 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 999.375289] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-32f1703d-6f07-46ed-86d8-68f7aa481601 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] 8bb61bfa-d44e-4e06-867a-445d9b3db660/f2ba2026-3f4b-431c-97c1-c4ba582a9907-rescue.vmdk. {{(pid=63028) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 999.375289] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-84b1bcdc-8a8e-46ed-81fd-d2ebc662c8c9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.384120] env[63028]: DEBUG oslo_vmware.api [None req-32f1703d-6f07-46ed-86d8-68f7aa481601 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Waiting for the task: (returnval){ [ 999.384120] env[63028]: value = "task-2736052" [ 999.384120] env[63028]: _type = "Task" [ 999.384120] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.396438] env[63028]: DEBUG oslo_vmware.api [None req-32f1703d-6f07-46ed-86d8-68f7aa481601 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2736052, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.510245] env[63028]: DEBUG oslo_concurrency.lockutils [req-f87dfe65-655a-4feb-bb84-375d0ac28c6e req-49616ce6-ee16-4cf6-9ab2-c4bd8ea300a9 service nova] Releasing lock "refresh_cache-b77ba7d6-305e-4b60-a4b7-9353c12c3920" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 999.573933] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d6ea579-8425-4994-81fd-4b12fcc18acd {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.582218] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1328749-61f7-40c0-a74d-809138a1ccf8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.619474] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a80ef095-4e87-4b6e-b34b-d57a1cbf2b95 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.631586] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bf285ac-3a0a-4a77-bf7b-7fe42c70ec4b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.649310] env[63028]: DEBUG nova.compute.provider_tree [None req-9079c8c2-b569-4e87-bc06-f3d8e91deb05 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 999.663757] env[63028]: DEBUG oslo_concurrency.lockutils [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Releasing lock "[datastore2] devstack-image-cache_base/398e3010-a42d-4c4b-8604-11f5a3e99cff" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 999.664056] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Processing image 398e3010-a42d-4c4b-8604-11f5a3e99cff {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 999.664311] env[63028]: DEBUG oslo_concurrency.lockutils [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/398e3010-a42d-4c4b-8604-11f5a3e99cff/398e3010-a42d-4c4b-8604-11f5a3e99cff.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 999.664590] env[63028]: DEBUG oslo_concurrency.lockutils [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Acquired lock "[datastore2] devstack-image-cache_base/398e3010-a42d-4c4b-8604-11f5a3e99cff/398e3010-a42d-4c4b-8604-11f5a3e99cff.vmdk" {{(pid=63028) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 999.664651] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 999.664875] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-20e22144-3c99-41b6-a5e6-abd0b8a66ab3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.677656] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 999.677903] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 999.678648] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2d71dcc2-8d5a-47e9-ba31-71e193420833 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.684752] env[63028]: DEBUG oslo_vmware.api [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Waiting for the task: (returnval){ [ 999.684752] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5281c838-861d-4815-4fb4-7403bb3152b2" [ 999.684752] env[63028]: _type = "Task" [ 999.684752] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.694620] env[63028]: DEBUG oslo_vmware.api [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5281c838-861d-4815-4fb4-7403bb3152b2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.737349] env[63028]: DEBUG nova.compute.manager [None req-56e62f6c-517c-4ef7-810a-239a4e51c428 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19] Start destroying the instance on the hypervisor. 
{{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 999.738261] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-56e62f6c-517c-4ef7-810a-239a4e51c428 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 999.738591] env[63028]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-811e85c5-7b44-45b1-bc05-8dbc4e8d3996 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.750090] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c86630cb-e19d-4005-930f-02f2071bcf3e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.792730] env[63028]: WARNING nova.virt.vmwareapi.vmops [None req-56e62f6c-517c-4ef7-810a-239a4e51c428 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19 could not be found. [ 999.792730] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-56e62f6c-517c-4ef7-810a-239a4e51c428 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 999.792730] env[63028]: INFO nova.compute.manager [None req-56e62f6c-517c-4ef7-810a-239a4e51c428 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19] Took 0.05 seconds to destroy the instance on the hypervisor. [ 999.792730] env[63028]: DEBUG oslo.service.loopingcall [None req-56e62f6c-517c-4ef7-810a-239a4e51c428 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 999.792730] env[63028]: DEBUG nova.compute.manager [-] [instance: ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 999.792730] env[63028]: DEBUG nova.network.neutron [-] [instance: ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 999.812681] env[63028]: DEBUG oslo_vmware.api [None req-5304d7dd-e594-479a-82a5-046691037310 tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] Task: {'id': task-2736050, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.815885] env[63028]: DEBUG oslo_concurrency.lockutils [None req-512f154c-941d-4be2-85d8-fd73b2fb3370 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] Lock "3fb46d02-7914-4d08-b63b-f3447ba1b81a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 35.755s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 999.864762] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f6770839-a53a-44cd-af29-6d3e478d0666 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Acquiring lock "a97224e8-d69b-4c62-ab96-7cef037ef39b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 999.865132] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f6770839-a53a-44cd-af29-6d3e478d0666 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Lock "a97224e8-d69b-4c62-ab96-7cef037ef39b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 999.865365] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f6770839-a53a-44cd-af29-6d3e478d0666 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Acquiring lock "a97224e8-d69b-4c62-ab96-7cef037ef39b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 999.865569] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f6770839-a53a-44cd-af29-6d3e478d0666 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Lock "a97224e8-d69b-4c62-ab96-7cef037ef39b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 999.865740] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f6770839-a53a-44cd-af29-6d3e478d0666 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Lock "a97224e8-d69b-4c62-ab96-7cef037ef39b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 999.870384] env[63028]: INFO nova.compute.manager [None req-f6770839-a53a-44cd-af29-6d3e478d0666 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: a97224e8-d69b-4c62-ab96-7cef037ef39b] Terminating instance [ 999.903674] env[63028]: DEBUG oslo_vmware.api [None req-32f1703d-6f07-46ed-86d8-68f7aa481601 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2736052, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.125863] env[63028]: DEBUG oslo_concurrency.lockutils [None req-58bcb03d-d4ef-41ee-99d4-f0fb768c981b tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Acquiring lock "e7ba7069-fe9f-4e0f-acc6-c1d61ad5a8d9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1000.126207] env[63028]: DEBUG oslo_concurrency.lockutils [None req-58bcb03d-d4ef-41ee-99d4-f0fb768c981b tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Lock "e7ba7069-fe9f-4e0f-acc6-c1d61ad5a8d9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1000.126450] env[63028]: DEBUG oslo_concurrency.lockutils [None req-58bcb03d-d4ef-41ee-99d4-f0fb768c981b tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Acquiring lock "e7ba7069-fe9f-4e0f-acc6-c1d61ad5a8d9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1000.126653] env[63028]: DEBUG oslo_concurrency.lockutils [None req-58bcb03d-d4ef-41ee-99d4-f0fb768c981b tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Lock "e7ba7069-fe9f-4e0f-acc6-c1d61ad5a8d9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1000.126857] env[63028]: DEBUG oslo_concurrency.lockutils [None req-58bcb03d-d4ef-41ee-99d4-f0fb768c981b tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Lock "e7ba7069-fe9f-4e0f-acc6-c1d61ad5a8d9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1000.129093] env[63028]: INFO nova.compute.manager [None req-58bcb03d-d4ef-41ee-99d4-f0fb768c981b tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: e7ba7069-fe9f-4e0f-acc6-c1d61ad5a8d9] Terminating instance [ 1000.139174] env[63028]: DEBUG nova.network.neutron [req-a5805b5c-93bb-4f1b-a109-e6c993eb3cdc req-0809ca55-19ef-4122-a75a-18f892b3a0a3 service nova] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Updated VIF entry in instance network info cache for port 296dfd9e-84e1-4ea8-bd17-28920a6a048b. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1000.139590] env[63028]: DEBUG nova.network.neutron [req-a5805b5c-93bb-4f1b-a109-e6c993eb3cdc req-0809ca55-19ef-4122-a75a-18f892b3a0a3 service nova] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Updating instance_info_cache with network_info: [{"id": "296dfd9e-84e1-4ea8-bd17-28920a6a048b", "address": "fa:16:3e:bc:87:07", "network": {"id": "37013470-5bda-41a6-826c-03ac0d423c85", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1214578902-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "98d3fdfda1694b2f9f5985831ea77a21", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8868dc2-7767-49c0-a2ed-e611fcbf8414", "external-id": "nsx-vlan-transportzone-158", "segmentation_id": 158, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap296dfd9e-84", "ovs_interfaceid": "296dfd9e-84e1-4ea8-bd17-28920a6a048b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1000.154298] env[63028]: DEBUG nova.scheduler.client.report [None req-9079c8c2-b569-4e87-bc06-f3d8e91deb05 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1000.195817] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Preparing fetch location {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1000.196310] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Fetch image to [datastore2] OSTACK_IMG_fd4c3593-06ab-4599-8421-dfbf260ea7c7/OSTACK_IMG_fd4c3593-06ab-4599-8421-dfbf260ea7c7.vmdk {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1000.196751] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Downloading stream optimized image 
398e3010-a42d-4c4b-8604-11f5a3e99cff to [datastore2] OSTACK_IMG_fd4c3593-06ab-4599-8421-dfbf260ea7c7/OSTACK_IMG_fd4c3593-06ab-4599-8421-dfbf260ea7c7.vmdk on the data store datastore2 as vApp {{(pid=63028) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1000.197118] env[63028]: DEBUG nova.virt.vmwareapi.images [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Downloading image file data 398e3010-a42d-4c4b-8604-11f5a3e99cff to the ESX as VM named 'OSTACK_IMG_fd4c3593-06ab-4599-8421-dfbf260ea7c7' {{(pid=63028) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1000.311911] env[63028]: DEBUG oslo_vmware.rw_handles [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1000.311911] env[63028]: value = "resgroup-9" [ 1000.311911] env[63028]: _type = "ResourcePool" [ 1000.311911] env[63028]: }. {{(pid=63028) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1000.311911] env[63028]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-267d1b03-4d48-474d-92b7-7de8fa93d784 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.332336] env[63028]: DEBUG oslo_vmware.api [None req-5304d7dd-e594-479a-82a5-046691037310 tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] Task: {'id': task-2736050, 'name': ReconfigVM_Task, 'duration_secs': 1.400938} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.333258] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-5304d7dd-e594-479a-82a5-046691037310 tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] [instance: 53b0cf02-f1c8-4219-a58c-f7b5ffbb1ae7] Reconfigured VM instance instance-0000005c to attach disk [datastore2] 53b0cf02-f1c8-4219-a58c-f7b5ffbb1ae7/53b0cf02-f1c8-4219-a58c-f7b5ffbb1ae7.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1000.337183] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8bea4bf3-2ea0-44ff-a4e5-d70014f189c7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.338460] env[63028]: DEBUG oslo_vmware.rw_handles [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Lease: (returnval){ [ 1000.338460] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]529f24a1-fed4-ddee-4409-5bfa49a02ddb" [ 1000.338460] env[63028]: _type = "HttpNfcLease" [ 1000.338460] env[63028]: } obtained for vApp import into resource pool (val){ [ 1000.338460] env[63028]: value = "resgroup-9" [ 1000.338460] env[63028]: _type = "ResourcePool" [ 1000.338460] env[63028]: }. 
{{(pid=63028) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1000.338762] env[63028]: DEBUG oslo_vmware.api [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Waiting for the lease: (returnval){ [ 1000.338762] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]529f24a1-fed4-ddee-4409-5bfa49a02ddb" [ 1000.338762] env[63028]: _type = "HttpNfcLease" [ 1000.338762] env[63028]: } to be ready. {{(pid=63028) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1000.342512] env[63028]: DEBUG oslo_vmware.api [None req-5304d7dd-e594-479a-82a5-046691037310 tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] Waiting for the task: (returnval){ [ 1000.342512] env[63028]: value = "task-2736054" [ 1000.342512] env[63028]: _type = "Task" [ 1000.342512] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.351884] env[63028]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1000.351884] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]529f24a1-fed4-ddee-4409-5bfa49a02ddb" [ 1000.351884] env[63028]: _type = "HttpNfcLease" [ 1000.351884] env[63028]: } is initializing. {{(pid=63028) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1000.355141] env[63028]: DEBUG oslo_vmware.api [None req-5304d7dd-e594-479a-82a5-046691037310 tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] Task: {'id': task-2736054, 'name': Rename_Task} progress is 6%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.362828] env[63028]: DEBUG nova.compute.manager [None req-10eceea7-8c29-449d-a9d5-c87435449a22 tempest-ServerDiagnosticsTest-1640536864 tempest-ServerDiagnosticsTest-1640536864-project-admin] [instance: 3fb46d02-7914-4d08-b63b-f3447ba1b81a] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1000.364114] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1244e86d-8774-49fd-85ce-07489121bd97 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.372914] env[63028]: INFO nova.compute.manager [None req-10eceea7-8c29-449d-a9d5-c87435449a22 tempest-ServerDiagnosticsTest-1640536864 tempest-ServerDiagnosticsTest-1640536864-project-admin] [instance: 3fb46d02-7914-4d08-b63b-f3447ba1b81a] Retrieving diagnostics [ 1000.374278] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4715e99b-3989-4a4d-b0e4-0b56469f8f24 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.378810] env[63028]: DEBUG nova.compute.manager [None req-f6770839-a53a-44cd-af29-6d3e478d0666 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: a97224e8-d69b-4c62-ab96-7cef037ef39b] Start destroying the instance on the hypervisor. 
{{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1000.378810] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-f6770839-a53a-44cd-af29-6d3e478d0666 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: a97224e8-d69b-4c62-ab96-7cef037ef39b] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1000.379591] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4eba2703-6239-419b-80c6-abd76994fe34 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.422795] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6770839-a53a-44cd-af29-6d3e478d0666 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: a97224e8-d69b-4c62-ab96-7cef037ef39b] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1000.423555] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-25b9f811-1eec-4e1d-9e65-62ec752853fe {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.432600] env[63028]: DEBUG oslo_vmware.api [None req-32f1703d-6f07-46ed-86d8-68f7aa481601 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2736052, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.434526] env[63028]: DEBUG oslo_vmware.api [None req-f6770839-a53a-44cd-af29-6d3e478d0666 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Waiting for the task: (returnval){ [ 1000.434526] env[63028]: value = "task-2736055" [ 1000.434526] env[63028]: _type = "Task" [ 1000.434526] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.443857] env[63028]: DEBUG oslo_vmware.api [None req-f6770839-a53a-44cd-af29-6d3e478d0666 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Task: {'id': task-2736055, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.494609] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1000.636044] env[63028]: DEBUG nova.compute.manager [None req-58bcb03d-d4ef-41ee-99d4-f0fb768c981b tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: e7ba7069-fe9f-4e0f-acc6-c1d61ad5a8d9] Start destroying the instance on the hypervisor. 
{{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1000.636044] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-58bcb03d-d4ef-41ee-99d4-f0fb768c981b tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: e7ba7069-fe9f-4e0f-acc6-c1d61ad5a8d9] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1000.636044] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2ce1ac5-ffe7-432b-ba15-30fd6f880c59 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.645106] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-58bcb03d-d4ef-41ee-99d4-f0fb768c981b tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: e7ba7069-fe9f-4e0f-acc6-c1d61ad5a8d9] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1000.646650] env[63028]: DEBUG oslo_concurrency.lockutils [req-a5805b5c-93bb-4f1b-a109-e6c993eb3cdc req-0809ca55-19ef-4122-a75a-18f892b3a0a3 service nova] Releasing lock "refresh_cache-63524cd8-81de-419f-bb07-0326f3cb393f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1000.647190] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0b0d88aa-c101-45ea-8ba6-538e1bd907a7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.656021] env[63028]: DEBUG oslo_vmware.api [None req-58bcb03d-d4ef-41ee-99d4-f0fb768c981b tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Waiting for the task: (returnval){ [ 1000.656021] env[63028]: value = "task-2736056" [ 1000.656021] env[63028]: _type = "Task" [ 1000.656021] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.662318] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9079c8c2-b569-4e87-bc06-f3d8e91deb05 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.951s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1000.667810] env[63028]: DEBUG oslo_concurrency.lockutils [None req-98d4fdad-5b7e-4f6c-913e-53467e1954bd tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.856s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1000.668432] env[63028]: DEBUG nova.objects.instance [None req-98d4fdad-5b7e-4f6c-913e-53467e1954bd tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Lazy-loading 'resources' on Instance uuid c386c117-e255-4c3b-9a37-011e517277de {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1000.669829] env[63028]: DEBUG oslo_vmware.api [None req-58bcb03d-d4ef-41ee-99d4-f0fb768c981b tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Task: {'id': task-2736056, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.680026] env[63028]: INFO nova.scheduler.client.report [None req-9079c8c2-b569-4e87-bc06-f3d8e91deb05 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] Deleted allocations for instance 455578fa-7468-40dc-8c0a-37ac35e5c0a0 [ 1000.725619] env[63028]: DEBUG oslo_concurrency.lockutils [None req-077d2597-ca60-4717-9e81-198eecf87101 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Acquiring lock "interface-b77ba7d6-305e-4b60-a4b7-9353c12c3920-98722f90-f2d2-4a4a-9e68-ad5c32b18435" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1000.726008] env[63028]: DEBUG oslo_concurrency.lockutils [None req-077d2597-ca60-4717-9e81-198eecf87101 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Lock "interface-b77ba7d6-305e-4b60-a4b7-9353c12c3920-98722f90-f2d2-4a4a-9e68-ad5c32b18435" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1000.782400] env[63028]: DEBUG nova.network.neutron [-] [instance: ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1000.851028] env[63028]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1000.851028] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]529f24a1-fed4-ddee-4409-5bfa49a02ddb" [ 1000.851028] env[63028]: _type = "HttpNfcLease" [ 1000.851028] env[63028]: } is initializing. 
{{(pid=63028) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1000.855834] env[63028]: DEBUG oslo_vmware.api [None req-5304d7dd-e594-479a-82a5-046691037310 tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] Task: {'id': task-2736054, 'name': Rename_Task, 'duration_secs': 0.327943} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.855834] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-5304d7dd-e594-479a-82a5-046691037310 tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] [instance: 53b0cf02-f1c8-4219-a58c-f7b5ffbb1ae7] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1000.855834] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d4567587-29cd-4c78-ad42-ca351f11ca77 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.863173] env[63028]: DEBUG oslo_vmware.api [None req-5304d7dd-e594-479a-82a5-046691037310 tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] Waiting for the task: (returnval){ [ 1000.863173] env[63028]: value = "task-2736057" [ 1000.863173] env[63028]: _type = "Task" [ 1000.863173] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.883012] env[63028]: DEBUG oslo_vmware.api [None req-5304d7dd-e594-479a-82a5-046691037310 tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] Task: {'id': task-2736057, 'name': PowerOnVM_Task} progress is 33%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.929013] env[63028]: DEBUG oslo_vmware.api [None req-32f1703d-6f07-46ed-86d8-68f7aa481601 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2736052, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.182214} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.930027] env[63028]: INFO nova.virt.vmwareapi.ds_util [None req-32f1703d-6f07-46ed-86d8-68f7aa481601 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] 8bb61bfa-d44e-4e06-867a-445d9b3db660/f2ba2026-3f4b-431c-97c1-c4ba582a9907-rescue.vmdk. [ 1000.931346] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-948cd519-e442-4b63-8987-6ebedb72946b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.966062] env[63028]: DEBUG oslo_vmware.api [None req-f6770839-a53a-44cd-af29-6d3e478d0666 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Task: {'id': task-2736055, 'name': PowerOffVM_Task, 'duration_secs': 0.264556} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.974193] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-32f1703d-6f07-46ed-86d8-68f7aa481601 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] Reconfiguring VM instance instance-00000050 to attach disk [datastore2] 8bb61bfa-d44e-4e06-867a-445d9b3db660/f2ba2026-3f4b-431c-97c1-c4ba582a9907-rescue.vmdk or device None with type thin {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1000.974541] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6770839-a53a-44cd-af29-6d3e478d0666 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: a97224e8-d69b-4c62-ab96-7cef037ef39b] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1000.974760] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-f6770839-a53a-44cd-af29-6d3e478d0666 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: a97224e8-d69b-4c62-ab96-7cef037ef39b] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1000.974947] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f2c3ed05-2485-4c39-bb7e-51d4d6e7cea6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.988644] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ebbab92c-d082-46a5-beec-66dad08b40db {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.995954] env[63028]: DEBUG oslo_vmware.api [None req-32f1703d-6f07-46ed-86d8-68f7aa481601 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Waiting for the task: (returnval){ [ 1000.995954] env[63028]: value = "task-2736059" [ 1000.995954] env[63028]: _type = "Task" [ 1000.995954] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.006730] env[63028]: DEBUG oslo_vmware.api [None req-32f1703d-6f07-46ed-86d8-68f7aa481601 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2736059, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.057921] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-f6770839-a53a-44cd-af29-6d3e478d0666 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: a97224e8-d69b-4c62-ab96-7cef037ef39b] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1001.058173] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-f6770839-a53a-44cd-af29-6d3e478d0666 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: a97224e8-d69b-4c62-ab96-7cef037ef39b] Deleting contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1001.058363] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-f6770839-a53a-44cd-af29-6d3e478d0666 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Deleting the datastore file [datastore1] a97224e8-d69b-4c62-ab96-7cef037ef39b {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1001.058633] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7c105780-a705-47ae-b481-02f14dd17400 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.065765] env[63028]: DEBUG oslo_vmware.api [None req-f6770839-a53a-44cd-af29-6d3e478d0666 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Waiting for the task: (returnval){ [ 1001.065765] env[63028]: value = "task-2736060" [ 1001.065765] env[63028]: _type = "Task" [ 1001.065765] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.073825] env[63028]: DEBUG oslo_vmware.api [None req-f6770839-a53a-44cd-af29-6d3e478d0666 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Task: {'id': task-2736060, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.166634] env[63028]: DEBUG oslo_vmware.api [None req-58bcb03d-d4ef-41ee-99d4-f0fb768c981b tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Task: {'id': task-2736056, 'name': PowerOffVM_Task, 'duration_secs': 0.140284} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.167155] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-58bcb03d-d4ef-41ee-99d4-f0fb768c981b tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: e7ba7069-fe9f-4e0f-acc6-c1d61ad5a8d9] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1001.167423] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-58bcb03d-d4ef-41ee-99d4-f0fb768c981b tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: e7ba7069-fe9f-4e0f-acc6-c1d61ad5a8d9] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1001.167732] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-dbd93424-aca6-46a7-b9fc-1eda137b6091 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.188311] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9079c8c2-b569-4e87-bc06-f3d8e91deb05 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459 tempest-FloatingIPsAssociationNegativeTestJSON-1722272459-project-member] Lock "455578fa-7468-40dc-8c0a-37ac35e5c0a0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.714s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1001.230087] env[63028]: DEBUG oslo_concurrency.lockutils [None req-077d2597-ca60-4717-9e81-198eecf87101 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Acquiring lock "b77ba7d6-305e-4b60-a4b7-9353c12c3920" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1001.230415] env[63028]: DEBUG oslo_concurrency.lockutils [None req-077d2597-ca60-4717-9e81-198eecf87101 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Acquired lock "b77ba7d6-305e-4b60-a4b7-9353c12c3920" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1001.231447] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f4da283-3d0d-408d-9d3f-c8b9a08fea7a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.255687] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73be0ce1-7ab8-43a5-9228-836bf20c80fb {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.261189] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-58bcb03d-d4ef-41ee-99d4-f0fb768c981b tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: e7ba7069-fe9f-4e0f-acc6-c1d61ad5a8d9] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1001.261189] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-58bcb03d-d4ef-41ee-99d4-f0fb768c981b tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: e7ba7069-fe9f-4e0f-acc6-c1d61ad5a8d9] Deleting contents of the VM from 
datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1001.261189] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-58bcb03d-d4ef-41ee-99d4-f0fb768c981b tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Deleting the datastore file [datastore2] e7ba7069-fe9f-4e0f-acc6-c1d61ad5a8d9 {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1001.261189] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-75a86481-d54f-461f-aaef-b24429e39496 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.266687] env[63028]: DEBUG oslo_vmware.api [None req-58bcb03d-d4ef-41ee-99d4-f0fb768c981b tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Waiting for the task: (returnval){ [ 1001.266687] env[63028]: value = "task-2736062" [ 1001.266687] env[63028]: _type = "Task" [ 1001.266687] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.291022] env[63028]: INFO nova.compute.manager [-] [instance: ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19] Took 1.50 seconds to deallocate network for instance. [ 1001.296724] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-077d2597-ca60-4717-9e81-198eecf87101 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: b77ba7d6-305e-4b60-a4b7-9353c12c3920] Reconfiguring VM to detach interface {{(pid=63028) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1001.308700] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-aa47048a-5825-4525-b0e9-023ddde74fee {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.332337] env[63028]: DEBUG oslo_vmware.api [None req-58bcb03d-d4ef-41ee-99d4-f0fb768c981b tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Task: {'id': task-2736062, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.333623] env[63028]: DEBUG oslo_vmware.api [None req-077d2597-ca60-4717-9e81-198eecf87101 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Waiting for the task: (returnval){ [ 1001.333623] env[63028]: value = "task-2736063" [ 1001.333623] env[63028]: _type = "Task" [ 1001.333623] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.340598] env[63028]: DEBUG oslo_vmware.api [None req-077d2597-ca60-4717-9e81-198eecf87101 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2736063, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.350047] env[63028]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1001.350047] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]529f24a1-fed4-ddee-4409-5bfa49a02ddb" [ 1001.350047] env[63028]: _type = "HttpNfcLease" [ 1001.350047] env[63028]: } is ready. 
{{(pid=63028) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1001.350353] env[63028]: DEBUG oslo_vmware.rw_handles [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1001.350353] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]529f24a1-fed4-ddee-4409-5bfa49a02ddb" [ 1001.350353] env[63028]: _type = "HttpNfcLease" [ 1001.350353] env[63028]: }. {{(pid=63028) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1001.351140] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c730113-c95c-4b1d-9c1c-984ab973558d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.361239] env[63028]: DEBUG oslo_vmware.rw_handles [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52610927-f0f5-f93f-8f37-21957a56761d/disk-0.vmdk from lease info. {{(pid=63028) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1001.361239] env[63028]: DEBUG oslo_vmware.rw_handles [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Creating HTTP connection to write to file with size = 31667712 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52610927-f0f5-f93f-8f37-21957a56761d/disk-0.vmdk. {{(pid=63028) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1001.442348] env[63028]: DEBUG oslo_vmware.api [None req-5304d7dd-e594-479a-82a5-046691037310 tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] Task: {'id': task-2736057, 'name': PowerOnVM_Task, 'duration_secs': 0.482439} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.445287] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d2530e60-0841-4b86-b7f0-a5706c864f23 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Acquiring lock "b16d85d7-13f3-4be0-8495-2fd2c1476f01" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1001.445536] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d2530e60-0841-4b86-b7f0-a5706c864f23 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Lock "b16d85d7-13f3-4be0-8495-2fd2c1476f01" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1001.446813] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-5304d7dd-e594-479a-82a5-046691037310 tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] [instance: 53b0cf02-f1c8-4219-a58c-f7b5ffbb1ae7] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1001.447276] env[63028]: INFO nova.compute.manager [None req-5304d7dd-e594-479a-82a5-046691037310 tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] [instance: 53b0cf02-f1c8-4219-a58c-f7b5ffbb1ae7] Took 9.81 seconds to spawn the instance on the hypervisor. [ 1001.447276] env[63028]: DEBUG nova.compute.manager [None req-5304d7dd-e594-479a-82a5-046691037310 tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] [instance: 53b0cf02-f1c8-4219-a58c-f7b5ffbb1ae7] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1001.449580] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7decade0-5327-447c-9c2a-e24973b228ce {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.454259] env[63028]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-bcde41dd-2bef-4570-aabe-9af335596b37 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.513457] env[63028]: DEBUG oslo_vmware.api [None req-32f1703d-6f07-46ed-86d8-68f7aa481601 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2736059, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.577930] env[63028]: DEBUG oslo_vmware.api [None req-f6770839-a53a-44cd-af29-6d3e478d0666 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Task: {'id': task-2736060, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.216128} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.578222] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-f6770839-a53a-44cd-af29-6d3e478d0666 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1001.578387] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-f6770839-a53a-44cd-af29-6d3e478d0666 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: a97224e8-d69b-4c62-ab96-7cef037ef39b] Deleted contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1001.578569] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-f6770839-a53a-44cd-af29-6d3e478d0666 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: a97224e8-d69b-4c62-ab96-7cef037ef39b] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1001.578743] env[63028]: INFO nova.compute.manager [None req-f6770839-a53a-44cd-af29-6d3e478d0666 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: a97224e8-d69b-4c62-ab96-7cef037ef39b] Took 1.20 seconds to destroy the instance on the hypervisor. [ 1001.579191] env[63028]: DEBUG oslo.service.loopingcall [None req-f6770839-a53a-44cd-af29-6d3e478d0666 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1001.579191] env[63028]: DEBUG nova.compute.manager [-] [instance: a97224e8-d69b-4c62-ab96-7cef037ef39b] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1001.579303] env[63028]: DEBUG nova.network.neutron [-] [instance: a97224e8-d69b-4c62-ab96-7cef037ef39b] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1001.652398] env[63028]: DEBUG oslo_concurrency.lockutils [None req-42356a16-d54c-4644-8977-b15ab56af7b9 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] Acquiring lock "3fb46d02-7914-4d08-b63b-f3447ba1b81a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1001.653392] env[63028]: DEBUG oslo_concurrency.lockutils [None req-42356a16-d54c-4644-8977-b15ab56af7b9 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] Lock "3fb46d02-7914-4d08-b63b-f3447ba1b81a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1001.653392] env[63028]: DEBUG oslo_concurrency.lockutils [None req-42356a16-d54c-4644-8977-b15ab56af7b9 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] Acquiring lock "3fb46d02-7914-4d08-b63b-f3447ba1b81a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1001.653392] env[63028]: DEBUG oslo_concurrency.lockutils [None req-42356a16-d54c-4644-8977-b15ab56af7b9 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] Lock "3fb46d02-7914-4d08-b63b-f3447ba1b81a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1001.653392] env[63028]: DEBUG oslo_concurrency.lockutils [None req-42356a16-d54c-4644-8977-b15ab56af7b9 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] Lock "3fb46d02-7914-4d08-b63b-f3447ba1b81a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1001.655891] env[63028]: INFO nova.compute.manager [None req-42356a16-d54c-4644-8977-b15ab56af7b9 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] [instance: 3fb46d02-7914-4d08-b63b-f3447ba1b81a] Terminating instance [ 1001.767571] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbedd538-027f-4e1a-b8b1-43ecc7009add {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.778223] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dc4eb5d-d66e-40d7-b80e-cd9592d7ee8e 
{{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.823248] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0491296-e28e-4e81-9109-b9ab56e23e2d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.834220] env[63028]: DEBUG oslo_vmware.api [None req-58bcb03d-d4ef-41ee-99d4-f0fb768c981b tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Task: {'id': task-2736062, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.26884} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.841924] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-58bcb03d-d4ef-41ee-99d4-f0fb768c981b tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1001.841924] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-58bcb03d-d4ef-41ee-99d4-f0fb768c981b tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: e7ba7069-fe9f-4e0f-acc6-c1d61ad5a8d9] Deleted contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1001.841924] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-58bcb03d-d4ef-41ee-99d4-f0fb768c981b tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: e7ba7069-fe9f-4e0f-acc6-c1d61ad5a8d9] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1001.841924] env[63028]: INFO nova.compute.manager [None req-58bcb03d-d4ef-41ee-99d4-f0fb768c981b tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] [instance: e7ba7069-fe9f-4e0f-acc6-c1d61ad5a8d9] Took 1.21 seconds to destroy the instance on the hypervisor. [ 1001.841924] env[63028]: DEBUG oslo.service.loopingcall [None req-58bcb03d-d4ef-41ee-99d4-f0fb768c981b tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1001.851965] env[63028]: DEBUG nova.compute.manager [-] [instance: e7ba7069-fe9f-4e0f-acc6-c1d61ad5a8d9] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1001.851965] env[63028]: DEBUG nova.network.neutron [-] [instance: e7ba7069-fe9f-4e0f-acc6-c1d61ad5a8d9] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1001.852916] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bf67c88-204e-4e18-b6ad-1f4359b14568 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.864583] env[63028]: DEBUG oslo_vmware.api [None req-077d2597-ca60-4717-9e81-198eecf87101 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2736063, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.872891] env[63028]: DEBUG nova.compute.provider_tree [None req-98d4fdad-5b7e-4f6c-913e-53467e1954bd tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1001.957931] env[63028]: DEBUG nova.compute.manager [None req-d2530e60-0841-4b86-b7f0-a5706c864f23 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] [instance: b16d85d7-13f3-4be0-8495-2fd2c1476f01] Starting instance... {{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1001.975903] env[63028]: INFO nova.compute.manager [None req-5304d7dd-e594-479a-82a5-046691037310 tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] [instance: 53b0cf02-f1c8-4219-a58c-f7b5ffbb1ae7] Took 28.29 seconds to build instance. [ 1002.010441] env[63028]: DEBUG oslo_vmware.api [None req-32f1703d-6f07-46ed-86d8-68f7aa481601 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2736059, 'name': ReconfigVM_Task, 'duration_secs': 0.612192} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.010441] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-32f1703d-6f07-46ed-86d8-68f7aa481601 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] Reconfigured VM instance instance-00000050 to attach disk [datastore2] 8bb61bfa-d44e-4e06-867a-445d9b3db660/f2ba2026-3f4b-431c-97c1-c4ba582a9907-rescue.vmdk or device None with type thin {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1002.010441] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c51979b-fdec-4016-b092-4c5a2134d089 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.042301] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c71ea761-8037-4593-bbb2-02aa05f48715 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.059545] env[63028]: DEBUG oslo_vmware.api [None req-32f1703d-6f07-46ed-86d8-68f7aa481601 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Waiting for the task: (returnval){ [ 1002.059545] env[63028]: value = "task-2736064" [ 1002.059545] env[63028]: _type = "Task" [ 1002.059545] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.071015] env[63028]: DEBUG oslo_vmware.api [None req-32f1703d-6f07-46ed-86d8-68f7aa481601 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2736064, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.160465] env[63028]: DEBUG nova.compute.manager [None req-42356a16-d54c-4644-8977-b15ab56af7b9 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] [instance: 3fb46d02-7914-4d08-b63b-f3447ba1b81a] Start destroying the instance on the hypervisor. 
{{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1002.160465] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-42356a16-d54c-4644-8977-b15ab56af7b9 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] [instance: 3fb46d02-7914-4d08-b63b-f3447ba1b81a] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1002.161797] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a12c668-9a87-4f66-8705-b57b7ce097c2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.171407] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-42356a16-d54c-4644-8977-b15ab56af7b9 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] [instance: 3fb46d02-7914-4d08-b63b-f3447ba1b81a] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1002.171671] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-26907c24-5b4c-4247-b2c4-c363c45ef24d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.181069] env[63028]: DEBUG oslo_vmware.api [None req-42356a16-d54c-4644-8977-b15ab56af7b9 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] Waiting for the task: (returnval){ [ 1002.181069] env[63028]: value = "task-2736065" [ 1002.181069] env[63028]: _type = "Task" [ 1002.181069] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.194480] env[63028]: DEBUG oslo_vmware.api [None req-42356a16-d54c-4644-8977-b15ab56af7b9 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] Task: {'id': task-2736065, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.346098] env[63028]: DEBUG oslo_vmware.api [None req-077d2597-ca60-4717-9e81-198eecf87101 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2736063, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.363326] env[63028]: DEBUG oslo_concurrency.lockutils [None req-56e62f6c-517c-4ef7-810a-239a4e51c428 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Lock "ef2cec82-0c02-4c4d-b4b8-3a3fecea0c19" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 3.140s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1002.381179] env[63028]: DEBUG nova.scheduler.client.report [None req-98d4fdad-5b7e-4f6c-913e-53467e1954bd tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1002.482113] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5304d7dd-e594-479a-82a5-046691037310 tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] Lock "53b0cf02-f1c8-4219-a58c-f7b5ffbb1ae7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.809s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1002.485120] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d2530e60-0841-4b86-b7f0-a5706c864f23 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1002.574066] env[63028]: DEBUG oslo_vmware.api [None req-32f1703d-6f07-46ed-86d8-68f7aa481601 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2736064, 'name': ReconfigVM_Task, 'duration_secs': 0.356147} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.574444] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-32f1703d-6f07-46ed-86d8-68f7aa481601 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1002.574756] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6bfd8bdd-afd9-4c16-8c39-c7ea43a0dc00 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.584574] env[63028]: DEBUG oslo_vmware.api [None req-32f1703d-6f07-46ed-86d8-68f7aa481601 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Waiting for the task: (returnval){ [ 1002.584574] env[63028]: value = "task-2736066" [ 1002.584574] env[63028]: _type = "Task" [ 1002.584574] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.600058] env[63028]: DEBUG oslo_vmware.api [None req-32f1703d-6f07-46ed-86d8-68f7aa481601 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2736066, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.631947] env[63028]: DEBUG oslo_vmware.rw_handles [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Completed reading data from the image iterator. {{(pid=63028) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1002.632219] env[63028]: DEBUG oslo_vmware.rw_handles [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52610927-f0f5-f93f-8f37-21957a56761d/disk-0.vmdk. {{(pid=63028) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1002.633302] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0081596-b726-403f-aca6-e60c2748451e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.645310] env[63028]: DEBUG oslo_vmware.rw_handles [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52610927-f0f5-f93f-8f37-21957a56761d/disk-0.vmdk is in state: ready. {{(pid=63028) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1002.645310] env[63028]: DEBUG oslo_vmware.rw_handles [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Releasing lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52610927-f0f5-f93f-8f37-21957a56761d/disk-0.vmdk. 
{{(pid=63028) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1002.645310] env[63028]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-50f28351-069b-4fbb-af85-215eed2b215d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.691342] env[63028]: DEBUG oslo_vmware.api [None req-42356a16-d54c-4644-8977-b15ab56af7b9 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] Task: {'id': task-2736065, 'name': PowerOffVM_Task, 'duration_secs': 0.24815} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.691700] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-42356a16-d54c-4644-8977-b15ab56af7b9 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] [instance: 3fb46d02-7914-4d08-b63b-f3447ba1b81a] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1002.692049] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-42356a16-d54c-4644-8977-b15ab56af7b9 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] [instance: 3fb46d02-7914-4d08-b63b-f3447ba1b81a] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1002.692191] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fa216d7e-2404-471d-aea0-917df0b2d8f6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.850166] env[63028]: DEBUG oslo_vmware.api [None req-077d2597-ca60-4717-9e81-198eecf87101 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2736063, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.885298] env[63028]: DEBUG oslo_concurrency.lockutils [None req-98d4fdad-5b7e-4f6c-913e-53467e1954bd tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.217s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1002.887630] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9f9e38f4-3d16-4f1d-b6e1-f2fbbef67673 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.733s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1002.887935] env[63028]: DEBUG nova.objects.instance [None req-9f9e38f4-3d16-4f1d-b6e1-f2fbbef67673 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Lazy-loading 'resources' on Instance uuid da88308f-ce62-40af-adae-e38aa506bdd9 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1002.901199] env[63028]: DEBUG oslo_vmware.rw_handles [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Closed VMDK write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52610927-f0f5-f93f-8f37-21957a56761d/disk-0.vmdk. {{(pid=63028) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1002.901504] env[63028]: INFO nova.virt.vmwareapi.images [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Downloaded image file data 398e3010-a42d-4c4b-8604-11f5a3e99cff [ 1002.902439] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4efc0fb-14f8-4413-b2ff-c2d54c05b246 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.924101] env[63028]: INFO nova.scheduler.client.report [None req-98d4fdad-5b7e-4f6c-913e-53467e1954bd tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Deleted allocations for instance c386c117-e255-4c3b-9a37-011e517277de [ 1002.925194] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7b52396f-bf97-4201-aece-123036117ed5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.953789] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-42356a16-d54c-4644-8977-b15ab56af7b9 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] [instance: 3fb46d02-7914-4d08-b63b-f3447ba1b81a] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1002.953789] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-42356a16-d54c-4644-8977-b15ab56af7b9 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] [instance: 3fb46d02-7914-4d08-b63b-f3447ba1b81a] Deleting contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1002.953789] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-42356a16-d54c-4644-8977-b15ab56af7b9 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] Deleting the datastore file [datastore2] 3fb46d02-7914-4d08-b63b-f3447ba1b81a {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1002.954858] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0343574d-9e9a-4a8b-b38e-a9776b36c2ae {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.958149] env[63028]: INFO nova.virt.vmwareapi.images [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] The imported VM was unregistered [ 1002.961376] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Caching image {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1002.961597] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Creating directory with path [datastore2] devstack-image-cache_base/398e3010-a42d-4c4b-8604-11f5a3e99cff {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1002.961872] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5872b2c3-63d4-4d85-abac-adf0961acd6e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.971747] env[63028]: DEBUG oslo_vmware.api [None req-42356a16-d54c-4644-8977-b15ab56af7b9 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] Waiting for the task: (returnval){ [ 1002.971747] env[63028]: value = "task-2736069" [ 1002.971747] env[63028]: _type = "Task" [ 1002.971747] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.979760] env[63028]: DEBUG oslo_vmware.api [None req-42356a16-d54c-4644-8977-b15ab56af7b9 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] Task: {'id': task-2736069, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.980881] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Created directory with path [datastore2] devstack-image-cache_base/398e3010-a42d-4c4b-8604-11f5a3e99cff {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1002.981085] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_fd4c3593-06ab-4599-8421-dfbf260ea7c7/OSTACK_IMG_fd4c3593-06ab-4599-8421-dfbf260ea7c7.vmdk to [datastore2] devstack-image-cache_base/398e3010-a42d-4c4b-8604-11f5a3e99cff/398e3010-a42d-4c4b-8604-11f5a3e99cff.vmdk. {{(pid=63028) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1002.981367] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-f6710aa0-04cf-4cbd-80d7-d35d1f8a0ab5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.987466] env[63028]: DEBUG oslo_vmware.api [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Waiting for the task: (returnval){ [ 1002.987466] env[63028]: value = "task-2736070" [ 1002.987466] env[63028]: _type = "Task" [ 1002.987466] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.995585] env[63028]: DEBUG oslo_vmware.api [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Task: {'id': task-2736070, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.099025] env[63028]: DEBUG oslo_vmware.api [None req-32f1703d-6f07-46ed-86d8-68f7aa481601 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2736066, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.277256] env[63028]: DEBUG nova.network.neutron [-] [instance: e7ba7069-fe9f-4e0f-acc6-c1d61ad5a8d9] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1003.277256] env[63028]: DEBUG nova.network.neutron [-] [instance: a97224e8-d69b-4c62-ab96-7cef037ef39b] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1003.348201] env[63028]: DEBUG oslo_vmware.api [None req-077d2597-ca60-4717-9e81-198eecf87101 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2736063, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.438338] env[63028]: DEBUG oslo_concurrency.lockutils [None req-98d4fdad-5b7e-4f6c-913e-53467e1954bd tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Lock "c386c117-e255-4c3b-9a37-011e517277de" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.174s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1003.486090] env[63028]: DEBUG oslo_vmware.api [None req-42356a16-d54c-4644-8977-b15ab56af7b9 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] Task: {'id': task-2736069, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.324988} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.486467] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-42356a16-d54c-4644-8977-b15ab56af7b9 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1003.486644] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-42356a16-d54c-4644-8977-b15ab56af7b9 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] [instance: 3fb46d02-7914-4d08-b63b-f3447ba1b81a] Deleted contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1003.486844] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-42356a16-d54c-4644-8977-b15ab56af7b9 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] [instance: 3fb46d02-7914-4d08-b63b-f3447ba1b81a] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1003.488377] env[63028]: INFO nova.compute.manager [None req-42356a16-d54c-4644-8977-b15ab56af7b9 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] [instance: 3fb46d02-7914-4d08-b63b-f3447ba1b81a] Took 1.33 seconds to destroy the instance on the hypervisor. [ 1003.488377] env[63028]: DEBUG oslo.service.loopingcall [None req-42356a16-d54c-4644-8977-b15ab56af7b9 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1003.488518] env[63028]: DEBUG nova.compute.manager [-] [instance: 3fb46d02-7914-4d08-b63b-f3447ba1b81a] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1003.489371] env[63028]: DEBUG nova.network.neutron [-] [instance: 3fb46d02-7914-4d08-b63b-f3447ba1b81a] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1003.502995] env[63028]: DEBUG oslo_vmware.api [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Task: {'id': task-2736070, 'name': MoveVirtualDisk_Task} progress is 15%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.597429] env[63028]: DEBUG oslo_vmware.api [None req-32f1703d-6f07-46ed-86d8-68f7aa481601 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2736066, 'name': PowerOnVM_Task, 'duration_secs': 0.630997} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.600483] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-32f1703d-6f07-46ed-86d8-68f7aa481601 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1003.603492] env[63028]: DEBUG nova.compute.manager [None req-32f1703d-6f07-46ed-86d8-68f7aa481601 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1003.604552] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf06a9b7-1435-4d1f-a72f-1b0ba0b66040 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.623015] env[63028]: DEBUG nova.compute.manager [req-91332b98-32dd-4048-a587-899a694836c2 req-7ebc79d9-eca0-4a81-ba60-b10c8a4ff1a9 service nova] [instance: e7ba7069-fe9f-4e0f-acc6-c1d61ad5a8d9] Received event network-vif-deleted-298e34ad-d45e-40ed-aa57-0a93af59226f {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1003.623306] env[63028]: DEBUG nova.compute.manager [req-91332b98-32dd-4048-a587-899a694836c2 req-7ebc79d9-eca0-4a81-ba60-b10c8a4ff1a9 service nova] [instance: a97224e8-d69b-4c62-ab96-7cef037ef39b] Received event network-vif-deleted-bd7b8218-749b-4f6a-be35-834767306b21 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1003.643657] env[63028]: DEBUG nova.compute.manager [req-d179e911-4048-4b32-a0f9-275da45288bb req-f78685be-a859-4930-a2a2-a56710ecc336 service nova] [instance: 53b0cf02-f1c8-4219-a58c-f7b5ffbb1ae7] Received event network-changed-5dc30fb0-c128-49d6-a5d1-cd0f53cc9958 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1003.644038] env[63028]: DEBUG nova.compute.manager [req-d179e911-4048-4b32-a0f9-275da45288bb req-f78685be-a859-4930-a2a2-a56710ecc336 service nova] [instance: 53b0cf02-f1c8-4219-a58c-f7b5ffbb1ae7] Refreshing instance network info cache due to event network-changed-5dc30fb0-c128-49d6-a5d1-cd0f53cc9958. 
{{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1003.645247] env[63028]: DEBUG oslo_concurrency.lockutils [req-d179e911-4048-4b32-a0f9-275da45288bb req-f78685be-a859-4930-a2a2-a56710ecc336 service nova] Acquiring lock "refresh_cache-53b0cf02-f1c8-4219-a58c-f7b5ffbb1ae7" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1003.645545] env[63028]: DEBUG oslo_concurrency.lockutils [req-d179e911-4048-4b32-a0f9-275da45288bb req-f78685be-a859-4930-a2a2-a56710ecc336 service nova] Acquired lock "refresh_cache-53b0cf02-f1c8-4219-a58c-f7b5ffbb1ae7" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1003.645813] env[63028]: DEBUG nova.network.neutron [req-d179e911-4048-4b32-a0f9-275da45288bb req-f78685be-a859-4930-a2a2-a56710ecc336 service nova] [instance: 53b0cf02-f1c8-4219-a58c-f7b5ffbb1ae7] Refreshing network info cache for port 5dc30fb0-c128-49d6-a5d1-cd0f53cc9958 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1003.779267] env[63028]: INFO nova.compute.manager [-] [instance: e7ba7069-fe9f-4e0f-acc6-c1d61ad5a8d9] Took 1.93 seconds to deallocate network for instance. [ 1003.779597] env[63028]: INFO nova.compute.manager [-] [instance: a97224e8-d69b-4c62-ab96-7cef037ef39b] Took 2.20 seconds to deallocate network for instance. [ 1003.792570] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac9d4bc6-a360-4d25-9fed-3d6987dee373 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.801491] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61790a60-de3f-4850-8bf7-197bf9f9f1dc {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.850864] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4ae1efb-b1a7-4f92-9967-a260877fec39 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.862853] env[63028]: DEBUG oslo_vmware.api [None req-077d2597-ca60-4717-9e81-198eecf87101 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2736063, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.868709] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c85543d-0c6f-4048-93b2-a41abe3e2ee6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.889396] env[63028]: DEBUG nova.compute.provider_tree [None req-9f9e38f4-3d16-4f1d-b6e1-f2fbbef67673 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1004.002841] env[63028]: DEBUG oslo_vmware.api [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Task: {'id': task-2736070, 'name': MoveVirtualDisk_Task} progress is 35%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.092534] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7e0e6b37-3dae-4280-9cd6-a7e57ab79152 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Acquiring lock "c492dea4-9779-4460-a559-5b82fb0643f0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1004.092851] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7e0e6b37-3dae-4280-9cd6-a7e57ab79152 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Lock "c492dea4-9779-4460-a559-5b82fb0643f0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1004.293650] env[63028]: DEBUG oslo_concurrency.lockutils [None req-58bcb03d-d4ef-41ee-99d4-f0fb768c981b tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1004.297738] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f6770839-a53a-44cd-af29-6d3e478d0666 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1004.358492] env[63028]: DEBUG oslo_vmware.api [None req-077d2597-ca60-4717-9e81-198eecf87101 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2736063, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.395403] env[63028]: DEBUG nova.scheduler.client.report [None req-9f9e38f4-3d16-4f1d-b6e1-f2fbbef67673 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1004.505619] env[63028]: DEBUG oslo_vmware.api [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Task: {'id': task-2736070, 'name': MoveVirtualDisk_Task} progress is 54%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.596188] env[63028]: DEBUG nova.network.neutron [req-d179e911-4048-4b32-a0f9-275da45288bb req-f78685be-a859-4930-a2a2-a56710ecc336 service nova] [instance: 53b0cf02-f1c8-4219-a58c-f7b5ffbb1ae7] Updated VIF entry in instance network info cache for port 5dc30fb0-c128-49d6-a5d1-cd0f53cc9958. {{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1004.596805] env[63028]: DEBUG nova.network.neutron [req-d179e911-4048-4b32-a0f9-275da45288bb req-f78685be-a859-4930-a2a2-a56710ecc336 service nova] [instance: 53b0cf02-f1c8-4219-a58c-f7b5ffbb1ae7] Updating instance_info_cache with network_info: [{"id": "5dc30fb0-c128-49d6-a5d1-cd0f53cc9958", "address": "fa:16:3e:8c:5e:8c", "network": {"id": "62aa2116-8683-4ddd-a3e5-0b23a102cee0", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-251424440-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.129", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e09fff22285147c29cb2bb096bde3921", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6edb8eae-1113-49d0-84f7-9fd9f82b26fb", "external-id": "nsx-vlan-transportzone-493", "segmentation_id": 493, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5dc30fb0-c1", "ovs_interfaceid": "5dc30fb0-c128-49d6-a5d1-cd0f53cc9958", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1004.598958] env[63028]: DEBUG nova.compute.manager [None req-7e0e6b37-3dae-4280-9cd6-a7e57ab79152 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: c492dea4-9779-4460-a559-5b82fb0643f0] Starting instance... {{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1004.722497] env[63028]: DEBUG nova.network.neutron [-] [instance: 3fb46d02-7914-4d08-b63b-f3447ba1b81a] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1004.862347] env[63028]: DEBUG oslo_vmware.api [None req-077d2597-ca60-4717-9e81-198eecf87101 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2736063, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.904276] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9f9e38f4-3d16-4f1d-b6e1-f2fbbef67673 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.014s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1004.905229] env[63028]: DEBUG oslo_concurrency.lockutils [None req-618d358e-08e2-4af1-b725-9b296f03d89a tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.831s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1004.908707] env[63028]: INFO nova.compute.claims [None req-618d358e-08e2-4af1-b725-9b296f03d89a tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 56e6ade9-893b-4c85-b0b8-e9f7b12cbad6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1004.936754] env[63028]: INFO nova.scheduler.client.report [None req-9f9e38f4-3d16-4f1d-b6e1-f2fbbef67673 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Deleted allocations for instance da88308f-ce62-40af-adae-e38aa506bdd9 [ 1005.012315] env[63028]: DEBUG oslo_vmware.api [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Task: {'id': task-2736070, 'name': MoveVirtualDisk_Task} progress is 74%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.084346] env[63028]: INFO nova.compute.manager [None req-cf862dd6-1fc3-4802-9dc2-4824c9eed16f tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] Unrescuing [ 1005.084710] env[63028]: DEBUG oslo_concurrency.lockutils [None req-cf862dd6-1fc3-4802-9dc2-4824c9eed16f tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Acquiring lock "refresh_cache-8bb61bfa-d44e-4e06-867a-445d9b3db660" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1005.084810] env[63028]: DEBUG oslo_concurrency.lockutils [None req-cf862dd6-1fc3-4802-9dc2-4824c9eed16f tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Acquired lock "refresh_cache-8bb61bfa-d44e-4e06-867a-445d9b3db660" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1005.084976] env[63028]: DEBUG nova.network.neutron [None req-cf862dd6-1fc3-4802-9dc2-4824c9eed16f tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1005.103304] env[63028]: DEBUG oslo_concurrency.lockutils [req-d179e911-4048-4b32-a0f9-275da45288bb req-f78685be-a859-4930-a2a2-a56710ecc336 service nova] Releasing lock "refresh_cache-53b0cf02-f1c8-4219-a58c-f7b5ffbb1ae7" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1005.123631] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7e0e6b37-3dae-4280-9cd6-a7e57ab79152 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1005.164364] env[63028]: DEBUG oslo_concurrency.lockutils [None req-82e835a5-f72a-41b4-bb91-607fdececc16 tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] Acquiring lock "da23282a-bbda-47bf-9d9c-337ee9996779" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1005.164647] env[63028]: DEBUG oslo_concurrency.lockutils [None req-82e835a5-f72a-41b4-bb91-607fdececc16 tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] Lock "da23282a-bbda-47bf-9d9c-337ee9996779" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1005.225201] env[63028]: INFO nova.compute.manager [-] [instance: 3fb46d02-7914-4d08-b63b-f3447ba1b81a] Took 1.74 seconds to deallocate network for instance. 
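[editor's note] The repeated 'Acquiring lock ... by ...', 'acquired by ... waited N s' and '"released" by ... held N s' triplets in the entries above (for example around the "compute_resources" and per-instance UUID locks) are emitted by oslo.concurrency's lockutils wrapper, which times how long a caller waited for and then held a named in-process lock. A minimal, hedged sketch of that usage pattern follows; the resource name and the guarded function are hypothetical stand-ins, not Nova code, and only the public lockutils API is assumed.

from oslo_concurrency import lockutils

# Hypothetical example: serialize updates to some shared tracker state under a
# named lock. The decorator form logs the acquire/wait/hold DEBUG lines seen above.
@lockutils.synchronized('compute_resources')
def update_usage(instance_uuid, flavor):
    # Critical section: only one thread in this process runs it at a time.
    pass

# The context-manager form produces the same locking behaviour:
with lockutils.lock('compute_resources'):
    pass  # guarded work goes here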
[ 1005.293697] env[63028]: DEBUG oslo_concurrency.lockutils [None req-700d1c14-0bbf-4780-a97c-2e177b5c2496 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Acquiring lock "b3930760-1888-4f80-85d8-65120a25f275" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1005.293697] env[63028]: DEBUG oslo_concurrency.lockutils [None req-700d1c14-0bbf-4780-a97c-2e177b5c2496 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Lock "b3930760-1888-4f80-85d8-65120a25f275" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1005.360359] env[63028]: DEBUG oslo_vmware.api [None req-077d2597-ca60-4717-9e81-198eecf87101 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2736063, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.450163] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9f9e38f4-3d16-4f1d-b6e1-f2fbbef67673 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Lock "da88308f-ce62-40af-adae-e38aa506bdd9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.101s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1005.505110] env[63028]: DEBUG oslo_vmware.api [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Task: {'id': task-2736070, 'name': MoveVirtualDisk_Task} progress is 94%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.652389] env[63028]: DEBUG nova.compute.manager [req-078752e3-e33b-4ed9-84e3-1489cc6f8ed1 req-b5347659-c62e-40af-977e-97c551015688 service nova] [instance: 3fb46d02-7914-4d08-b63b-f3447ba1b81a] Received event network-vif-deleted-eb3a6303-8ef2-4d1d-bcbc-58838b430d84 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1005.667507] env[63028]: DEBUG nova.compute.manager [None req-82e835a5-f72a-41b4-bb91-607fdececc16 tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] [instance: da23282a-bbda-47bf-9d9c-337ee9996779] Starting instance... {{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1005.738162] env[63028]: DEBUG oslo_concurrency.lockutils [None req-42356a16-d54c-4644-8977-b15ab56af7b9 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1005.796319] env[63028]: DEBUG nova.compute.manager [None req-700d1c14-0bbf-4780-a97c-2e177b5c2496 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: b3930760-1888-4f80-85d8-65120a25f275] Starting instance... 
{{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1005.817405] env[63028]: DEBUG nova.network.neutron [None req-cf862dd6-1fc3-4802-9dc2-4824c9eed16f tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] Updating instance_info_cache with network_info: [{"id": "b0d7c3ce-e883-4ccc-80ac-87d06acc7bb4", "address": "fa:16:3e:cb:aa:63", "network": {"id": "53257b54-f197-4a4e-94bb-23ad6c6b7353", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-575661432-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.146", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e2304ce21bf141cab94fb6c342653812", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0954fad3-d24d-496c-83e6-a09d3cb556fc", "external-id": "nsx-vlan-transportzone-216", "segmentation_id": 216, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb0d7c3ce-e8", "ovs_interfaceid": "b0d7c3ce-e883-4ccc-80ac-87d06acc7bb4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1005.859970] env[63028]: DEBUG oslo_vmware.api [None req-077d2597-ca60-4717-9e81-198eecf87101 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2736063, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.010598] env[63028]: DEBUG oslo_vmware.api [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Task: {'id': task-2736070, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.896842} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.010948] env[63028]: INFO nova.virt.vmwareapi.ds_util [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_fd4c3593-06ab-4599-8421-dfbf260ea7c7/OSTACK_IMG_fd4c3593-06ab-4599-8421-dfbf260ea7c7.vmdk to [datastore2] devstack-image-cache_base/398e3010-a42d-4c4b-8604-11f5a3e99cff/398e3010-a42d-4c4b-8604-11f5a3e99cff.vmdk. 
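[editor's note] The entries from "Downloaded image file data 398e3010-..." through the MakeDirectory, MoveVirtualDisk_Task, DeleteDatastoreFile_Task and later CopyVirtualDisk_Task calls show the VMware driver populating its per-datastore image cache (devstack-image-cache_base) before cloning the disk for the instance. The sketch below only mirrors that order of operations as visible in the log; ensure_cached_image, ds_mkdir, move_disk, delete_file and copy_disk are hypothetical stand-ins, not the actual nova.virt.vmwareapi helpers.

# Stub helpers so the sketch runs; in the real driver these are vCenter tasks.
def ds_mkdir(path): print('MakeDirectory', path)
def move_disk(src, dst): print('MoveVirtualDisk_Task', src, '->', dst)
def delete_file(path): print('DeleteDatastoreFile_Task', path)
def copy_disk(src, dst): print('CopyVirtualDisk_Task', src, '->', dst)

CACHE_ROOT = '[datastore2] devstack-image-cache_base'

def ensure_cached_image(image_id, tmp_vm_folder):
    # 1. image data has already been streamed into a temporary OSTACK_IMG_* folder
    cached_vmdk = f'{CACHE_ROOT}/{image_id}/{image_id}.vmdk'
    ds_mkdir(f'{CACHE_ROOT}/{image_id}')           # 2. create the cache directory
    move_disk(f'{tmp_vm_folder}/disk.vmdk', cached_vmdk)  # 3. move disk into the cache
    delete_file(tmp_vm_folder)                     # 4. clean up the temporary folder
    return cached_vmdk

def prepare_instance_disk(image_id, instance_uuid, tmp_vm_folder):
    cached = ensure_cached_image(image_id, tmp_vm_folder)
    # 5. copy from the cache into the instance folder for the new VM
    copy_disk(cached, f'[datastore2] {instance_uuid}/{instance_uuid}.vmdk')

prepare_instance_disk('398e3010-a42d-4c4b-8604-11f5a3e99cff',
                      '63524cd8-81de-419f-bb07-0326f3cb393f',
                      '[datastore2] OSTACK_IMG_fd4c3593-06ab-4599-8421-dfbf260ea7c7')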
[ 1006.011222] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Cleaning up location [datastore2] OSTACK_IMG_fd4c3593-06ab-4599-8421-dfbf260ea7c7 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1006.011659] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_fd4c3593-06ab-4599-8421-dfbf260ea7c7 {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1006.011760] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a1a28b2e-e45d-4660-8613-fb86f53632d4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.018982] env[63028]: DEBUG oslo_vmware.api [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Waiting for the task: (returnval){ [ 1006.018982] env[63028]: value = "task-2736071" [ 1006.018982] env[63028]: _type = "Task" [ 1006.018982] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.028437] env[63028]: DEBUG oslo_vmware.api [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Task: {'id': task-2736071, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.193060] env[63028]: DEBUG oslo_concurrency.lockutils [None req-82e835a5-f72a-41b4-bb91-607fdececc16 tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1006.317662] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f82bd311-493f-4478-909d-2996fdc6ee58 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.321565] env[63028]: DEBUG oslo_concurrency.lockutils [None req-700d1c14-0bbf-4780-a97c-2e177b5c2496 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1006.322096] env[63028]: DEBUG oslo_concurrency.lockutils [None req-cf862dd6-1fc3-4802-9dc2-4824c9eed16f tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Releasing lock "refresh_cache-8bb61bfa-d44e-4e06-867a-445d9b3db660" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1006.322683] env[63028]: DEBUG nova.objects.instance [None req-cf862dd6-1fc3-4802-9dc2-4824c9eed16f tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Lazy-loading 'flavor' on Instance uuid 8bb61bfa-d44e-4e06-867a-445d9b3db660 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1006.328995] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24ee5f5a-e39c-4a88-a402-df5073b6c9f6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.365779] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2700ccb3-4101-4e61-be75-d8dac43f5b1c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.377953] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbecaf28-8aa3-4fa8-b4fe-e98f16fbfa75 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.381856] env[63028]: DEBUG oslo_vmware.api [None req-077d2597-ca60-4717-9e81-198eecf87101 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2736063, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.392853] env[63028]: DEBUG nova.compute.provider_tree [None req-618d358e-08e2-4af1-b725-9b296f03d89a tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1006.529502] env[63028]: DEBUG oslo_vmware.api [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Task: {'id': task-2736071, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.195585} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.529779] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1006.529947] env[63028]: DEBUG oslo_concurrency.lockutils [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Releasing lock "[datastore2] devstack-image-cache_base/398e3010-a42d-4c4b-8604-11f5a3e99cff/398e3010-a42d-4c4b-8604-11f5a3e99cff.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1006.530223] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/398e3010-a42d-4c4b-8604-11f5a3e99cff/398e3010-a42d-4c4b-8604-11f5a3e99cff.vmdk to [datastore2] 63524cd8-81de-419f-bb07-0326f3cb393f/63524cd8-81de-419f-bb07-0326f3cb393f.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1006.530470] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-84795144-8b8b-4409-adda-a10b144d44b4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.536993] env[63028]: DEBUG oslo_vmware.api [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Waiting for the task: (returnval){ [ 1006.536993] env[63028]: value = "task-2736072" [ 1006.536993] env[63028]: _type = "Task" [ 1006.536993] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.544685] env[63028]: DEBUG oslo_vmware.api [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Task: {'id': task-2736072, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.829633] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24f37908-5ef1-4f25-b6f0-7cd1d4a0931e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.854309] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf862dd6-1fc3-4802-9dc2-4824c9eed16f tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1006.855031] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e97ca6f5-edf5-4262-83c2-7ccb2b396f2b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.862176] env[63028]: DEBUG oslo_vmware.api [None req-cf862dd6-1fc3-4802-9dc2-4824c9eed16f tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Waiting for the task: (returnval){ [ 1006.862176] env[63028]: value = "task-2736073" [ 1006.862176] env[63028]: _type = "Task" [ 1006.862176] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.874454] env[63028]: DEBUG oslo_vmware.api [None req-077d2597-ca60-4717-9e81-198eecf87101 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2736063, 'name': ReconfigVM_Task} progress is 18%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.877324] env[63028]: DEBUG oslo_vmware.api [None req-cf862dd6-1fc3-4802-9dc2-4824c9eed16f tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2736073, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.896575] env[63028]: DEBUG nova.scheduler.client.report [None req-618d358e-08e2-4af1-b725-9b296f03d89a tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1007.047230] env[63028]: DEBUG oslo_vmware.api [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Task: {'id': task-2736072, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.258490] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f0751db6-5ce2-45a6-a330-fe5726ff7979 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Acquiring lock "f804ec95-0b97-4960-844d-b678b97fc401" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1007.258736] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f0751db6-5ce2-45a6-a330-fe5726ff7979 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Lock "f804ec95-0b97-4960-844d-b678b97fc401" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1007.377125] env[63028]: DEBUG oslo_vmware.api [None req-077d2597-ca60-4717-9e81-198eecf87101 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2736063, 'name': ReconfigVM_Task, 'duration_secs': 5.749707} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.380375] env[63028]: DEBUG oslo_concurrency.lockutils [None req-077d2597-ca60-4717-9e81-198eecf87101 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Releasing lock "b77ba7d6-305e-4b60-a4b7-9353c12c3920" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1007.380693] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-077d2597-ca60-4717-9e81-198eecf87101 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: b77ba7d6-305e-4b60-a4b7-9353c12c3920] Reconfigured VM to detach interface {{(pid=63028) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1007.383624] env[63028]: DEBUG oslo_vmware.api [None req-cf862dd6-1fc3-4802-9dc2-4824c9eed16f tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2736073, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.401409] env[63028]: DEBUG oslo_concurrency.lockutils [None req-618d358e-08e2-4af1-b725-9b296f03d89a tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.496s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1007.401946] env[63028]: DEBUG nova.compute.manager [None req-618d358e-08e2-4af1-b725-9b296f03d89a tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 56e6ade9-893b-4c85-b0b8-e9f7b12cbad6] Start building networks asynchronously for instance. 
{{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1007.404602] env[63028]: DEBUG oslo_concurrency.lockutils [None req-339f9283-11da-4056-ad23-de15d610fd36 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.728s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1007.404767] env[63028]: DEBUG oslo_concurrency.lockutils [None req-339f9283-11da-4056-ad23-de15d610fd36 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1007.406965] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.913s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1007.407202] env[63028]: DEBUG nova.objects.instance [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Lazy-loading 'pci_requests' on Instance uuid 85aafadb-81d6-4687-aed1-fbe829e5f95f {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1007.430566] env[63028]: INFO nova.scheduler.client.report [None req-339f9283-11da-4056-ad23-de15d610fd36 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Deleted allocations for instance ed872f21-c2c4-4597-8c9e-9f8d2202b707 [ 1007.548371] env[63028]: DEBUG oslo_vmware.api [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Task: {'id': task-2736072, 'name': CopyVirtualDisk_Task} progress is 21%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.680056] env[63028]: DEBUG nova.compute.manager [req-71aa8e3d-674a-4cc2-95be-b63c6852394a req-6d621fcb-dba0-430c-bdd1-fbe0be1360cc service nova] [instance: b77ba7d6-305e-4b60-a4b7-9353c12c3920] Received event network-vif-deleted-98722f90-f2d2-4a4a-9e68-ad5c32b18435 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1007.680320] env[63028]: INFO nova.compute.manager [req-71aa8e3d-674a-4cc2-95be-b63c6852394a req-6d621fcb-dba0-430c-bdd1-fbe0be1360cc service nova] [instance: b77ba7d6-305e-4b60-a4b7-9353c12c3920] Neutron deleted interface 98722f90-f2d2-4a4a-9e68-ad5c32b18435; detaching it from the instance and deleting it from the info cache [ 1007.681142] env[63028]: DEBUG nova.network.neutron [req-71aa8e3d-674a-4cc2-95be-b63c6852394a req-6d621fcb-dba0-430c-bdd1-fbe0be1360cc service nova] [instance: b77ba7d6-305e-4b60-a4b7-9353c12c3920] Updating instance_info_cache with network_info: [{"id": "9efd2ef2-d319-4038-ab28-44a46bd597d8", "address": "fa:16:3e:90:60:0c", "network": {"id": "c2f1496c-e3fd-43db-a032-12cdacdb4e46", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1287620143-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.220", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "25a6457f62d149629c09589feb1a550c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9efd2ef2-d3", "ovs_interfaceid": "9efd2ef2-d319-4038-ab28-44a46bd597d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "d0308a48-57ab-41f7-bbab-6871ed89c5f2", "address": "fa:16:3e:c8:52:ef", "network": {"id": "c2f1496c-e3fd-43db-a032-12cdacdb4e46", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1287620143-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "25a6457f62d149629c09589feb1a550c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd0308a48-57", "ovs_interfaceid": "d0308a48-57ab-41f7-bbab-6871ed89c5f2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 1007.761810] env[63028]: DEBUG nova.compute.manager [None req-f0751db6-5ce2-45a6-a330-fe5726ff7979 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] [instance: f804ec95-0b97-4960-844d-b678b97fc401] Starting instance... {{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1007.878733] env[63028]: DEBUG oslo_vmware.api [None req-cf862dd6-1fc3-4802-9dc2-4824c9eed16f tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2736073, 'name': PowerOffVM_Task, 'duration_secs': 0.721993} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.879113] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf862dd6-1fc3-4802-9dc2-4824c9eed16f tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1007.885731] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-cf862dd6-1fc3-4802-9dc2-4824c9eed16f tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] Reconfiguring VM instance instance-00000050 to detach disk 2002 {{(pid=63028) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1007.886085] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4037575d-3202-48a2-b7bc-dd138f6dc704 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.906188] env[63028]: DEBUG oslo_vmware.api [None req-cf862dd6-1fc3-4802-9dc2-4824c9eed16f tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Waiting for the task: (returnval){ [ 1007.906188] env[63028]: value = "task-2736074" [ 1007.906188] env[63028]: _type = "Task" [ 1007.906188] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.913525] env[63028]: DEBUG nova.compute.utils [None req-618d358e-08e2-4af1-b725-9b296f03d89a tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1007.917759] env[63028]: DEBUG nova.objects.instance [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Lazy-loading 'numa_topology' on Instance uuid 85aafadb-81d6-4687-aed1-fbe829e5f95f {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1007.919644] env[63028]: DEBUG nova.compute.manager [None req-618d358e-08e2-4af1-b725-9b296f03d89a tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 56e6ade9-893b-4c85-b0b8-e9f7b12cbad6] Allocating IP information in the background. 
{{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1007.920267] env[63028]: DEBUG nova.network.neutron [None req-618d358e-08e2-4af1-b725-9b296f03d89a tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 56e6ade9-893b-4c85-b0b8-e9f7b12cbad6] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1007.932686] env[63028]: DEBUG oslo_vmware.api [None req-cf862dd6-1fc3-4802-9dc2-4824c9eed16f tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2736074, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.941431] env[63028]: DEBUG oslo_concurrency.lockutils [None req-339f9283-11da-4056-ad23-de15d610fd36 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Lock "ed872f21-c2c4-4597-8c9e-9f8d2202b707" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 13.939s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1007.989645] env[63028]: DEBUG nova.policy [None req-618d358e-08e2-4af1-b725-9b296f03d89a tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8478e45562394a0d8fafc5e3e1218fd2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '05118b378b5e4d838962db2378b381bc', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 1008.054393] env[63028]: DEBUG oslo_vmware.api [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Task: {'id': task-2736072, 'name': CopyVirtualDisk_Task} progress is 43%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.184437] env[63028]: DEBUG oslo_concurrency.lockutils [req-71aa8e3d-674a-4cc2-95be-b63c6852394a req-6d621fcb-dba0-430c-bdd1-fbe0be1360cc service nova] Acquiring lock "b77ba7d6-305e-4b60-a4b7-9353c12c3920" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1008.184724] env[63028]: DEBUG oslo_concurrency.lockutils [req-71aa8e3d-674a-4cc2-95be-b63c6852394a req-6d621fcb-dba0-430c-bdd1-fbe0be1360cc service nova] Acquired lock "b77ba7d6-305e-4b60-a4b7-9353c12c3920" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1008.185660] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c96a071a-a8f9-4643-95eb-58fd6330d6f2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.205821] env[63028]: DEBUG oslo_concurrency.lockutils [req-71aa8e3d-674a-4cc2-95be-b63c6852394a req-6d621fcb-dba0-430c-bdd1-fbe0be1360cc service nova] Releasing lock "b77ba7d6-305e-4b60-a4b7-9353c12c3920" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1008.206198] env[63028]: WARNING nova.compute.manager [req-71aa8e3d-674a-4cc2-95be-b63c6852394a req-6d621fcb-dba0-430c-bdd1-fbe0be1360cc service nova] [instance: b77ba7d6-305e-4b60-a4b7-9353c12c3920] Detach interface failed, port_id=98722f90-f2d2-4a4a-9e68-ad5c32b18435, reason: No device with interface-id 98722f90-f2d2-4a4a-9e68-ad5c32b18435 exists on VM: nova.exception.NotFound: No device with interface-id 98722f90-f2d2-4a4a-9e68-ad5c32b18435 exists on VM [ 1008.291545] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f0751db6-5ce2-45a6-a330-fe5726ff7979 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1008.417296] env[63028]: DEBUG oslo_vmware.api [None req-cf862dd6-1fc3-4802-9dc2-4824c9eed16f tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2736074, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.419987] env[63028]: DEBUG nova.compute.manager [None req-618d358e-08e2-4af1-b725-9b296f03d89a tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 56e6ade9-893b-4c85-b0b8-e9f7b12cbad6] Start building block device mappings for instance. 
{{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1008.424495] env[63028]: INFO nova.compute.claims [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1008.540122] env[63028]: DEBUG nova.network.neutron [None req-618d358e-08e2-4af1-b725-9b296f03d89a tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 56e6ade9-893b-4c85-b0b8-e9f7b12cbad6] Successfully created port: 1f5c01d1-9623-425e-8309-336dd1d961fa {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1008.553088] env[63028]: DEBUG oslo_vmware.api [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Task: {'id': task-2736072, 'name': CopyVirtualDisk_Task} progress is 66%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.603300] env[63028]: DEBUG oslo_concurrency.lockutils [None req-077d2597-ca60-4717-9e81-198eecf87101 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Acquiring lock "refresh_cache-b77ba7d6-305e-4b60-a4b7-9353c12c3920" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1008.603300] env[63028]: DEBUG oslo_concurrency.lockutils [None req-077d2597-ca60-4717-9e81-198eecf87101 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Acquired lock "refresh_cache-b77ba7d6-305e-4b60-a4b7-9353c12c3920" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1008.603300] env[63028]: DEBUG nova.network.neutron [None req-077d2597-ca60-4717-9e81-198eecf87101 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: b77ba7d6-305e-4b60-a4b7-9353c12c3920] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1008.917327] env[63028]: DEBUG oslo_vmware.api [None req-cf862dd6-1fc3-4802-9dc2-4824c9eed16f tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2736074, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.052825] env[63028]: DEBUG oslo_vmware.api [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Task: {'id': task-2736072, 'name': CopyVirtualDisk_Task} progress is 85%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.060483] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7877c216-7073-4971-8e3d-6be7dc600ebf tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Acquiring lock "b77ba7d6-305e-4b60-a4b7-9353c12c3920" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1009.060771] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7877c216-7073-4971-8e3d-6be7dc600ebf tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Lock "b77ba7d6-305e-4b60-a4b7-9353c12c3920" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1009.060965] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7877c216-7073-4971-8e3d-6be7dc600ebf tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Acquiring lock "b77ba7d6-305e-4b60-a4b7-9353c12c3920-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1009.061156] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7877c216-7073-4971-8e3d-6be7dc600ebf tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Lock "b77ba7d6-305e-4b60-a4b7-9353c12c3920-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1009.061327] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7877c216-7073-4971-8e3d-6be7dc600ebf tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Lock "b77ba7d6-305e-4b60-a4b7-9353c12c3920-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1009.063842] env[63028]: INFO nova.compute.manager [None req-7877c216-7073-4971-8e3d-6be7dc600ebf tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: b77ba7d6-305e-4b60-a4b7-9353c12c3920] Terminating instance [ 1009.423712] env[63028]: DEBUG oslo_vmware.api [None req-cf862dd6-1fc3-4802-9dc2-4824c9eed16f tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2736074, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.428901] env[63028]: DEBUG nova.compute.manager [None req-618d358e-08e2-4af1-b725-9b296f03d89a tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 56e6ade9-893b-4c85-b0b8-e9f7b12cbad6] Start spawning the instance on the hypervisor. 
{{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1009.460956] env[63028]: DEBUG nova.virt.hardware [None req-618d358e-08e2-4af1-b725-9b296f03d89a tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1009.461229] env[63028]: DEBUG nova.virt.hardware [None req-618d358e-08e2-4af1-b725-9b296f03d89a tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1009.461388] env[63028]: DEBUG nova.virt.hardware [None req-618d358e-08e2-4af1-b725-9b296f03d89a tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1009.461590] env[63028]: DEBUG nova.virt.hardware [None req-618d358e-08e2-4af1-b725-9b296f03d89a tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1009.461735] env[63028]: DEBUG nova.virt.hardware [None req-618d358e-08e2-4af1-b725-9b296f03d89a tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1009.461883] env[63028]: DEBUG nova.virt.hardware [None req-618d358e-08e2-4af1-b725-9b296f03d89a tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1009.462120] env[63028]: DEBUG nova.virt.hardware [None req-618d358e-08e2-4af1-b725-9b296f03d89a tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1009.462315] env[63028]: DEBUG nova.virt.hardware [None req-618d358e-08e2-4af1-b725-9b296f03d89a tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1009.462490] 
env[63028]: DEBUG nova.virt.hardware [None req-618d358e-08e2-4af1-b725-9b296f03d89a tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1009.462657] env[63028]: DEBUG nova.virt.hardware [None req-618d358e-08e2-4af1-b725-9b296f03d89a tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1009.462832] env[63028]: DEBUG nova.virt.hardware [None req-618d358e-08e2-4af1-b725-9b296f03d89a tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1009.463694] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-475807e6-c7b3-48b5-9167-be1e53c907c0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.476617] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09bd42a1-9811-4932-842e-58858d63c37d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.560145] env[63028]: DEBUG oslo_vmware.api [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Task: {'id': task-2736072, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.915495} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.560637] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/398e3010-a42d-4c4b-8604-11f5a3e99cff/398e3010-a42d-4c4b-8604-11f5a3e99cff.vmdk to [datastore2] 63524cd8-81de-419f-bb07-0326f3cb393f/63524cd8-81de-419f-bb07-0326f3cb393f.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1009.561650] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1d1099d-3ebc-454e-8fa7-78cfab5ed57d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.576627] env[63028]: DEBUG nova.compute.manager [None req-7877c216-7073-4971-8e3d-6be7dc600ebf tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: b77ba7d6-305e-4b60-a4b7-9353c12c3920] Start destroying the instance on the hypervisor. 
{{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1009.576852] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-7877c216-7073-4971-8e3d-6be7dc600ebf tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: b77ba7d6-305e-4b60-a4b7-9353c12c3920] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1009.585626] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Reconfiguring VM instance instance-00000039 to attach disk [datastore2] 63524cd8-81de-419f-bb07-0326f3cb393f/63524cd8-81de-419f-bb07-0326f3cb393f.vmdk or device None with type streamOptimized {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1009.591032] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bc1cd12-24bd-409e-b047-2079026213fd {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.593930] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5b4e59a1-843e-4f0c-901e-72b6eb3bb1a5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.614740] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-7877c216-7073-4971-8e3d-6be7dc600ebf tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: b77ba7d6-305e-4b60-a4b7-9353c12c3920] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1009.615957] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-29ba7187-66ff-408e-a759-e29b3b3530a3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.617537] env[63028]: DEBUG oslo_vmware.api [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Waiting for the task: (returnval){ [ 1009.617537] env[63028]: value = "task-2736075" [ 1009.617537] env[63028]: _type = "Task" [ 1009.617537] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.624278] env[63028]: DEBUG oslo_vmware.api [None req-7877c216-7073-4971-8e3d-6be7dc600ebf tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Waiting for the task: (returnval){ [ 1009.624278] env[63028]: value = "task-2736076" [ 1009.624278] env[63028]: _type = "Task" [ 1009.624278] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.632551] env[63028]: DEBUG oslo_vmware.api [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Task: {'id': task-2736075, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.637490] env[63028]: DEBUG oslo_vmware.api [None req-7877c216-7073-4971-8e3d-6be7dc600ebf tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2736076, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.713672] env[63028]: DEBUG nova.compute.manager [req-76fb598a-587e-4cd6-bdcf-5e759089563c req-ff420fb6-6a3f-480c-aba2-2852092b4662 service nova] [instance: b77ba7d6-305e-4b60-a4b7-9353c12c3920] Received event network-vif-deleted-d0308a48-57ab-41f7-bbab-6871ed89c5f2 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1009.713672] env[63028]: INFO nova.compute.manager [req-76fb598a-587e-4cd6-bdcf-5e759089563c req-ff420fb6-6a3f-480c-aba2-2852092b4662 service nova] [instance: b77ba7d6-305e-4b60-a4b7-9353c12c3920] Neutron deleted interface d0308a48-57ab-41f7-bbab-6871ed89c5f2; detaching it from the instance and deleting it from the info cache [ 1009.713672] env[63028]: DEBUG nova.network.neutron [req-76fb598a-587e-4cd6-bdcf-5e759089563c req-ff420fb6-6a3f-480c-aba2-2852092b4662 service nova] [instance: b77ba7d6-305e-4b60-a4b7-9353c12c3920] Updating instance_info_cache with network_info: [{"id": "9efd2ef2-d319-4038-ab28-44a46bd597d8", "address": "fa:16:3e:90:60:0c", "network": {"id": "c2f1496c-e3fd-43db-a032-12cdacdb4e46", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1287620143-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.220", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "25a6457f62d149629c09589feb1a550c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9efd2ef2-d3", "ovs_interfaceid": "9efd2ef2-d319-4038-ab28-44a46bd597d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1009.726689] env[63028]: INFO nova.network.neutron [None req-077d2597-ca60-4717-9e81-198eecf87101 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: b77ba7d6-305e-4b60-a4b7-9353c12c3920] Port d0308a48-57ab-41f7-bbab-6871ed89c5f2 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
[ 1009.727065] env[63028]: DEBUG nova.network.neutron [None req-077d2597-ca60-4717-9e81-198eecf87101 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: b77ba7d6-305e-4b60-a4b7-9353c12c3920] Updating instance_info_cache with network_info: [{"id": "9efd2ef2-d319-4038-ab28-44a46bd597d8", "address": "fa:16:3e:90:60:0c", "network": {"id": "c2f1496c-e3fd-43db-a032-12cdacdb4e46", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1287620143-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.220", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "25a6457f62d149629c09589feb1a550c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9efd2ef2-d3", "ovs_interfaceid": "9efd2ef2-d319-4038-ab28-44a46bd597d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1009.854886] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c671ace7-3381-412c-be78-43c7db407ecb {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.861214] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-158d3024-9f14-449f-8818-edde6bd7f227 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.894413] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-399c1c0a-c9bf-4ff8-a3dd-acd93af502dd {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.901527] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6767a28f-2ca3-4bf1-aa6a-3261818d783c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.914627] env[63028]: DEBUG nova.compute.provider_tree [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1009.924014] env[63028]: DEBUG oslo_vmware.api [None req-cf862dd6-1fc3-4802-9dc2-4824c9eed16f tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2736074, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.127838] env[63028]: DEBUG oslo_vmware.api [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Task: {'id': task-2736075, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.135626] env[63028]: DEBUG oslo_vmware.api [None req-7877c216-7073-4971-8e3d-6be7dc600ebf tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2736076, 'name': PowerOffVM_Task, 'duration_secs': 0.221143} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.135937] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-7877c216-7073-4971-8e3d-6be7dc600ebf tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: b77ba7d6-305e-4b60-a4b7-9353c12c3920] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1010.136238] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-7877c216-7073-4971-8e3d-6be7dc600ebf tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: b77ba7d6-305e-4b60-a4b7-9353c12c3920] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1010.136577] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4d0fdbab-ebd7-41ef-a7b8-1e6d491a3238 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.214517] env[63028]: DEBUG oslo_concurrency.lockutils [req-76fb598a-587e-4cd6-bdcf-5e759089563c req-ff420fb6-6a3f-480c-aba2-2852092b4662 service nova] Acquiring lock "b77ba7d6-305e-4b60-a4b7-9353c12c3920" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1010.230091] env[63028]: DEBUG nova.network.neutron [None req-618d358e-08e2-4af1-b725-9b296f03d89a tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 56e6ade9-893b-4c85-b0b8-e9f7b12cbad6] Successfully updated port: 1f5c01d1-9623-425e-8309-336dd1d961fa {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1010.231832] env[63028]: DEBUG oslo_concurrency.lockutils [None req-077d2597-ca60-4717-9e81-198eecf87101 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Releasing lock "refresh_cache-b77ba7d6-305e-4b60-a4b7-9353c12c3920" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1010.240388] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-7877c216-7073-4971-8e3d-6be7dc600ebf tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: b77ba7d6-305e-4b60-a4b7-9353c12c3920] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1010.240594] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-7877c216-7073-4971-8e3d-6be7dc600ebf tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: 
b77ba7d6-305e-4b60-a4b7-9353c12c3920] Deleting contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1010.240836] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-7877c216-7073-4971-8e3d-6be7dc600ebf tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Deleting the datastore file [datastore1] b77ba7d6-305e-4b60-a4b7-9353c12c3920 {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1010.241166] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c44738e1-9f7b-460b-b572-165cfa95731a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.248204] env[63028]: DEBUG oslo_vmware.api [None req-7877c216-7073-4971-8e3d-6be7dc600ebf tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Waiting for the task: (returnval){ [ 1010.248204] env[63028]: value = "task-2736078" [ 1010.248204] env[63028]: _type = "Task" [ 1010.248204] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1010.257541] env[63028]: DEBUG oslo_vmware.api [None req-7877c216-7073-4971-8e3d-6be7dc600ebf tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2736078, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.419467] env[63028]: DEBUG nova.scheduler.client.report [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1010.428079] env[63028]: DEBUG oslo_vmware.api [None req-cf862dd6-1fc3-4802-9dc2-4824c9eed16f tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2736074, 'name': ReconfigVM_Task, 'duration_secs': 2.366264} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.428407] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-cf862dd6-1fc3-4802-9dc2-4824c9eed16f tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] Reconfigured VM instance instance-00000050 to detach disk 2002 {{(pid=63028) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1010.428519] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf862dd6-1fc3-4802-9dc2-4824c9eed16f tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1010.428761] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-825b0d1f-0a55-4108-b37c-bb2d9c410e42 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.435299] env[63028]: DEBUG oslo_vmware.api [None req-cf862dd6-1fc3-4802-9dc2-4824c9eed16f tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Waiting for the task: (returnval){ [ 1010.435299] env[63028]: value = "task-2736079" [ 1010.435299] env[63028]: _type = "Task" [ 1010.435299] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1010.443194] env[63028]: DEBUG oslo_vmware.api [None req-cf862dd6-1fc3-4802-9dc2-4824c9eed16f tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2736079, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.627935] env[63028]: DEBUG oslo_vmware.api [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Task: {'id': task-2736075, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.740995] env[63028]: DEBUG oslo_concurrency.lockutils [None req-077d2597-ca60-4717-9e81-198eecf87101 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Lock "interface-b77ba7d6-305e-4b60-a4b7-9353c12c3920-98722f90-f2d2-4a4a-9e68-ad5c32b18435" "released" by "nova.compute.manager.ComputeManager.detach_interface.<locals>.do_detach_interface" :: held 10.014s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1010.741322] env[63028]: DEBUG oslo_concurrency.lockutils [None req-618d358e-08e2-4af1-b725-9b296f03d89a tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Acquiring lock "refresh_cache-56e6ade9-893b-4c85-b0b8-e9f7b12cbad6" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1010.741421] env[63028]: DEBUG oslo_concurrency.lockutils [None req-618d358e-08e2-4af1-b725-9b296f03d89a tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Acquired lock "refresh_cache-56e6ade9-893b-4c85-b0b8-e9f7b12cbad6" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1010.741582] env[63028]: DEBUG nova.network.neutron [None req-618d358e-08e2-4af1-b725-9b296f03d89a tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 56e6ade9-893b-4c85-b0b8-e9f7b12cbad6] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1010.760941] env[63028]: DEBUG oslo_vmware.api [None req-7877c216-7073-4971-8e3d-6be7dc600ebf tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2736078, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.926077] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.519s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1010.928404] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d2530e60-0841-4b86-b7f0-a5706c864f23 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.443s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1010.930404] env[63028]: INFO nova.compute.claims [None req-d2530e60-0841-4b86-b7f0-a5706c864f23 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] [instance: b16d85d7-13f3-4be0-8495-2fd2c1476f01] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1010.946566] env[63028]: DEBUG oslo_vmware.api [None req-cf862dd6-1fc3-4802-9dc2-4824c9eed16f tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2736079, 'name': PowerOnVM_Task} progress is 88%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.962335] env[63028]: INFO nova.network.neutron [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Updating port 2e2d8403-826c-4e24-ba3c-123d444d1fdc with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1011.128875] env[63028]: DEBUG oslo_vmware.api [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Task: {'id': task-2736075, 'name': ReconfigVM_Task, 'duration_secs': 1.063125} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.129163] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Reconfigured VM instance instance-00000039 to attach disk [datastore2] 63524cd8-81de-419f-bb07-0326f3cb393f/63524cd8-81de-419f-bb07-0326f3cb393f.vmdk or device None with type streamOptimized {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1011.129808] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4dad7a1e-8339-47be-95aa-e63ce5b72cbe {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.136204] env[63028]: DEBUG oslo_vmware.api [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Waiting for the task: (returnval){ [ 1011.136204] env[63028]: value = "task-2736080" [ 1011.136204] env[63028]: _type = "Task" [ 1011.136204] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.144195] env[63028]: DEBUG oslo_vmware.api [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Task: {'id': task-2736080, 'name': Rename_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.258935] env[63028]: DEBUG oslo_vmware.api [None req-7877c216-7073-4971-8e3d-6be7dc600ebf tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2736078, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.585438} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.259224] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-7877c216-7073-4971-8e3d-6be7dc600ebf tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1011.259412] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-7877c216-7073-4971-8e3d-6be7dc600ebf tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: b77ba7d6-305e-4b60-a4b7-9353c12c3920] Deleted contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1011.259585] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-7877c216-7073-4971-8e3d-6be7dc600ebf tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: b77ba7d6-305e-4b60-a4b7-9353c12c3920] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1011.259763] env[63028]: INFO nova.compute.manager [None req-7877c216-7073-4971-8e3d-6be7dc600ebf tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: b77ba7d6-305e-4b60-a4b7-9353c12c3920] Took 1.68 seconds to destroy the instance on the hypervisor. 
[ 1011.260014] env[63028]: DEBUG oslo.service.loopingcall [None req-7877c216-7073-4971-8e3d-6be7dc600ebf tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1011.260211] env[63028]: DEBUG nova.compute.manager [-] [instance: b77ba7d6-305e-4b60-a4b7-9353c12c3920] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1011.260307] env[63028]: DEBUG nova.network.neutron [-] [instance: b77ba7d6-305e-4b60-a4b7-9353c12c3920] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1011.299375] env[63028]: DEBUG nova.network.neutron [None req-618d358e-08e2-4af1-b725-9b296f03d89a tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 56e6ade9-893b-4c85-b0b8-e9f7b12cbad6] Instance cache missing network info. {{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1011.340882] env[63028]: DEBUG neutronclient.v2_0.client [-] Error message: {"NeutronError": {"type": "PortNotFound", "message": "Port d0308a48-57ab-41f7-bbab-6871ed89c5f2 could not be found.", "detail": ""}} {{(pid=63028) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1011.341213] env[63028]: DEBUG nova.network.neutron [-] Unable to show port d0308a48-57ab-41f7-bbab-6871ed89c5f2 as it no longer exists. {{(pid=63028) _unbind_ports /opt/stack/nova/nova/network/neutron.py:666}} [ 1011.450714] env[63028]: DEBUG oslo_vmware.api [None req-cf862dd6-1fc3-4802-9dc2-4824c9eed16f tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2736079, 'name': PowerOnVM_Task, 'duration_secs': 0.629763} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.450714] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf862dd6-1fc3-4802-9dc2-4824c9eed16f tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1011.450714] env[63028]: DEBUG nova.compute.manager [None req-cf862dd6-1fc3-4802-9dc2-4824c9eed16f tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1011.451048] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de4e7694-6149-4caf-90b4-14c83fc4129f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.572983] env[63028]: DEBUG nova.network.neutron [None req-618d358e-08e2-4af1-b725-9b296f03d89a tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 56e6ade9-893b-4c85-b0b8-e9f7b12cbad6] Updating instance_info_cache with network_info: [{"id": "1f5c01d1-9623-425e-8309-336dd1d961fa", "address": "fa:16:3e:7a:87:25", "network": {"id": "e8109779-7eb7-4751-b6e5-c1e3b007cb9a", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1892394824-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "05118b378b5e4d838962db2378b381bc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5446413d-c3b0-4cd2-a962-62240db178ac", "external-id": "nsx-vlan-transportzone-528", "segmentation_id": 528, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1f5c01d1-96", "ovs_interfaceid": "1f5c01d1-9623-425e-8309-336dd1d961fa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1011.645439] env[63028]: DEBUG oslo_vmware.api [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Task: {'id': task-2736080, 'name': Rename_Task, 'duration_secs': 0.277245} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.645732] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1011.645975] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4503ceed-19f7-45aa-a76b-5d942ee32742 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.651969] env[63028]: DEBUG oslo_vmware.api [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Waiting for the task: (returnval){ [ 1011.651969] env[63028]: value = "task-2736081" [ 1011.651969] env[63028]: _type = "Task" [ 1011.651969] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.661152] env[63028]: DEBUG oslo_vmware.api [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Task: {'id': task-2736081, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.742737] env[63028]: DEBUG nova.compute.manager [req-57da08ae-9ae7-4b73-a64f-bc91c5e7c7bf req-1b82290f-5849-4a3c-b386-24deccb20bd0 service nova] [instance: 56e6ade9-893b-4c85-b0b8-e9f7b12cbad6] Received event network-vif-plugged-1f5c01d1-9623-425e-8309-336dd1d961fa {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1011.742950] env[63028]: DEBUG oslo_concurrency.lockutils [req-57da08ae-9ae7-4b73-a64f-bc91c5e7c7bf req-1b82290f-5849-4a3c-b386-24deccb20bd0 service nova] Acquiring lock "56e6ade9-893b-4c85-b0b8-e9f7b12cbad6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1011.743164] env[63028]: DEBUG oslo_concurrency.lockutils [req-57da08ae-9ae7-4b73-a64f-bc91c5e7c7bf req-1b82290f-5849-4a3c-b386-24deccb20bd0 service nova] Lock "56e6ade9-893b-4c85-b0b8-e9f7b12cbad6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1011.743336] env[63028]: DEBUG oslo_concurrency.lockutils [req-57da08ae-9ae7-4b73-a64f-bc91c5e7c7bf req-1b82290f-5849-4a3c-b386-24deccb20bd0 service nova] Lock "56e6ade9-893b-4c85-b0b8-e9f7b12cbad6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1011.743656] env[63028]: DEBUG nova.compute.manager [req-57da08ae-9ae7-4b73-a64f-bc91c5e7c7bf req-1b82290f-5849-4a3c-b386-24deccb20bd0 service nova] [instance: 56e6ade9-893b-4c85-b0b8-e9f7b12cbad6] No waiting events found dispatching network-vif-plugged-1f5c01d1-9623-425e-8309-336dd1d961fa {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1011.743656] env[63028]: WARNING 
nova.compute.manager [req-57da08ae-9ae7-4b73-a64f-bc91c5e7c7bf req-1b82290f-5849-4a3c-b386-24deccb20bd0 service nova] [instance: 56e6ade9-893b-4c85-b0b8-e9f7b12cbad6] Received unexpected event network-vif-plugged-1f5c01d1-9623-425e-8309-336dd1d961fa for instance with vm_state building and task_state spawning. [ 1011.744599] env[63028]: DEBUG nova.compute.manager [req-57da08ae-9ae7-4b73-a64f-bc91c5e7c7bf req-1b82290f-5849-4a3c-b386-24deccb20bd0 service nova] [instance: 56e6ade9-893b-4c85-b0b8-e9f7b12cbad6] Received event network-changed-1f5c01d1-9623-425e-8309-336dd1d961fa {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1011.744599] env[63028]: DEBUG nova.compute.manager [req-57da08ae-9ae7-4b73-a64f-bc91c5e7c7bf req-1b82290f-5849-4a3c-b386-24deccb20bd0 service nova] [instance: 56e6ade9-893b-4c85-b0b8-e9f7b12cbad6] Refreshing instance network info cache due to event network-changed-1f5c01d1-9623-425e-8309-336dd1d961fa. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1011.744599] env[63028]: DEBUG oslo_concurrency.lockutils [req-57da08ae-9ae7-4b73-a64f-bc91c5e7c7bf req-1b82290f-5849-4a3c-b386-24deccb20bd0 service nova] Acquiring lock "refresh_cache-56e6ade9-893b-4c85-b0b8-e9f7b12cbad6" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1012.076452] env[63028]: DEBUG oslo_concurrency.lockutils [None req-618d358e-08e2-4af1-b725-9b296f03d89a tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Releasing lock "refresh_cache-56e6ade9-893b-4c85-b0b8-e9f7b12cbad6" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1012.076782] env[63028]: DEBUG nova.compute.manager [None req-618d358e-08e2-4af1-b725-9b296f03d89a tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 56e6ade9-893b-4c85-b0b8-e9f7b12cbad6] Instance network_info: |[{"id": "1f5c01d1-9623-425e-8309-336dd1d961fa", "address": "fa:16:3e:7a:87:25", "network": {"id": "e8109779-7eb7-4751-b6e5-c1e3b007cb9a", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1892394824-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "05118b378b5e4d838962db2378b381bc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5446413d-c3b0-4cd2-a962-62240db178ac", "external-id": "nsx-vlan-transportzone-528", "segmentation_id": 528, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1f5c01d1-96", "ovs_interfaceid": "1f5c01d1-9623-425e-8309-336dd1d961fa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1012.077675] env[63028]: DEBUG oslo_concurrency.lockutils [req-57da08ae-9ae7-4b73-a64f-bc91c5e7c7bf req-1b82290f-5849-4a3c-b386-24deccb20bd0 service nova] Acquired lock "refresh_cache-56e6ade9-893b-4c85-b0b8-e9f7b12cbad6" 
{{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1012.077910] env[63028]: DEBUG nova.network.neutron [req-57da08ae-9ae7-4b73-a64f-bc91c5e7c7bf req-1b82290f-5849-4a3c-b386-24deccb20bd0 service nova] [instance: 56e6ade9-893b-4c85-b0b8-e9f7b12cbad6] Refreshing network info cache for port 1f5c01d1-9623-425e-8309-336dd1d961fa {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1012.079214] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-618d358e-08e2-4af1-b725-9b296f03d89a tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 56e6ade9-893b-4c85-b0b8-e9f7b12cbad6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7a:87:25', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5446413d-c3b0-4cd2-a962-62240db178ac', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1f5c01d1-9623-425e-8309-336dd1d961fa', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1012.095187] env[63028]: DEBUG oslo.service.loopingcall [None req-618d358e-08e2-4af1-b725-9b296f03d89a tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1012.099034] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 56e6ade9-893b-4c85-b0b8-e9f7b12cbad6] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1012.099979] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e99c96de-71f4-4a90-82a0-ba19096d1e6f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.124623] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1012.124623] env[63028]: value = "task-2736082" [ 1012.124623] env[63028]: _type = "Task" [ 1012.124623] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.135094] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2736082, 'name': CreateVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.164765] env[63028]: DEBUG oslo_vmware.api [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Task: {'id': task-2736081, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.309426] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec9228fa-998e-415c-8748-15f2a0838836 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.318300] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-147e1819-7fa3-4923-a7e1-d1298ae80ed9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.351760] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1733f594-a05b-4371-8855-f3bc26eb18cd {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.361535] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c3b858f-651b-4e59-8d5c-58158d9a1288 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.375395] env[63028]: DEBUG nova.compute.provider_tree [None req-d2530e60-0841-4b86-b7f0-a5706c864f23 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1012.406939] env[63028]: DEBUG nova.network.neutron [-] [instance: b77ba7d6-305e-4b60-a4b7-9353c12c3920] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1012.636139] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2736082, 'name': CreateVM_Task, 'duration_secs': 0.410133} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.638797] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 56e6ade9-893b-4c85-b0b8-e9f7b12cbad6] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1012.639496] env[63028]: DEBUG oslo_concurrency.lockutils [None req-618d358e-08e2-4af1-b725-9b296f03d89a tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1012.639669] env[63028]: DEBUG oslo_concurrency.lockutils [None req-618d358e-08e2-4af1-b725-9b296f03d89a tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1012.640162] env[63028]: DEBUG oslo_concurrency.lockutils [None req-618d358e-08e2-4af1-b725-9b296f03d89a tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1012.640520] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bc3a7809-e64c-42c8-95be-c64753ad8e1e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.645736] env[63028]: DEBUG oslo_vmware.api [None req-618d358e-08e2-4af1-b725-9b296f03d89a tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Waiting for the task: (returnval){ [ 1012.645736] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]529f5265-9220-affb-576c-fbad2c1c3c7b" [ 1012.645736] env[63028]: _type = "Task" [ 1012.645736] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.653654] env[63028]: DEBUG oslo_vmware.api [None req-618d358e-08e2-4af1-b725-9b296f03d89a tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]529f5265-9220-affb-576c-fbad2c1c3c7b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.660935] env[63028]: DEBUG oslo_vmware.api [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Task: {'id': task-2736081, 'name': PowerOnVM_Task, 'duration_secs': 0.825383} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.661187] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1012.730580] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Acquiring lock "refresh_cache-85aafadb-81d6-4687-aed1-fbe829e5f95f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1012.730580] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Acquired lock "refresh_cache-85aafadb-81d6-4687-aed1-fbe829e5f95f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1012.730691] env[63028]: DEBUG nova.network.neutron [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1012.823268] env[63028]: DEBUG nova.compute.manager [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1012.824362] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2da71eb8-2276-4831-be93-292a6634e7f2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.865742] env[63028]: DEBUG nova.network.neutron [req-57da08ae-9ae7-4b73-a64f-bc91c5e7c7bf req-1b82290f-5849-4a3c-b386-24deccb20bd0 service nova] [instance: 56e6ade9-893b-4c85-b0b8-e9f7b12cbad6] Updated VIF entry in instance network info cache for port 1f5c01d1-9623-425e-8309-336dd1d961fa. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1012.865742] env[63028]: DEBUG nova.network.neutron [req-57da08ae-9ae7-4b73-a64f-bc91c5e7c7bf req-1b82290f-5849-4a3c-b386-24deccb20bd0 service nova] [instance: 56e6ade9-893b-4c85-b0b8-e9f7b12cbad6] Updating instance_info_cache with network_info: [{"id": "1f5c01d1-9623-425e-8309-336dd1d961fa", "address": "fa:16:3e:7a:87:25", "network": {"id": "e8109779-7eb7-4751-b6e5-c1e3b007cb9a", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1892394824-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "05118b378b5e4d838962db2378b381bc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5446413d-c3b0-4cd2-a962-62240db178ac", "external-id": "nsx-vlan-transportzone-528", "segmentation_id": 528, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1f5c01d1-96", "ovs_interfaceid": "1f5c01d1-9623-425e-8309-336dd1d961fa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1012.878977] env[63028]: DEBUG nova.scheduler.client.report [None req-d2530e60-0841-4b86-b7f0-a5706c864f23 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1012.910196] env[63028]: INFO nova.compute.manager [-] [instance: b77ba7d6-305e-4b60-a4b7-9353c12c3920] Took 1.65 seconds to deallocate network for instance. [ 1013.156714] env[63028]: DEBUG oslo_vmware.api [None req-618d358e-08e2-4af1-b725-9b296f03d89a tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]529f5265-9220-affb-576c-fbad2c1c3c7b, 'name': SearchDatastore_Task, 'duration_secs': 0.024245} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1013.157143] env[63028]: DEBUG oslo_concurrency.lockutils [None req-618d358e-08e2-4af1-b725-9b296f03d89a tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1013.157493] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-618d358e-08e2-4af1-b725-9b296f03d89a tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 56e6ade9-893b-4c85-b0b8-e9f7b12cbad6] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1013.157864] env[63028]: DEBUG oslo_concurrency.lockutils [None req-618d358e-08e2-4af1-b725-9b296f03d89a tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1013.158128] env[63028]: DEBUG oslo_concurrency.lockutils [None req-618d358e-08e2-4af1-b725-9b296f03d89a tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1013.158422] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-618d358e-08e2-4af1-b725-9b296f03d89a tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1013.158767] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-771c0867-b730-446f-924a-dea663beb278 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.169152] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-618d358e-08e2-4af1-b725-9b296f03d89a tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1013.169380] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-618d358e-08e2-4af1-b725-9b296f03d89a tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1013.170104] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-19e5de7c-9c6b-4d3b-b5bf-aa99e333f309 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.175444] env[63028]: DEBUG oslo_vmware.api [None req-618d358e-08e2-4af1-b725-9b296f03d89a tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Waiting for the task: (returnval){ [ 1013.175444] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5239c694-600a-2672-6c95-2648374ad94e" [ 1013.175444] env[63028]: _type = "Task" [ 1013.175444] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1013.183118] env[63028]: DEBUG oslo_vmware.api [None req-618d358e-08e2-4af1-b725-9b296f03d89a tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5239c694-600a-2672-6c95-2648374ad94e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.349077] env[63028]: DEBUG oslo_concurrency.lockutils [None req-dbceb504-3808-466a-a2c4-f653c8d1dbc7 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Lock "63524cd8-81de-419f-bb07-0326f3cb393f" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 28.277s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1013.368955] env[63028]: DEBUG oslo_concurrency.lockutils [req-57da08ae-9ae7-4b73-a64f-bc91c5e7c7bf req-1b82290f-5849-4a3c-b386-24deccb20bd0 service nova] Releasing lock "refresh_cache-56e6ade9-893b-4c85-b0b8-e9f7b12cbad6" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1013.384967] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d2530e60-0841-4b86-b7f0-a5706c864f23 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.457s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1013.385882] env[63028]: DEBUG nova.compute.manager [None req-d2530e60-0841-4b86-b7f0-a5706c864f23 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] [instance: b16d85d7-13f3-4be0-8495-2fd2c1476f01] Start building networks asynchronously for instance. 
{{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1013.388124] env[63028]: DEBUG oslo_concurrency.lockutils [None req-58bcb03d-d4ef-41ee-99d4-f0fb768c981b tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.095s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1013.388365] env[63028]: DEBUG nova.objects.instance [None req-58bcb03d-d4ef-41ee-99d4-f0fb768c981b tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Lazy-loading 'resources' on Instance uuid e7ba7069-fe9f-4e0f-acc6-c1d61ad5a8d9 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1013.416862] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7877c216-7073-4971-8e3d-6be7dc600ebf tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1013.540042] env[63028]: DEBUG nova.network.neutron [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Updating instance_info_cache with network_info: [{"id": "2e2d8403-826c-4e24-ba3c-123d444d1fdc", "address": "fa:16:3e:09:d0:a2", "network": {"id": "2b335013-49f6-4f84-b6b6-33d71818b2b9", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-2106845332-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.232", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "11332c2adbdc41928d4bf084435e2037", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f01bbee7-8b9a-46be-891e-59d8142fb359", "external-id": "nsx-vlan-transportzone-145", "segmentation_id": 145, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2e2d8403-82", "ovs_interfaceid": "2e2d8403-826c-4e24-ba3c-123d444d1fdc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1013.687452] env[63028]: DEBUG oslo_vmware.api [None req-618d358e-08e2-4af1-b725-9b296f03d89a tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5239c694-600a-2672-6c95-2648374ad94e, 'name': SearchDatastore_Task, 'duration_secs': 0.011696} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1013.688331] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3ccc0692-b957-4622-83c0-ce3db0c3cd5b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.693733] env[63028]: DEBUG oslo_vmware.api [None req-618d358e-08e2-4af1-b725-9b296f03d89a tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Waiting for the task: (returnval){ [ 1013.693733] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5257a3e2-87f8-e596-c913-046074e23436" [ 1013.693733] env[63028]: _type = "Task" [ 1013.693733] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1013.701869] env[63028]: DEBUG oslo_vmware.api [None req-618d358e-08e2-4af1-b725-9b296f03d89a tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5257a3e2-87f8-e596-c913-046074e23436, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.787719] env[63028]: DEBUG nova.compute.manager [req-5532253a-28b4-4a5d-97b5-18c3dbe06cd6 req-fa881c19-0215-4ddd-bed2-ead6223b7b2d service nova] [instance: b77ba7d6-305e-4b60-a4b7-9353c12c3920] Received event network-vif-deleted-9efd2ef2-d319-4038-ab28-44a46bd597d8 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1013.787917] env[63028]: DEBUG nova.compute.manager [req-5532253a-28b4-4a5d-97b5-18c3dbe06cd6 req-fa881c19-0215-4ddd-bed2-ead6223b7b2d service nova] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] Received event network-changed-b0d7c3ce-e883-4ccc-80ac-87d06acc7bb4 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1013.788423] env[63028]: DEBUG nova.compute.manager [req-5532253a-28b4-4a5d-97b5-18c3dbe06cd6 req-fa881c19-0215-4ddd-bed2-ead6223b7b2d service nova] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] Refreshing instance network info cache due to event network-changed-b0d7c3ce-e883-4ccc-80ac-87d06acc7bb4. 
{{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1013.788423] env[63028]: DEBUG oslo_concurrency.lockutils [req-5532253a-28b4-4a5d-97b5-18c3dbe06cd6 req-fa881c19-0215-4ddd-bed2-ead6223b7b2d service nova] Acquiring lock "refresh_cache-8bb61bfa-d44e-4e06-867a-445d9b3db660" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1013.788423] env[63028]: DEBUG oslo_concurrency.lockutils [req-5532253a-28b4-4a5d-97b5-18c3dbe06cd6 req-fa881c19-0215-4ddd-bed2-ead6223b7b2d service nova] Acquired lock "refresh_cache-8bb61bfa-d44e-4e06-867a-445d9b3db660" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1013.788698] env[63028]: DEBUG nova.network.neutron [req-5532253a-28b4-4a5d-97b5-18c3dbe06cd6 req-fa881c19-0215-4ddd-bed2-ead6223b7b2d service nova] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] Refreshing network info cache for port b0d7c3ce-e883-4ccc-80ac-87d06acc7bb4 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1013.891473] env[63028]: DEBUG nova.compute.utils [None req-d2530e60-0841-4b86-b7f0-a5706c864f23 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1013.898021] env[63028]: DEBUG nova.compute.manager [None req-d2530e60-0841-4b86-b7f0-a5706c864f23 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] [instance: b16d85d7-13f3-4be0-8495-2fd2c1476f01] Allocating IP information in the background. {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1013.898021] env[63028]: DEBUG nova.network.neutron [None req-d2530e60-0841-4b86-b7f0-a5706c864f23 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] [instance: b16d85d7-13f3-4be0-8495-2fd2c1476f01] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1013.950018] env[63028]: DEBUG nova.policy [None req-d2530e60-0841-4b86-b7f0-a5706c864f23 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bc25e0b534f448c8b7bc90f53fdbfba6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bae448aa28a84aa6863fffc24a5448fb', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 1014.042704] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Releasing lock "refresh_cache-85aafadb-81d6-4687-aed1-fbe829e5f95f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1014.071336] env[63028]: DEBUG nova.virt.hardware [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Getting desirable topologies for flavor 
Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='2de5472dd64b5a7fda0ac4cc6045cd82',container_format='bare',created_at=2025-02-20T18:01:04Z,direct_url=,disk_format='vmdk',id=89f02435-fa22-4275-ab99-73170c1e53cf,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-1634269026-shelved',owner='11332c2adbdc41928d4bf084435e2037',properties=ImageMetaProps,protected=,size=31669760,status='active',tags=,updated_at=2025-02-20T18:01:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1014.071585] env[63028]: DEBUG nova.virt.hardware [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1014.071741] env[63028]: DEBUG nova.virt.hardware [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1014.071924] env[63028]: DEBUG nova.virt.hardware [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1014.072081] env[63028]: DEBUG nova.virt.hardware [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1014.072237] env[63028]: DEBUG nova.virt.hardware [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1014.072448] env[63028]: DEBUG nova.virt.hardware [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1014.072602] env[63028]: DEBUG nova.virt.hardware [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1014.072764] env[63028]: DEBUG nova.virt.hardware [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:505}} [ 1014.072925] env[63028]: DEBUG nova.virt.hardware [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1014.073114] env[63028]: DEBUG nova.virt.hardware [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1014.073982] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8808282-6317-4309-9e57-96587bbf82d6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.084479] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e1d1b7f-ec94-463a-b37c-773c10cfad4f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.100111] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:09:d0:a2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f01bbee7-8b9a-46be-891e-59d8142fb359', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2e2d8403-826c-4e24-ba3c-123d444d1fdc', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1014.107433] env[63028]: DEBUG oslo.service.loopingcall [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1014.109946] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1014.110363] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-54c4f782-900f-4539-9ac2-4a74137200eb {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.131732] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1014.131732] env[63028]: value = "task-2736083" [ 1014.131732] env[63028]: _type = "Task" [ 1014.131732] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1014.138921] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2736083, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.206798] env[63028]: DEBUG oslo_vmware.api [None req-618d358e-08e2-4af1-b725-9b296f03d89a tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5257a3e2-87f8-e596-c913-046074e23436, 'name': SearchDatastore_Task, 'duration_secs': 0.028541} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1014.209119] env[63028]: DEBUG oslo_concurrency.lockutils [None req-618d358e-08e2-4af1-b725-9b296f03d89a tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1014.209390] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-618d358e-08e2-4af1-b725-9b296f03d89a tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] 56e6ade9-893b-4c85-b0b8-e9f7b12cbad6/56e6ade9-893b-4c85-b0b8-e9f7b12cbad6.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1014.209924] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4ac55ca7-1e8c-43cf-b1b4-8ce9629d6400 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.217362] env[63028]: DEBUG oslo_vmware.api [None req-618d358e-08e2-4af1-b725-9b296f03d89a tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Waiting for the task: (returnval){ [ 1014.217362] env[63028]: value = "task-2736084" [ 1014.217362] env[63028]: _type = "Task" [ 1014.217362] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1014.228456] env[63028]: DEBUG oslo_vmware.api [None req-618d358e-08e2-4af1-b725-9b296f03d89a tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2736084, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.234111] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30c1447c-5673-42ff-a5dc-c4b37e6cdebd {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.241212] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2966cf72-d3fa-4991-8a92-0f6802c23c5e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.274447] env[63028]: DEBUG nova.network.neutron [None req-d2530e60-0841-4b86-b7f0-a5706c864f23 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] [instance: b16d85d7-13f3-4be0-8495-2fd2c1476f01] Successfully created port: 47a80dc6-e819-410f-8257-cfc2cce0d3d9 {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1014.277055] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3cf9064-a626-4da6-a217-7b0fce649e02 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.285136] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1838b74a-1645-4a7a-89a5-4fa2ef007523 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.303624] env[63028]: DEBUG nova.compute.provider_tree [None req-58bcb03d-d4ef-41ee-99d4-f0fb768c981b tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1014.395007] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edc2fc53-087d-4d7f-b894-0fb9ab262ecc {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.401928] env[63028]: DEBUG nova.compute.manager [None req-d2530e60-0841-4b86-b7f0-a5706c864f23 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] [instance: b16d85d7-13f3-4be0-8495-2fd2c1476f01] Start building block device mappings for instance. 
{{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1014.408677] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-096e785d-d7b3-4b6c-a2b1-27d70c79e854 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Suspending the VM {{(pid=63028) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1014.408988] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-21a4b194-b36c-4874-9a15-25acbdf2ea4f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.416126] env[63028]: DEBUG oslo_vmware.api [None req-096e785d-d7b3-4b6c-a2b1-27d70c79e854 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Waiting for the task: (returnval){ [ 1014.416126] env[63028]: value = "task-2736085" [ 1014.416126] env[63028]: _type = "Task" [ 1014.416126] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1014.424423] env[63028]: DEBUG oslo_vmware.api [None req-096e785d-d7b3-4b6c-a2b1-27d70c79e854 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Task: {'id': task-2736085, 'name': SuspendVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.643356] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2736083, 'name': CreateVM_Task} progress is 99%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.698084] env[63028]: DEBUG nova.network.neutron [req-5532253a-28b4-4a5d-97b5-18c3dbe06cd6 req-fa881c19-0215-4ddd-bed2-ead6223b7b2d service nova] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] Updated VIF entry in instance network info cache for port b0d7c3ce-e883-4ccc-80ac-87d06acc7bb4. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1014.699972] env[63028]: DEBUG nova.network.neutron [req-5532253a-28b4-4a5d-97b5-18c3dbe06cd6 req-fa881c19-0215-4ddd-bed2-ead6223b7b2d service nova] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] Updating instance_info_cache with network_info: [{"id": "b0d7c3ce-e883-4ccc-80ac-87d06acc7bb4", "address": "fa:16:3e:cb:aa:63", "network": {"id": "53257b54-f197-4a4e-94bb-23ad6c6b7353", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-575661432-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.146", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e2304ce21bf141cab94fb6c342653812", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0954fad3-d24d-496c-83e6-a09d3cb556fc", "external-id": "nsx-vlan-transportzone-216", "segmentation_id": 216, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb0d7c3ce-e8", "ovs_interfaceid": "b0d7c3ce-e883-4ccc-80ac-87d06acc7bb4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1014.729362] env[63028]: DEBUG oslo_vmware.api [None req-618d358e-08e2-4af1-b725-9b296f03d89a tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2736084, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.803934] env[63028]: DEBUG nova.scheduler.client.report [None req-58bcb03d-d4ef-41ee-99d4-f0fb768c981b tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1014.926376] env[63028]: DEBUG oslo_vmware.api [None req-096e785d-d7b3-4b6c-a2b1-27d70c79e854 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Task: {'id': task-2736085, 'name': SuspendVM_Task} progress is 50%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.141918] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2736083, 'name': CreateVM_Task, 'duration_secs': 0.584555} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1015.141918] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1015.142542] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/89f02435-fa22-4275-ab99-73170c1e53cf" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1015.142698] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Acquired lock "[datastore2] devstack-image-cache_base/89f02435-fa22-4275-ab99-73170c1e53cf" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1015.143110] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/89f02435-fa22-4275-ab99-73170c1e53cf" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1015.143340] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c609f3bf-4c9c-4845-8b33-2e4cf621e8f6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.148284] env[63028]: DEBUG oslo_vmware.api [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Waiting for the task: (returnval){ [ 1015.148284] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52e71392-c282-35c8-37b9-1b1420fa7201" [ 1015.148284] env[63028]: _type = "Task" [ 1015.148284] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1015.157457] env[63028]: DEBUG oslo_vmware.api [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52e71392-c282-35c8-37b9-1b1420fa7201, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.201322] env[63028]: DEBUG oslo_concurrency.lockutils [req-5532253a-28b4-4a5d-97b5-18c3dbe06cd6 req-fa881c19-0215-4ddd-bed2-ead6223b7b2d service nova] Releasing lock "refresh_cache-8bb61bfa-d44e-4e06-867a-445d9b3db660" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1015.201592] env[63028]: DEBUG nova.compute.manager [req-5532253a-28b4-4a5d-97b5-18c3dbe06cd6 req-fa881c19-0215-4ddd-bed2-ead6223b7b2d service nova] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] Received event network-changed-b0d7c3ce-e883-4ccc-80ac-87d06acc7bb4 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1015.201765] env[63028]: DEBUG nova.compute.manager [req-5532253a-28b4-4a5d-97b5-18c3dbe06cd6 req-fa881c19-0215-4ddd-bed2-ead6223b7b2d service nova] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] Refreshing instance network info cache due to event network-changed-b0d7c3ce-e883-4ccc-80ac-87d06acc7bb4. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1015.201979] env[63028]: DEBUG oslo_concurrency.lockutils [req-5532253a-28b4-4a5d-97b5-18c3dbe06cd6 req-fa881c19-0215-4ddd-bed2-ead6223b7b2d service nova] Acquiring lock "refresh_cache-8bb61bfa-d44e-4e06-867a-445d9b3db660" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1015.202186] env[63028]: DEBUG oslo_concurrency.lockutils [req-5532253a-28b4-4a5d-97b5-18c3dbe06cd6 req-fa881c19-0215-4ddd-bed2-ead6223b7b2d service nova] Acquired lock "refresh_cache-8bb61bfa-d44e-4e06-867a-445d9b3db660" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1015.202335] env[63028]: DEBUG nova.network.neutron [req-5532253a-28b4-4a5d-97b5-18c3dbe06cd6 req-fa881c19-0215-4ddd-bed2-ead6223b7b2d service nova] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] Refreshing network info cache for port b0d7c3ce-e883-4ccc-80ac-87d06acc7bb4 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1015.228811] env[63028]: DEBUG oslo_vmware.api [None req-618d358e-08e2-4af1-b725-9b296f03d89a tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2736084, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.824294} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1015.228967] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-618d358e-08e2-4af1-b725-9b296f03d89a tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] 56e6ade9-893b-4c85-b0b8-e9f7b12cbad6/56e6ade9-893b-4c85-b0b8-e9f7b12cbad6.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1015.229196] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-618d358e-08e2-4af1-b725-9b296f03d89a tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 56e6ade9-893b-4c85-b0b8-e9f7b12cbad6] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1015.229448] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7830c4a6-64d6-4e7a-8535-24f22687ba58 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.238019] env[63028]: DEBUG oslo_vmware.api [None req-618d358e-08e2-4af1-b725-9b296f03d89a tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Waiting for the task: (returnval){ [ 1015.238019] env[63028]: value = "task-2736086" [ 1015.238019] env[63028]: _type = "Task" [ 1015.238019] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1015.244140] env[63028]: DEBUG oslo_vmware.api [None req-618d358e-08e2-4af1-b725-9b296f03d89a tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2736086, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.309241] env[63028]: DEBUG oslo_concurrency.lockutils [None req-58bcb03d-d4ef-41ee-99d4-f0fb768c981b tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.921s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1015.311557] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f6770839-a53a-44cd-af29-6d3e478d0666 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.014s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1015.311794] env[63028]: DEBUG nova.objects.instance [None req-f6770839-a53a-44cd-af29-6d3e478d0666 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Lazy-loading 'resources' on Instance uuid a97224e8-d69b-4c62-ab96-7cef037ef39b {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1015.332282] env[63028]: INFO nova.scheduler.client.report [None req-58bcb03d-d4ef-41ee-99d4-f0fb768c981b tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Deleted allocations for instance e7ba7069-fe9f-4e0f-acc6-c1d61ad5a8d9 [ 1015.412172] env[63028]: DEBUG nova.compute.manager [None req-d2530e60-0841-4b86-b7f0-a5706c864f23 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] [instance: b16d85d7-13f3-4be0-8495-2fd2c1476f01] Start spawning the instance on the hypervisor. {{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1015.425702] env[63028]: DEBUG oslo_vmware.api [None req-096e785d-d7b3-4b6c-a2b1-27d70c79e854 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Task: {'id': task-2736085, 'name': SuspendVM_Task} progress is 100%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.438059] env[63028]: DEBUG nova.virt.hardware [None req-d2530e60-0841-4b86-b7f0-a5706c864f23 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1015.438316] env[63028]: DEBUG nova.virt.hardware [None req-d2530e60-0841-4b86-b7f0-a5706c864f23 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1015.438476] env[63028]: DEBUG nova.virt.hardware [None req-d2530e60-0841-4b86-b7f0-a5706c864f23 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1015.438656] env[63028]: DEBUG nova.virt.hardware [None req-d2530e60-0841-4b86-b7f0-a5706c864f23 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1015.438800] env[63028]: DEBUG nova.virt.hardware [None req-d2530e60-0841-4b86-b7f0-a5706c864f23 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1015.438944] env[63028]: DEBUG nova.virt.hardware [None req-d2530e60-0841-4b86-b7f0-a5706c864f23 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1015.439164] env[63028]: DEBUG nova.virt.hardware [None req-d2530e60-0841-4b86-b7f0-a5706c864f23 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1015.439327] env[63028]: DEBUG nova.virt.hardware [None req-d2530e60-0841-4b86-b7f0-a5706c864f23 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1015.439496] env[63028]: DEBUG 
nova.virt.hardware [None req-d2530e60-0841-4b86-b7f0-a5706c864f23 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1015.439654] env[63028]: DEBUG nova.virt.hardware [None req-d2530e60-0841-4b86-b7f0-a5706c864f23 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1015.439824] env[63028]: DEBUG nova.virt.hardware [None req-d2530e60-0841-4b86-b7f0-a5706c864f23 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1015.440633] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7883c18-7aca-4e49-bf3c-1a34b683137c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.448056] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6600154a-a26e-4e14-80c5-0e9cffd506d9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.659061] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Releasing lock "[datastore2] devstack-image-cache_base/89f02435-fa22-4275-ab99-73170c1e53cf" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1015.659295] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Processing image 89f02435-fa22-4275-ab99-73170c1e53cf {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1015.659666] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/89f02435-fa22-4275-ab99-73170c1e53cf/89f02435-fa22-4275-ab99-73170c1e53cf.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1015.659859] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Acquired lock "[datastore2] devstack-image-cache_base/89f02435-fa22-4275-ab99-73170c1e53cf/89f02435-fa22-4275-ab99-73170c1e53cf.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1015.660064] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Creating directory with path [datastore2] devstack-image-cache_base 
{{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1015.660349] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-74018edf-ce87-4894-abda-07abdde7b134 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.677938] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1015.678188] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1015.678965] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ccc48af9-cf11-434f-a662-09f98433202e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.684321] env[63028]: DEBUG oslo_vmware.api [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Waiting for the task: (returnval){ [ 1015.684321] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52cfd47f-f14e-684c-d470-c94e5d555211" [ 1015.684321] env[63028]: _type = "Task" [ 1015.684321] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1015.691890] env[63028]: DEBUG oslo_vmware.api [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52cfd47f-f14e-684c-d470-c94e5d555211, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.747186] env[63028]: DEBUG oslo_vmware.api [None req-618d358e-08e2-4af1-b725-9b296f03d89a tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2736086, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072255} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1015.748479] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-618d358e-08e2-4af1-b725-9b296f03d89a tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 56e6ade9-893b-4c85-b0b8-e9f7b12cbad6] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1015.749279] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4569a91-a734-4682-b7b9-38c6fa16003e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.770842] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-618d358e-08e2-4af1-b725-9b296f03d89a tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 56e6ade9-893b-4c85-b0b8-e9f7b12cbad6] Reconfiguring VM instance instance-0000005d to attach disk [datastore2] 56e6ade9-893b-4c85-b0b8-e9f7b12cbad6/56e6ade9-893b-4c85-b0b8-e9f7b12cbad6.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1015.771150] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6a84e3aa-e801-4efd-9b3c-78c97452921b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.790991] env[63028]: DEBUG oslo_vmware.api [None req-618d358e-08e2-4af1-b725-9b296f03d89a tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Waiting for the task: (returnval){ [ 1015.790991] env[63028]: value = "task-2736087" [ 1015.790991] env[63028]: _type = "Task" [ 1015.790991] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1015.800541] env[63028]: DEBUG oslo_vmware.api [None req-618d358e-08e2-4af1-b725-9b296f03d89a tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2736087, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.840995] env[63028]: DEBUG oslo_concurrency.lockutils [None req-58bcb03d-d4ef-41ee-99d4-f0fb768c981b tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Lock "e7ba7069-fe9f-4e0f-acc6-c1d61ad5a8d9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.715s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1015.927408] env[63028]: DEBUG oslo_vmware.api [None req-096e785d-d7b3-4b6c-a2b1-27d70c79e854 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Task: {'id': task-2736085, 'name': SuspendVM_Task} progress is 100%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.999492] env[63028]: DEBUG nova.compute.manager [req-03a7e2c4-0565-48c4-86cf-a576ccb61b3d req-8df9b044-cae7-4e31-98eb-c7ab6fb3fc87 service nova] [instance: b16d85d7-13f3-4be0-8495-2fd2c1476f01] Received event network-vif-plugged-47a80dc6-e819-410f-8257-cfc2cce0d3d9 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1015.999580] env[63028]: DEBUG oslo_concurrency.lockutils [req-03a7e2c4-0565-48c4-86cf-a576ccb61b3d req-8df9b044-cae7-4e31-98eb-c7ab6fb3fc87 service nova] Acquiring lock "b16d85d7-13f3-4be0-8495-2fd2c1476f01-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1015.999930] env[63028]: DEBUG oslo_concurrency.lockutils [req-03a7e2c4-0565-48c4-86cf-a576ccb61b3d req-8df9b044-cae7-4e31-98eb-c7ab6fb3fc87 service nova] Lock "b16d85d7-13f3-4be0-8495-2fd2c1476f01-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1016.000153] env[63028]: DEBUG oslo_concurrency.lockutils [req-03a7e2c4-0565-48c4-86cf-a576ccb61b3d req-8df9b044-cae7-4e31-98eb-c7ab6fb3fc87 service nova] Lock "b16d85d7-13f3-4be0-8495-2fd2c1476f01-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1016.000317] env[63028]: DEBUG nova.compute.manager [req-03a7e2c4-0565-48c4-86cf-a576ccb61b3d req-8df9b044-cae7-4e31-98eb-c7ab6fb3fc87 service nova] [instance: b16d85d7-13f3-4be0-8495-2fd2c1476f01] No waiting events found dispatching network-vif-plugged-47a80dc6-e819-410f-8257-cfc2cce0d3d9 {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1016.000487] env[63028]: WARNING nova.compute.manager [req-03a7e2c4-0565-48c4-86cf-a576ccb61b3d req-8df9b044-cae7-4e31-98eb-c7ab6fb3fc87 service nova] [instance: b16d85d7-13f3-4be0-8495-2fd2c1476f01] Received unexpected event network-vif-plugged-47a80dc6-e819-410f-8257-cfc2cce0d3d9 for instance with vm_state building and task_state spawning. [ 1016.071363] env[63028]: DEBUG nova.network.neutron [req-5532253a-28b4-4a5d-97b5-18c3dbe06cd6 req-fa881c19-0215-4ddd-bed2-ead6223b7b2d service nova] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] Updated VIF entry in instance network info cache for port b0d7c3ce-e883-4ccc-80ac-87d06acc7bb4. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1016.071726] env[63028]: DEBUG nova.network.neutron [req-5532253a-28b4-4a5d-97b5-18c3dbe06cd6 req-fa881c19-0215-4ddd-bed2-ead6223b7b2d service nova] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] Updating instance_info_cache with network_info: [{"id": "b0d7c3ce-e883-4ccc-80ac-87d06acc7bb4", "address": "fa:16:3e:cb:aa:63", "network": {"id": "53257b54-f197-4a4e-94bb-23ad6c6b7353", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-575661432-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.146", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e2304ce21bf141cab94fb6c342653812", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0954fad3-d24d-496c-83e6-a09d3cb556fc", "external-id": "nsx-vlan-transportzone-216", "segmentation_id": 216, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb0d7c3ce-e8", "ovs_interfaceid": "b0d7c3ce-e883-4ccc-80ac-87d06acc7bb4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1016.141062] env[63028]: DEBUG nova.network.neutron [None req-d2530e60-0841-4b86-b7f0-a5706c864f23 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] [instance: b16d85d7-13f3-4be0-8495-2fd2c1476f01] Successfully updated port: 47a80dc6-e819-410f-8257-cfc2cce0d3d9 {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1016.170156] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc48a2f5-f313-447b-a5dc-aa4664271997 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.178350] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-608088a4-b3f1-4a8f-abfe-bd1627274455 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.214135] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b22eb856-63b2-4345-950f-8b5adb3b9e41 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.227127] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bc1359f-a5da-4e2c-990a-c8fdec99db6c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.231873] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Preparing fetch location {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 
1016.232148] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Fetch image to [datastore2] OSTACK_IMG_2fe2be52-b1f4-4b21-9461-251a9f5a64b2/OSTACK_IMG_2fe2be52-b1f4-4b21-9461-251a9f5a64b2.vmdk {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1016.232358] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Downloading stream optimized image 89f02435-fa22-4275-ab99-73170c1e53cf to [datastore2] OSTACK_IMG_2fe2be52-b1f4-4b21-9461-251a9f5a64b2/OSTACK_IMG_2fe2be52-b1f4-4b21-9461-251a9f5a64b2.vmdk on the data store datastore2 as vApp {{(pid=63028) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1016.232548] env[63028]: DEBUG nova.virt.vmwareapi.images [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Downloading image file data 89f02435-fa22-4275-ab99-73170c1e53cf to the ESX as VM named 'OSTACK_IMG_2fe2be52-b1f4-4b21-9461-251a9f5a64b2' {{(pid=63028) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1016.248440] env[63028]: DEBUG nova.compute.provider_tree [None req-f6770839-a53a-44cd-af29-6d3e478d0666 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1016.303435] env[63028]: DEBUG oslo_vmware.api [None req-618d358e-08e2-4af1-b725-9b296f03d89a tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2736087, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.320246] env[63028]: DEBUG oslo_vmware.rw_handles [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1016.320246] env[63028]: value = "resgroup-9" [ 1016.320246] env[63028]: _type = "ResourcePool" [ 1016.320246] env[63028]: }. 
{{(pid=63028) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1016.320655] env[63028]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-115dcc6b-c746-4b52-a89b-66e48c07fe50 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.341355] env[63028]: DEBUG oslo_vmware.rw_handles [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Lease: (returnval){ [ 1016.341355] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]529b9df1-75de-29d8-6ed0-f6af5acfaba9" [ 1016.341355] env[63028]: _type = "HttpNfcLease" [ 1016.341355] env[63028]: } obtained for vApp import into resource pool (val){ [ 1016.341355] env[63028]: value = "resgroup-9" [ 1016.341355] env[63028]: _type = "ResourcePool" [ 1016.341355] env[63028]: }. {{(pid=63028) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1016.341661] env[63028]: DEBUG oslo_vmware.api [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Waiting for the lease: (returnval){ [ 1016.341661] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]529b9df1-75de-29d8-6ed0-f6af5acfaba9" [ 1016.341661] env[63028]: _type = "HttpNfcLease" [ 1016.341661] env[63028]: } to be ready. {{(pid=63028) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1016.348450] env[63028]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1016.348450] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]529b9df1-75de-29d8-6ed0-f6af5acfaba9" [ 1016.348450] env[63028]: _type = "HttpNfcLease" [ 1016.348450] env[63028]: } is initializing. {{(pid=63028) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1016.426681] env[63028]: DEBUG oslo_vmware.api [None req-096e785d-d7b3-4b6c-a2b1-27d70c79e854 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Task: {'id': task-2736085, 'name': SuspendVM_Task, 'duration_secs': 1.751517} completed successfully. 
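
The lease and task lines above follow the same pattern throughout this log: issue a VMware API call, then poll the returned HttpNfcLease or *_Task object ("is initializing", "progress is 0%", ...) until it reaches a terminal state. A hedged, generic sketch of that polling loop is shown below; wait_until_ready and the state strings are placeholders for illustration, not the oslo.vmware API.

    # Sketch of the poll-until-ready pattern visible in the wait_for_task /
    # wait_for_lease_ready lines; the concrete API differs in oslo.vmware.
    import time

    def wait_until_ready(poll_state, interval: float = 0.5, timeout: float = 60.0) -> None:
        """Poll poll_state() until it returns 'ready'; raise on 'error' or timeout."""
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            state = poll_state()
            if state == "ready":
                return
            if state == "error":
                raise RuntimeError("lease/task entered error state")
            time.sleep(interval)  # e.g. "is initializing" -> keep waiting
        raise TimeoutError("gave up waiting for lease/task")

    # Example: a lease that becomes ready on the third poll.
    states = iter(["initializing", "initializing", "ready"])
    wait_until_ready(lambda: next(states), interval=0.01)
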
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1016.426907] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-096e785d-d7b3-4b6c-a2b1-27d70c79e854 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Suspended the VM {{(pid=63028) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1016.427157] env[63028]: DEBUG nova.compute.manager [None req-096e785d-d7b3-4b6c-a2b1-27d70c79e854 tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1016.427935] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8ba9e3e-fd61-4079-a211-d4031aa0add6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.576852] env[63028]: DEBUG oslo_concurrency.lockutils [req-5532253a-28b4-4a5d-97b5-18c3dbe06cd6 req-fa881c19-0215-4ddd-bed2-ead6223b7b2d service nova] Releasing lock "refresh_cache-8bb61bfa-d44e-4e06-867a-445d9b3db660" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1016.577189] env[63028]: DEBUG nova.compute.manager [req-5532253a-28b4-4a5d-97b5-18c3dbe06cd6 req-fa881c19-0215-4ddd-bed2-ead6223b7b2d service nova] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Received event network-vif-plugged-2e2d8403-826c-4e24-ba3c-123d444d1fdc {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1016.577401] env[63028]: DEBUG oslo_concurrency.lockutils [req-5532253a-28b4-4a5d-97b5-18c3dbe06cd6 req-fa881c19-0215-4ddd-bed2-ead6223b7b2d service nova] Acquiring lock "85aafadb-81d6-4687-aed1-fbe829e5f95f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1016.577634] env[63028]: DEBUG oslo_concurrency.lockutils [req-5532253a-28b4-4a5d-97b5-18c3dbe06cd6 req-fa881c19-0215-4ddd-bed2-ead6223b7b2d service nova] Lock "85aafadb-81d6-4687-aed1-fbe829e5f95f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1016.577807] env[63028]: DEBUG oslo_concurrency.lockutils [req-5532253a-28b4-4a5d-97b5-18c3dbe06cd6 req-fa881c19-0215-4ddd-bed2-ead6223b7b2d service nova] Lock "85aafadb-81d6-4687-aed1-fbe829e5f95f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1016.577986] env[63028]: DEBUG nova.compute.manager [req-5532253a-28b4-4a5d-97b5-18c3dbe06cd6 req-fa881c19-0215-4ddd-bed2-ead6223b7b2d service nova] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] No waiting events found dispatching network-vif-plugged-2e2d8403-826c-4e24-ba3c-123d444d1fdc {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1016.578177] env[63028]: WARNING nova.compute.manager [req-5532253a-28b4-4a5d-97b5-18c3dbe06cd6 req-fa881c19-0215-4ddd-bed2-ead6223b7b2d service nova] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Received 
unexpected event network-vif-plugged-2e2d8403-826c-4e24-ba3c-123d444d1fdc for instance with vm_state shelved_offloaded and task_state spawning. [ 1016.578369] env[63028]: DEBUG nova.compute.manager [req-5532253a-28b4-4a5d-97b5-18c3dbe06cd6 req-fa881c19-0215-4ddd-bed2-ead6223b7b2d service nova] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Received event network-changed-2e2d8403-826c-4e24-ba3c-123d444d1fdc {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1016.578591] env[63028]: DEBUG nova.compute.manager [req-5532253a-28b4-4a5d-97b5-18c3dbe06cd6 req-fa881c19-0215-4ddd-bed2-ead6223b7b2d service nova] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Refreshing instance network info cache due to event network-changed-2e2d8403-826c-4e24-ba3c-123d444d1fdc. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1016.578810] env[63028]: DEBUG oslo_concurrency.lockutils [req-5532253a-28b4-4a5d-97b5-18c3dbe06cd6 req-fa881c19-0215-4ddd-bed2-ead6223b7b2d service nova] Acquiring lock "refresh_cache-85aafadb-81d6-4687-aed1-fbe829e5f95f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1016.578958] env[63028]: DEBUG oslo_concurrency.lockutils [req-5532253a-28b4-4a5d-97b5-18c3dbe06cd6 req-fa881c19-0215-4ddd-bed2-ead6223b7b2d service nova] Acquired lock "refresh_cache-85aafadb-81d6-4687-aed1-fbe829e5f95f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1016.579146] env[63028]: DEBUG nova.network.neutron [req-5532253a-28b4-4a5d-97b5-18c3dbe06cd6 req-fa881c19-0215-4ddd-bed2-ead6223b7b2d service nova] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Refreshing network info cache for port 2e2d8403-826c-4e24-ba3c-123d444d1fdc {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1016.643758] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d2530e60-0841-4b86-b7f0-a5706c864f23 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Acquiring lock "refresh_cache-b16d85d7-13f3-4be0-8495-2fd2c1476f01" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1016.643935] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d2530e60-0841-4b86-b7f0-a5706c864f23 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Acquired lock "refresh_cache-b16d85d7-13f3-4be0-8495-2fd2c1476f01" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1016.644139] env[63028]: DEBUG nova.network.neutron [None req-d2530e60-0841-4b86-b7f0-a5706c864f23 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] [instance: b16d85d7-13f3-4be0-8495-2fd2c1476f01] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1016.752216] env[63028]: DEBUG nova.scheduler.client.report [None req-f6770839-a53a-44cd-af29-6d3e478d0666 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 
'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1016.800222] env[63028]: DEBUG oslo_vmware.api [None req-618d358e-08e2-4af1-b725-9b296f03d89a tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2736087, 'name': ReconfigVM_Task, 'duration_secs': 0.55025} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1016.800505] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-618d358e-08e2-4af1-b725-9b296f03d89a tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 56e6ade9-893b-4c85-b0b8-e9f7b12cbad6] Reconfigured VM instance instance-0000005d to attach disk [datastore2] 56e6ade9-893b-4c85-b0b8-e9f7b12cbad6/56e6ade9-893b-4c85-b0b8-e9f7b12cbad6.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1016.801360] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5f9958c5-efc1-4eb7-9f16-0e6e1c5df526 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.808312] env[63028]: DEBUG oslo_vmware.api [None req-618d358e-08e2-4af1-b725-9b296f03d89a tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Waiting for the task: (returnval){ [ 1016.808312] env[63028]: value = "task-2736089" [ 1016.808312] env[63028]: _type = "Task" [ 1016.808312] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1016.816279] env[63028]: DEBUG oslo_vmware.api [None req-618d358e-08e2-4af1-b725-9b296f03d89a tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2736089, 'name': Rename_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.849044] env[63028]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1016.849044] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]529b9df1-75de-29d8-6ed0-f6af5acfaba9" [ 1016.849044] env[63028]: _type = "HttpNfcLease" [ 1016.849044] env[63028]: } is ready. {{(pid=63028) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1016.849325] env[63028]: DEBUG oslo_vmware.rw_handles [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1016.849325] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]529b9df1-75de-29d8-6ed0-f6af5acfaba9" [ 1016.849325] env[63028]: _type = "HttpNfcLease" [ 1016.849325] env[63028]: }. 
{{(pid=63028) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1016.850045] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c72c61b-c0d0-433a-92d2-3362f0d1d1df {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.857174] env[63028]: DEBUG oslo_vmware.rw_handles [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52868399-f624-0642-4f78-78e2be0858e9/disk-0.vmdk from lease info. {{(pid=63028) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1016.857349] env[63028]: DEBUG oslo_vmware.rw_handles [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Creating HTTP connection to write to file with size = 31669760 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52868399-f624-0642-4f78-78e2be0858e9/disk-0.vmdk. {{(pid=63028) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1016.922833] env[63028]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-10eeb37f-f5aa-47e4-886c-87cabd3c2ec0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.195166] env[63028]: DEBUG nova.network.neutron [None req-d2530e60-0841-4b86-b7f0-a5706c864f23 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] [instance: b16d85d7-13f3-4be0-8495-2fd2c1476f01] Instance cache missing network info. 
{{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1017.257711] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f6770839-a53a-44cd-af29-6d3e478d0666 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.946s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1017.260415] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7e0e6b37-3dae-4280-9cd6-a7e57ab79152 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.137s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1017.262234] env[63028]: INFO nova.compute.claims [None req-7e0e6b37-3dae-4280-9cd6-a7e57ab79152 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: c492dea4-9779-4460-a559-5b82fb0643f0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1017.288641] env[63028]: INFO nova.scheduler.client.report [None req-f6770839-a53a-44cd-af29-6d3e478d0666 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Deleted allocations for instance a97224e8-d69b-4c62-ab96-7cef037ef39b [ 1017.322911] env[63028]: DEBUG oslo_vmware.api [None req-618d358e-08e2-4af1-b725-9b296f03d89a tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2736089, 'name': Rename_Task, 'duration_secs': 0.132936} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.324703] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-618d358e-08e2-4af1-b725-9b296f03d89a tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 56e6ade9-893b-4c85-b0b8-e9f7b12cbad6] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1017.324989] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-99ef3d13-8245-449a-ae90-b52c27d98649 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.332265] env[63028]: DEBUG oslo_vmware.api [None req-618d358e-08e2-4af1-b725-9b296f03d89a tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Waiting for the task: (returnval){ [ 1017.332265] env[63028]: value = "task-2736090" [ 1017.332265] env[63028]: _type = "Task" [ 1017.332265] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.345675] env[63028]: DEBUG oslo_vmware.api [None req-618d358e-08e2-4af1-b725-9b296f03d89a tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2736090, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.506826] env[63028]: DEBUG nova.network.neutron [None req-d2530e60-0841-4b86-b7f0-a5706c864f23 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] [instance: b16d85d7-13f3-4be0-8495-2fd2c1476f01] Updating instance_info_cache with network_info: [{"id": "47a80dc6-e819-410f-8257-cfc2cce0d3d9", "address": "fa:16:3e:b7:57:e4", "network": {"id": "8fd5dd35-416e-45e2-aea5-8c2c22752ef7", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1819490475-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bae448aa28a84aa6863fffc24a5448fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "489b2441-7132-4942-8b61-49cf0ad4400e", "external-id": "nsx-vlan-transportzone-971", "segmentation_id": 971, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47a80dc6-e8", "ovs_interfaceid": "47a80dc6-e819-410f-8257-cfc2cce0d3d9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1017.524129] env[63028]: DEBUG nova.network.neutron [req-5532253a-28b4-4a5d-97b5-18c3dbe06cd6 req-fa881c19-0215-4ddd-bed2-ead6223b7b2d service nova] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Updated VIF entry in instance network info cache for port 2e2d8403-826c-4e24-ba3c-123d444d1fdc. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1017.524683] env[63028]: DEBUG nova.network.neutron [req-5532253a-28b4-4a5d-97b5-18c3dbe06cd6 req-fa881c19-0215-4ddd-bed2-ead6223b7b2d service nova] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Updating instance_info_cache with network_info: [{"id": "2e2d8403-826c-4e24-ba3c-123d444d1fdc", "address": "fa:16:3e:09:d0:a2", "network": {"id": "2b335013-49f6-4f84-b6b6-33d71818b2b9", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-2106845332-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.232", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "11332c2adbdc41928d4bf084435e2037", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f01bbee7-8b9a-46be-891e-59d8142fb359", "external-id": "nsx-vlan-transportzone-145", "segmentation_id": 145, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2e2d8403-82", "ovs_interfaceid": "2e2d8403-826c-4e24-ba3c-123d444d1fdc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1017.798981] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f6770839-a53a-44cd-af29-6d3e478d0666 tempest-ListServersNegativeTestJSON-712341660 tempest-ListServersNegativeTestJSON-712341660-project-member] Lock "a97224e8-d69b-4c62-ab96-7cef037ef39b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.933s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1017.845815] env[63028]: DEBUG oslo_vmware.api [None req-618d358e-08e2-4af1-b725-9b296f03d89a tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2736090, 'name': PowerOnVM_Task} progress is 89%. 
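
The instance_info_cache updates above carry the full network_info structure for each VIF (port id, subnets, fixed IPs, floating IPs, OVS details). As a small illustration of how that nested structure can be walked, the sketch below flattens it into per-port address records; fixed_and_floating_ips is a hypothetical helper, and only the dict layout is taken from the logged cache entries.

    # Sketch only: extract addresses from a network_info list shaped like the
    # logged instance_info_cache entries.
    from typing import Dict, List

    def fixed_and_floating_ips(network_info: List[Dict]) -> List[Dict]:
        """Flatten each VIF's subnets into (port, fixed IP, floating IPs) records."""
        out = []
        for vif in network_info:
            for subnet in vif["network"]["subnets"]:
                for ip in subnet["ips"]:
                    out.append({
                        "port": vif["id"],
                        "fixed": ip["address"],
                        "floating": [f["address"] for f in ip.get("floating_ips", [])],
                    })
        return out

    vif = {"id": "2e2d8403-826c-4e24-ba3c-123d444d1fdc",
           "network": {"subnets": [{"ips": [{"address": "192.168.128.7",
                                             "floating_ips": [{"address": "10.180.180.232"}]}]}]}}
    print(fixed_and_floating_ips([vif]))
    # fixed 192.168.128.7 with floating 10.180.180.232, matching the cache entry above
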
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.011462] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d2530e60-0841-4b86-b7f0-a5706c864f23 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Releasing lock "refresh_cache-b16d85d7-13f3-4be0-8495-2fd2c1476f01" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1018.011797] env[63028]: DEBUG nova.compute.manager [None req-d2530e60-0841-4b86-b7f0-a5706c864f23 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] [instance: b16d85d7-13f3-4be0-8495-2fd2c1476f01] Instance network_info: |[{"id": "47a80dc6-e819-410f-8257-cfc2cce0d3d9", "address": "fa:16:3e:b7:57:e4", "network": {"id": "8fd5dd35-416e-45e2-aea5-8c2c22752ef7", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1819490475-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bae448aa28a84aa6863fffc24a5448fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "489b2441-7132-4942-8b61-49cf0ad4400e", "external-id": "nsx-vlan-transportzone-971", "segmentation_id": 971, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47a80dc6-e8", "ovs_interfaceid": "47a80dc6-e819-410f-8257-cfc2cce0d3d9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1018.012269] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-d2530e60-0841-4b86-b7f0-a5706c864f23 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] [instance: b16d85d7-13f3-4be0-8495-2fd2c1476f01] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b7:57:e4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '489b2441-7132-4942-8b61-49cf0ad4400e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '47a80dc6-e819-410f-8257-cfc2cce0d3d9', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1018.020207] env[63028]: DEBUG oslo.service.loopingcall [None req-d2530e60-0841-4b86-b7f0-a5706c864f23 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1018.020448] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b16d85d7-13f3-4be0-8495-2fd2c1476f01] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1018.020675] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9b648e8f-0977-4d06-aec0-49c05f4ec435 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.039570] env[63028]: DEBUG oslo_concurrency.lockutils [req-5532253a-28b4-4a5d-97b5-18c3dbe06cd6 req-fa881c19-0215-4ddd-bed2-ead6223b7b2d service nova] Releasing lock "refresh_cache-85aafadb-81d6-4687-aed1-fbe829e5f95f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1018.040872] env[63028]: DEBUG nova.compute.manager [req-9aec382d-b90a-4b02-86a3-e1e149c234df req-e42ea827-5c65-4949-b74a-bd19bfebc076 service nova] [instance: b16d85d7-13f3-4be0-8495-2fd2c1476f01] Received event network-changed-47a80dc6-e819-410f-8257-cfc2cce0d3d9 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1018.041063] env[63028]: DEBUG nova.compute.manager [req-9aec382d-b90a-4b02-86a3-e1e149c234df req-e42ea827-5c65-4949-b74a-bd19bfebc076 service nova] [instance: b16d85d7-13f3-4be0-8495-2fd2c1476f01] Refreshing instance network info cache due to event network-changed-47a80dc6-e819-410f-8257-cfc2cce0d3d9. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1018.041275] env[63028]: DEBUG oslo_concurrency.lockutils [req-9aec382d-b90a-4b02-86a3-e1e149c234df req-e42ea827-5c65-4949-b74a-bd19bfebc076 service nova] Acquiring lock "refresh_cache-b16d85d7-13f3-4be0-8495-2fd2c1476f01" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1018.041417] env[63028]: DEBUG oslo_concurrency.lockutils [req-9aec382d-b90a-4b02-86a3-e1e149c234df req-e42ea827-5c65-4949-b74a-bd19bfebc076 service nova] Acquired lock "refresh_cache-b16d85d7-13f3-4be0-8495-2fd2c1476f01" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1018.041578] env[63028]: DEBUG nova.network.neutron [req-9aec382d-b90a-4b02-86a3-e1e149c234df req-e42ea827-5c65-4949-b74a-bd19bfebc076 service nova] [instance: b16d85d7-13f3-4be0-8495-2fd2c1476f01] Refreshing network info cache for port 47a80dc6-e819-410f-8257-cfc2cce0d3d9 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1018.045906] env[63028]: DEBUG oslo_vmware.rw_handles [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Completed reading data from the image iterator. {{(pid=63028) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1018.046084] env[63028]: DEBUG oslo_vmware.rw_handles [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52868399-f624-0642-4f78-78e2be0858e9/disk-0.vmdk. 
{{(pid=63028) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1018.047204] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85f978a1-5eb9-4559-a394-f2352c723a49 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.052080] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1018.052080] env[63028]: value = "task-2736091" [ 1018.052080] env[63028]: _type = "Task" [ 1018.052080] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.056872] env[63028]: DEBUG oslo_vmware.rw_handles [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52868399-f624-0642-4f78-78e2be0858e9/disk-0.vmdk is in state: ready. {{(pid=63028) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1018.057040] env[63028]: DEBUG oslo_vmware.rw_handles [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Releasing lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52868399-f624-0642-4f78-78e2be0858e9/disk-0.vmdk. {{(pid=63028) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1018.057570] env[63028]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-6e3639ec-6300-455c-9f49-cadfc88e0788 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.062627] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2736091, 'name': CreateVM_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.072825] env[63028]: INFO nova.compute.manager [None req-e91a8cc2-0dac-433e-94df-97a1f15f8c2e tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Resuming [ 1018.073443] env[63028]: DEBUG nova.objects.instance [None req-e91a8cc2-0dac-433e-94df-97a1f15f8c2e tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Lazy-loading 'flavor' on Instance uuid 63524cd8-81de-419f-bb07-0326f3cb393f {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1018.344753] env[63028]: DEBUG oslo_vmware.api [None req-618d358e-08e2-4af1-b725-9b296f03d89a tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2736090, 'name': PowerOnVM_Task, 'duration_secs': 0.572334} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.345019] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-618d358e-08e2-4af1-b725-9b296f03d89a tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 56e6ade9-893b-4c85-b0b8-e9f7b12cbad6] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1018.345330] env[63028]: INFO nova.compute.manager [None req-618d358e-08e2-4af1-b725-9b296f03d89a tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 56e6ade9-893b-4c85-b0b8-e9f7b12cbad6] Took 8.92 seconds to spawn the instance on the hypervisor. [ 1018.345409] env[63028]: DEBUG nova.compute.manager [None req-618d358e-08e2-4af1-b725-9b296f03d89a tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 56e6ade9-893b-4c85-b0b8-e9f7b12cbad6] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1018.346293] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d800081e-dcd1-4f28-9245-24180b217005 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.405145] env[63028]: DEBUG oslo_vmware.rw_handles [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Closed VMDK write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52868399-f624-0642-4f78-78e2be0858e9/disk-0.vmdk. {{(pid=63028) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1018.405145] env[63028]: INFO nova.virt.vmwareapi.images [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Downloaded image file data 89f02435-fa22-4275-ab99-73170c1e53cf [ 1018.406297] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41b2e417-571f-4862-94ec-8f00b3c354eb {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.424600] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e86dae38-8fb5-4ce5-acd6-bfad23d3514d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.456202] env[63028]: INFO nova.virt.vmwareapi.images [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] The imported VM was unregistered [ 1018.456202] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Caching image {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1018.456760] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 
tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Creating directory with path [datastore2] devstack-image-cache_base/89f02435-fa22-4275-ab99-73170c1e53cf {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1018.460278] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5aaf3041-1e89-4a8f-837f-d71aec3ee1d3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.490375] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Created directory with path [datastore2] devstack-image-cache_base/89f02435-fa22-4275-ab99-73170c1e53cf {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1018.491602] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_2fe2be52-b1f4-4b21-9461-251a9f5a64b2/OSTACK_IMG_2fe2be52-b1f4-4b21-9461-251a9f5a64b2.vmdk to [datastore2] devstack-image-cache_base/89f02435-fa22-4275-ab99-73170c1e53cf/89f02435-fa22-4275-ab99-73170c1e53cf.vmdk. {{(pid=63028) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1018.492317] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-3a72838e-fb35-4089-a2f2-f79df1def525 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.505779] env[63028]: DEBUG oslo_vmware.api [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Waiting for the task: (returnval){ [ 1018.505779] env[63028]: value = "task-2736093" [ 1018.505779] env[63028]: _type = "Task" [ 1018.505779] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.517072] env[63028]: DEBUG oslo_vmware.api [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2736093, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.569032] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2736091, 'name': CreateVM_Task, 'duration_secs': 0.330911} completed successfully. 
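
The sequence above (acquire the lock on the cached VMDK path, import the image as OSTACK_IMG_* via the vApp lease, then MoveVirtualDisk_Task into devstack-image-cache_base) is Nova populating its per-datastore image cache so later boots of image 89f02435 skip the download. A simplified sketch of that flow is below, assuming local files in place of datastore paths; ensure_cached is an invented helper, and a threading.Lock stands in for the oslo_concurrency.lockutils lock seen in the "Acquiring/Releasing lock" lines.

    # Simplified sketch (not Nova's code): serialize on the cache path, download
    # to a unique temp location, then move it into the image cache for reuse.
    import os
    import shutil
    import threading
    import uuid

    _cache_lock = threading.Lock()

    def ensure_cached(image_id: str, cache_dir: str, download) -> str:
        """Return the cached image path, downloading it only if it is missing."""
        cached = os.path.join(cache_dir, image_id, f"{image_id}.vmdk")
        with _cache_lock:                      # like locking ".../<image_id>.vmdk"
            if os.path.exists(cached):         # already cached: reuse it
                return cached
            tmp = os.path.join(cache_dir, f"OSTACK_IMG_{uuid.uuid4()}.vmdk")
            download(tmp)                      # e.g. the stream-optimized vApp import
            os.makedirs(os.path.dirname(cached), exist_ok=True)
            shutil.move(tmp, cached)           # like MoveVirtualDisk_Task into the cache
        return cached

    if __name__ == "__main__":
        import tempfile
        with tempfile.TemporaryDirectory() as d:
            print(ensure_cached("89f02435", d, lambda dst: open(dst, "wb").close()))
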
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.569032] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b16d85d7-13f3-4be0-8495-2fd2c1476f01] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1018.569032] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d2530e60-0841-4b86-b7f0-a5706c864f23 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1018.569032] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d2530e60-0841-4b86-b7f0-a5706c864f23 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1018.569032] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d2530e60-0841-4b86-b7f0-a5706c864f23 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1018.569032] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-605ba98f-d0ef-42be-ae3d-b41a5028d57c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.574732] env[63028]: DEBUG oslo_vmware.api [None req-d2530e60-0841-4b86-b7f0-a5706c864f23 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Waiting for the task: (returnval){ [ 1018.574732] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5261a402-c295-15f7-71d4-8259963b4a32" [ 1018.574732] env[63028]: _type = "Task" [ 1018.574732] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.588500] env[63028]: DEBUG oslo_vmware.api [None req-d2530e60-0841-4b86-b7f0-a5706c864f23 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5261a402-c295-15f7-71d4-8259963b4a32, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.631890] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f62803e-b8ef-486e-9263-e15614f33df7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.640201] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f064418f-0e5b-4260-9e58-a7c7ffd87ae2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.673881] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34658d0d-636a-456c-b1bd-f5b167cdab62 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.681517] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6da9e457-be14-4df1-9a80-dfc1fdde4434 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.694822] env[63028]: DEBUG nova.compute.provider_tree [None req-7e0e6b37-3dae-4280-9cd6-a7e57ab79152 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1018.868233] env[63028]: INFO nova.compute.manager [None req-618d358e-08e2-4af1-b725-9b296f03d89a tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 56e6ade9-893b-4c85-b0b8-e9f7b12cbad6] Took 21.82 seconds to build instance. [ 1019.015213] env[63028]: DEBUG oslo_vmware.api [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2736093, 'name': MoveVirtualDisk_Task} progress is 9%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.090072] env[63028]: DEBUG oslo_vmware.api [None req-d2530e60-0841-4b86-b7f0-a5706c864f23 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5261a402-c295-15f7-71d4-8259963b4a32, 'name': SearchDatastore_Task, 'duration_secs': 0.035617} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1019.090682] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d2530e60-0841-4b86-b7f0-a5706c864f23 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1019.090938] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-d2530e60-0841-4b86-b7f0-a5706c864f23 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] [instance: b16d85d7-13f3-4be0-8495-2fd2c1476f01] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1019.091253] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d2530e60-0841-4b86-b7f0-a5706c864f23 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1019.091405] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d2530e60-0841-4b86-b7f0-a5706c864f23 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1019.091592] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-d2530e60-0841-4b86-b7f0-a5706c864f23 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1019.091890] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-62c10eba-a6f5-4695-8a08-67ec96d8e21b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.106344] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-d2530e60-0841-4b86-b7f0-a5706c864f23 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1019.106553] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-d2530e60-0841-4b86-b7f0-a5706c864f23 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1019.107284] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-13ef465e-50ed-4af0-9b17-797eddd0d0b5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.115024] env[63028]: DEBUG oslo_vmware.api [None req-d2530e60-0841-4b86-b7f0-a5706c864f23 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Waiting for the task: (returnval){ [ 1019.115024] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5278d265-0d83-be86-ed73-a868c7455237" [ 1019.115024] env[63028]: _type = "Task" [ 1019.115024] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.125995] env[63028]: DEBUG oslo_vmware.api [None req-d2530e60-0841-4b86-b7f0-a5706c864f23 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5278d265-0d83-be86-ed73-a868c7455237, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.172572] env[63028]: DEBUG nova.network.neutron [req-9aec382d-b90a-4b02-86a3-e1e149c234df req-e42ea827-5c65-4949-b74a-bd19bfebc076 service nova] [instance: b16d85d7-13f3-4be0-8495-2fd2c1476f01] Updated VIF entry in instance network info cache for port 47a80dc6-e819-410f-8257-cfc2cce0d3d9. {{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1019.172981] env[63028]: DEBUG nova.network.neutron [req-9aec382d-b90a-4b02-86a3-e1e149c234df req-e42ea827-5c65-4949-b74a-bd19bfebc076 service nova] [instance: b16d85d7-13f3-4be0-8495-2fd2c1476f01] Updating instance_info_cache with network_info: [{"id": "47a80dc6-e819-410f-8257-cfc2cce0d3d9", "address": "fa:16:3e:b7:57:e4", "network": {"id": "8fd5dd35-416e-45e2-aea5-8c2c22752ef7", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1819490475-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bae448aa28a84aa6863fffc24a5448fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "489b2441-7132-4942-8b61-49cf0ad4400e", "external-id": "nsx-vlan-transportzone-971", "segmentation_id": 971, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47a80dc6-e8", "ovs_interfaceid": "47a80dc6-e819-410f-8257-cfc2cce0d3d9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1019.198564] env[63028]: DEBUG nova.scheduler.client.report [None req-7e0e6b37-3dae-4280-9cd6-a7e57ab79152 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: 
{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1019.374023] env[63028]: DEBUG oslo_concurrency.lockutils [None req-618d358e-08e2-4af1-b725-9b296f03d89a tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Lock "56e6ade9-893b-4c85-b0b8-e9f7b12cbad6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.332s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1019.517086] env[63028]: DEBUG oslo_vmware.api [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2736093, 'name': MoveVirtualDisk_Task} progress is 29%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.595240] env[63028]: DEBUG oslo_concurrency.lockutils [None req-e91a8cc2-0dac-433e-94df-97a1f15f8c2e tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Acquiring lock "refresh_cache-63524cd8-81de-419f-bb07-0326f3cb393f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1019.595240] env[63028]: DEBUG oslo_concurrency.lockutils [None req-e91a8cc2-0dac-433e-94df-97a1f15f8c2e tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Acquired lock "refresh_cache-63524cd8-81de-419f-bb07-0326f3cb393f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1019.595240] env[63028]: DEBUG nova.network.neutron [None req-e91a8cc2-0dac-433e-94df-97a1f15f8c2e tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1019.632048] env[63028]: DEBUG oslo_vmware.api [None req-d2530e60-0841-4b86-b7f0-a5706c864f23 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5278d265-0d83-be86-ed73-a868c7455237, 'name': SearchDatastore_Task, 'duration_secs': 0.068062} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1019.632048] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f1ff077d-1c17-47a3-aaf3-10a6749f3dbd {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.639152] env[63028]: DEBUG oslo_vmware.api [None req-d2530e60-0841-4b86-b7f0-a5706c864f23 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Waiting for the task: (returnval){ [ 1019.639152] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]525cc135-9c04-c501-c664-46196119f13c" [ 1019.639152] env[63028]: _type = "Task" [ 1019.639152] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.648499] env[63028]: DEBUG oslo_vmware.api [None req-d2530e60-0841-4b86-b7f0-a5706c864f23 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]525cc135-9c04-c501-c664-46196119f13c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.677835] env[63028]: DEBUG oslo_concurrency.lockutils [req-9aec382d-b90a-4b02-86a3-e1e149c234df req-e42ea827-5c65-4949-b74a-bd19bfebc076 service nova] Releasing lock "refresh_cache-b16d85d7-13f3-4be0-8495-2fd2c1476f01" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1019.706095] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7e0e6b37-3dae-4280-9cd6-a7e57ab79152 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.445s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1019.707055] env[63028]: DEBUG nova.compute.manager [None req-7e0e6b37-3dae-4280-9cd6-a7e57ab79152 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: c492dea4-9779-4460-a559-5b82fb0643f0] Start building networks asynchronously for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1019.709293] env[63028]: DEBUG oslo_concurrency.lockutils [None req-42356a16-d54c-4644-8977-b15ab56af7b9 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.971s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1019.709516] env[63028]: DEBUG nova.objects.instance [None req-42356a16-d54c-4644-8977-b15ab56af7b9 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] Lazy-loading 'resources' on Instance uuid 3fb46d02-7914-4d08-b63b-f3447ba1b81a {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1020.016851] env[63028]: DEBUG oslo_vmware.api [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2736093, 'name': MoveVirtualDisk_Task} progress is 52%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.151152] env[63028]: DEBUG oslo_vmware.api [None req-d2530e60-0841-4b86-b7f0-a5706c864f23 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]525cc135-9c04-c501-c664-46196119f13c, 'name': SearchDatastore_Task, 'duration_secs': 0.097781} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.151465] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d2530e60-0841-4b86-b7f0-a5706c864f23 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1020.151729] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2530e60-0841-4b86-b7f0-a5706c864f23 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] b16d85d7-13f3-4be0-8495-2fd2c1476f01/b16d85d7-13f3-4be0-8495-2fd2c1476f01.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1020.151997] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d22869ba-2aed-4567-977a-d2cc4b1ac0fe {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.159282] env[63028]: DEBUG oslo_vmware.api [None req-d2530e60-0841-4b86-b7f0-a5706c864f23 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Waiting for the task: (returnval){ [ 1020.159282] env[63028]: value = "task-2736094" [ 1020.159282] env[63028]: _type = "Task" [ 1020.159282] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.168354] env[63028]: DEBUG oslo_vmware.api [None req-d2530e60-0841-4b86-b7f0-a5706c864f23 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Task: {'id': task-2736094, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.219022] env[63028]: DEBUG nova.compute.utils [None req-7e0e6b37-3dae-4280-9cd6-a7e57ab79152 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1020.222892] env[63028]: DEBUG nova.compute.manager [None req-7e0e6b37-3dae-4280-9cd6-a7e57ab79152 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: c492dea4-9779-4460-a559-5b82fb0643f0] Allocating IP information in the background. 
{{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1020.223338] env[63028]: DEBUG nova.network.neutron [None req-7e0e6b37-3dae-4280-9cd6-a7e57ab79152 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: c492dea4-9779-4460-a559-5b82fb0643f0] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1020.347558] env[63028]: DEBUG nova.policy [None req-7e0e6b37-3dae-4280-9cd6-a7e57ab79152 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '43ed2fb3f1a944fdac8ee7778f171cd0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8efc6d89903c454eb39136a76e0adef5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 1020.519711] env[63028]: DEBUG oslo_vmware.api [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2736093, 'name': MoveVirtualDisk_Task} progress is 69%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.528556] env[63028]: DEBUG nova.network.neutron [None req-e91a8cc2-0dac-433e-94df-97a1f15f8c2e tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Updating instance_info_cache with network_info: [{"id": "296dfd9e-84e1-4ea8-bd17-28920a6a048b", "address": "fa:16:3e:bc:87:07", "network": {"id": "37013470-5bda-41a6-826c-03ac0d423c85", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1214578902-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "98d3fdfda1694b2f9f5985831ea77a21", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8868dc2-7767-49c0-a2ed-e611fcbf8414", "external-id": "nsx-vlan-transportzone-158", "segmentation_id": 158, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap296dfd9e-84", "ovs_interfaceid": "296dfd9e-84e1-4ea8-bd17-28920a6a048b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1020.641565] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80cf2a38-e7db-45ba-ba55-95cc24457b0f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.654918] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-415232b6-3831-4f5d-b7f4-a52636a0470a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.688700] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73bbc689-0036-4edf-8e82-106f56fe7e65 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.694369] env[63028]: DEBUG oslo_vmware.api [None req-d2530e60-0841-4b86-b7f0-a5706c864f23 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Task: {'id': task-2736094, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.699371] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3d713e9-460e-4ad8-bd04-8ed9fe490eeb {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.713224] env[63028]: DEBUG nova.compute.provider_tree [None req-42356a16-d54c-4644-8977-b15ab56af7b9 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1020.723843] env[63028]: DEBUG nova.compute.manager [None req-7e0e6b37-3dae-4280-9cd6-a7e57ab79152 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: c492dea4-9779-4460-a559-5b82fb0643f0] Start building block device mappings for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1020.846933] env[63028]: DEBUG nova.compute.manager [None req-55a1c518-d590-4078-972f-2184f70fe3bf tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 56e6ade9-893b-4c85-b0b8-e9f7b12cbad6] Stashing vm_state: active {{(pid=63028) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1020.912485] env[63028]: DEBUG nova.network.neutron [None req-7e0e6b37-3dae-4280-9cd6-a7e57ab79152 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: c492dea4-9779-4460-a559-5b82fb0643f0] Successfully created port: 7a1202c7-93f9-43c0-ab7e-9c1c8a578e37 {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1021.017111] env[63028]: DEBUG oslo_vmware.api [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2736093, 'name': MoveVirtualDisk_Task} progress is 91%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.036850] env[63028]: DEBUG oslo_concurrency.lockutils [None req-e91a8cc2-0dac-433e-94df-97a1f15f8c2e tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Releasing lock "refresh_cache-63524cd8-81de-419f-bb07-0326f3cb393f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1021.036850] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bd536e3-79bb-4287-b732-768c766de79d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.043233] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-e91a8cc2-0dac-433e-94df-97a1f15f8c2e tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Resuming the VM {{(pid=63028) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1183}} [ 1021.043233] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-06a4c3f5-95d8-43ad-9977-89c961713a31 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.050050] env[63028]: DEBUG oslo_vmware.api [None req-e91a8cc2-0dac-433e-94df-97a1f15f8c2e tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Waiting for the task: (returnval){ [ 1021.050050] env[63028]: value = "task-2736095" [ 1021.050050] env[63028]: _type = "Task" [ 1021.050050] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.059173] env[63028]: DEBUG oslo_vmware.api [None req-e91a8cc2-0dac-433e-94df-97a1f15f8c2e tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Task: {'id': task-2736095, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.170757] env[63028]: DEBUG oslo_vmware.api [None req-d2530e60-0841-4b86-b7f0-a5706c864f23 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Task: {'id': task-2736094, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.215964] env[63028]: DEBUG nova.scheduler.client.report [None req-42356a16-d54c-4644-8977-b15ab56af7b9 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1021.368868] env[63028]: DEBUG oslo_concurrency.lockutils [None req-55a1c518-d590-4078-972f-2184f70fe3bf tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1021.519499] env[63028]: DEBUG oslo_vmware.api [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2736093, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.851415} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1021.519839] env[63028]: INFO nova.virt.vmwareapi.ds_util [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_2fe2be52-b1f4-4b21-9461-251a9f5a64b2/OSTACK_IMG_2fe2be52-b1f4-4b21-9461-251a9f5a64b2.vmdk to [datastore2] devstack-image-cache_base/89f02435-fa22-4275-ab99-73170c1e53cf/89f02435-fa22-4275-ab99-73170c1e53cf.vmdk. 
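[annotation, not log output] The entries above repeat two mechanisms: a per-image lock taken around the devstack-image-cache_base entry, and a loop that polls a vCenter task ("Task: {...} progress is N% ... completed successfully") before the cached VMDK is copied into the instance folder. The following is a minimal, self-contained Python sketch of that shape only; it is not Nova's or oslo.vmware's actual code, and start_search / start_copy are hypothetical stand-ins for the real SearchDatastore_Task and CopyVirtualDisk_Task calls.

    # Sketch of the lock-guarded fetch-if-missing pattern and task polling seen in the log.
    import time
    import threading
    from contextlib import contextmanager

    _locks: dict[str, threading.Lock] = {}
    _guard = threading.Lock()

    @contextmanager
    def image_cache_lock(path: str):
        """Serialize work on one cache entry, like the Acquiring/Acquired/Releasing
        lock "[datastore2] devstack-image-cache_base/..." lines above."""
        with _guard:
            lock = _locks.setdefault(path, threading.Lock())
        with lock:
            yield

    def wait_for_task(poll, interval: float = 0.5):
        """Poll a task until it succeeds or fails; the repeated "progress is N%"
        entries come from a loop of this shape. `poll` is a hypothetical callable
        returning e.g. {"state": "running", "progress": 29}."""
        while True:
            state = poll()
            if state["state"] == "success":
                return state.get("result")
            if state["state"] == "error":
                raise RuntimeError(state.get("error", "task failed"))
            time.sleep(interval)

    def fetch_image_if_missing(image_id: str, start_search, start_copy) -> str:
        """Under the lock, search the cache folder and copy the VMDK in only when it
        is absent (SearchDatastore_Task, then CopyVirtualDisk_Task)."""
        cached = f"[datastore2] devstack-image-cache_base/{image_id}/{image_id}.vmdk"
        with image_cache_lock(cached):
            if not wait_for_task(start_search(cached)):
                wait_for_task(start_copy(cached))
        return cached

Once the cache entry exists, the log shows the same task-wait loop reused for each subsequent step of the spawn: copying the cached disk to the instance directory, extending the root disk, the ReconfigVM_Task that attaches it, Rename_Task, and finally PowerOnVM_Task.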
[ 1021.520051] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Cleaning up location [datastore2] OSTACK_IMG_2fe2be52-b1f4-4b21-9461-251a9f5a64b2 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1021.520225] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_2fe2be52-b1f4-4b21-9461-251a9f5a64b2 {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1021.520497] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e532e93f-d06c-4105-a9a2-a71210d289bd {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.527798] env[63028]: DEBUG oslo_vmware.api [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Waiting for the task: (returnval){ [ 1021.527798] env[63028]: value = "task-2736096" [ 1021.527798] env[63028]: _type = "Task" [ 1021.527798] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.540475] env[63028]: DEBUG oslo_vmware.api [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2736096, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.559566] env[63028]: DEBUG oslo_vmware.api [None req-e91a8cc2-0dac-433e-94df-97a1f15f8c2e tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Task: {'id': task-2736095, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.670098] env[63028]: DEBUG oslo_vmware.api [None req-d2530e60-0841-4b86-b7f0-a5706c864f23 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Task: {'id': task-2736094, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.721264] env[63028]: DEBUG oslo_concurrency.lockutils [None req-42356a16-d54c-4644-8977-b15ab56af7b9 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.012s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1021.724686] env[63028]: DEBUG oslo_concurrency.lockutils [None req-82e835a5-f72a-41b4-bb91-607fdececc16 tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.532s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1021.726864] env[63028]: INFO nova.compute.claims [None req-82e835a5-f72a-41b4-bb91-607fdececc16 tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] [instance: da23282a-bbda-47bf-9d9c-337ee9996779] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1021.732493] env[63028]: DEBUG nova.compute.manager [None req-7e0e6b37-3dae-4280-9cd6-a7e57ab79152 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: c492dea4-9779-4460-a559-5b82fb0643f0] Start spawning the instance on the hypervisor. {{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1021.747138] env[63028]: INFO nova.scheduler.client.report [None req-42356a16-d54c-4644-8977-b15ab56af7b9 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] Deleted allocations for instance 3fb46d02-7914-4d08-b63b-f3447ba1b81a [ 1021.764929] env[63028]: DEBUG nova.virt.hardware [None req-7e0e6b37-3dae-4280-9cd6-a7e57ab79152 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1021.765187] env[63028]: DEBUG nova.virt.hardware [None req-7e0e6b37-3dae-4280-9cd6-a7e57ab79152 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1021.765346] env[63028]: DEBUG nova.virt.hardware [None req-7e0e6b37-3dae-4280-9cd6-a7e57ab79152 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Image limits 0:0:0 {{(pid=63028) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1021.765540] env[63028]: DEBUG nova.virt.hardware [None req-7e0e6b37-3dae-4280-9cd6-a7e57ab79152 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1021.766998] env[63028]: DEBUG nova.virt.hardware [None req-7e0e6b37-3dae-4280-9cd6-a7e57ab79152 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1021.766998] env[63028]: DEBUG nova.virt.hardware [None req-7e0e6b37-3dae-4280-9cd6-a7e57ab79152 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1021.766998] env[63028]: DEBUG nova.virt.hardware [None req-7e0e6b37-3dae-4280-9cd6-a7e57ab79152 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1021.766998] env[63028]: DEBUG nova.virt.hardware [None req-7e0e6b37-3dae-4280-9cd6-a7e57ab79152 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1021.766998] env[63028]: DEBUG nova.virt.hardware [None req-7e0e6b37-3dae-4280-9cd6-a7e57ab79152 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1021.766998] env[63028]: DEBUG nova.virt.hardware [None req-7e0e6b37-3dae-4280-9cd6-a7e57ab79152 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1021.766998] env[63028]: DEBUG nova.virt.hardware [None req-7e0e6b37-3dae-4280-9cd6-a7e57ab79152 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1021.769067] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97907f02-8d92-4d83-8ae3-5b3978b43ab5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.775870] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b695ccc-1b48-4d73-aba3-919038ca9d35 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.044030] env[63028]: DEBUG oslo_vmware.api [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 
tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2736096, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.159148} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.044030] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1022.044030] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Releasing lock "[datastore2] devstack-image-cache_base/89f02435-fa22-4275-ab99-73170c1e53cf/89f02435-fa22-4275-ab99-73170c1e53cf.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1022.044030] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/89f02435-fa22-4275-ab99-73170c1e53cf/89f02435-fa22-4275-ab99-73170c1e53cf.vmdk to [datastore2] 85aafadb-81d6-4687-aed1-fbe829e5f95f/85aafadb-81d6-4687-aed1-fbe829e5f95f.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1022.044030] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-808b3b04-f3dc-49f2-b34f-44e015aaf4d8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.056408] env[63028]: DEBUG oslo_vmware.api [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Waiting for the task: (returnval){ [ 1022.056408] env[63028]: value = "task-2736097" [ 1022.056408] env[63028]: _type = "Task" [ 1022.056408] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.064173] env[63028]: DEBUG oslo_vmware.api [None req-e91a8cc2-0dac-433e-94df-97a1f15f8c2e tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Task: {'id': task-2736095, 'name': PowerOnVM_Task, 'duration_secs': 0.966544} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.064887] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-e91a8cc2-0dac-433e-94df-97a1f15f8c2e tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Resumed the VM {{(pid=63028) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1188}} [ 1022.065112] env[63028]: DEBUG nova.compute.manager [None req-e91a8cc2-0dac-433e-94df-97a1f15f8c2e tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1022.066090] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad05ad6e-0e18-4fda-931c-bd6beee29142 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.072770] env[63028]: DEBUG oslo_vmware.api [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2736097, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.171810] env[63028]: DEBUG oslo_vmware.api [None req-d2530e60-0841-4b86-b7f0-a5706c864f23 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Task: {'id': task-2736094, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.539807} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.172110] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2530e60-0841-4b86-b7f0-a5706c864f23 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] b16d85d7-13f3-4be0-8495-2fd2c1476f01/b16d85d7-13f3-4be0-8495-2fd2c1476f01.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1022.172337] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-d2530e60-0841-4b86-b7f0-a5706c864f23 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] [instance: b16d85d7-13f3-4be0-8495-2fd2c1476f01] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1022.172594] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c3948890-988c-4c4f-87e8-bcb04bb6a829 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.179415] env[63028]: DEBUG oslo_vmware.api [None req-d2530e60-0841-4b86-b7f0-a5706c864f23 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Waiting for the task: (returnval){ [ 1022.179415] env[63028]: value = "task-2736098" [ 1022.179415] env[63028]: _type = "Task" [ 1022.179415] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.192812] env[63028]: DEBUG oslo_vmware.api [None req-d2530e60-0841-4b86-b7f0-a5706c864f23 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Task: {'id': task-2736098, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.256571] env[63028]: DEBUG oslo_concurrency.lockutils [None req-42356a16-d54c-4644-8977-b15ab56af7b9 tempest-ServerDiagnosticsTest-461877478 tempest-ServerDiagnosticsTest-461877478-project-member] Lock "3fb46d02-7914-4d08-b63b-f3447ba1b81a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 20.604s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1022.445418] env[63028]: DEBUG nova.compute.manager [req-9b5b7991-f324-4dff-828d-76aab9bd53d8 req-6df87375-9b60-4c63-b29c-f5c7787421bb service nova] [instance: c492dea4-9779-4460-a559-5b82fb0643f0] Received event network-vif-plugged-7a1202c7-93f9-43c0-ab7e-9c1c8a578e37 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1022.445637] env[63028]: DEBUG oslo_concurrency.lockutils [req-9b5b7991-f324-4dff-828d-76aab9bd53d8 req-6df87375-9b60-4c63-b29c-f5c7787421bb service nova] Acquiring lock "c492dea4-9779-4460-a559-5b82fb0643f0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1022.445863] env[63028]: DEBUG oslo_concurrency.lockutils [req-9b5b7991-f324-4dff-828d-76aab9bd53d8 req-6df87375-9b60-4c63-b29c-f5c7787421bb service nova] Lock "c492dea4-9779-4460-a559-5b82fb0643f0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1022.446047] env[63028]: DEBUG oslo_concurrency.lockutils [req-9b5b7991-f324-4dff-828d-76aab9bd53d8 req-6df87375-9b60-4c63-b29c-f5c7787421bb service nova] Lock "c492dea4-9779-4460-a559-5b82fb0643f0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1022.446222] env[63028]: DEBUG nova.compute.manager [req-9b5b7991-f324-4dff-828d-76aab9bd53d8 req-6df87375-9b60-4c63-b29c-f5c7787421bb service nova] [instance: c492dea4-9779-4460-a559-5b82fb0643f0] No waiting events found dispatching network-vif-plugged-7a1202c7-93f9-43c0-ab7e-9c1c8a578e37 {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1022.446386] env[63028]: WARNING nova.compute.manager [req-9b5b7991-f324-4dff-828d-76aab9bd53d8 req-6df87375-9b60-4c63-b29c-f5c7787421bb service nova] [instance: c492dea4-9779-4460-a559-5b82fb0643f0] Received unexpected event network-vif-plugged-7a1202c7-93f9-43c0-ab7e-9c1c8a578e37 for instance with vm_state building and task_state spawning. [ 1022.568753] env[63028]: DEBUG oslo_vmware.api [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2736097, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.635612] env[63028]: DEBUG nova.network.neutron [None req-7e0e6b37-3dae-4280-9cd6-a7e57ab79152 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: c492dea4-9779-4460-a559-5b82fb0643f0] Successfully updated port: 7a1202c7-93f9-43c0-ab7e-9c1c8a578e37 {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1022.689445] env[63028]: DEBUG oslo_vmware.api [None req-d2530e60-0841-4b86-b7f0-a5706c864f23 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Task: {'id': task-2736098, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.092096} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.690113] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-d2530e60-0841-4b86-b7f0-a5706c864f23 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] [instance: b16d85d7-13f3-4be0-8495-2fd2c1476f01] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1022.690550] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee501587-e64c-40dd-8ad5-db627ebc0c2f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.713640] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-d2530e60-0841-4b86-b7f0-a5706c864f23 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] [instance: b16d85d7-13f3-4be0-8495-2fd2c1476f01] Reconfiguring VM instance instance-0000005e to attach disk [datastore2] b16d85d7-13f3-4be0-8495-2fd2c1476f01/b16d85d7-13f3-4be0-8495-2fd2c1476f01.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1022.713958] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d8f3f1d7-71da-409a-8b3f-e3ee1bb5921d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.733855] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5d13b0db-b56d-43b7-9f3a-f0646936a40d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Acquiring lock "60d18f14-536a-4b0f-912b-21f3f5a30d28" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1022.734104] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5d13b0db-b56d-43b7-9f3a-f0646936a40d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Lock "60d18f14-536a-4b0f-912b-21f3f5a30d28" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1022.739904] env[63028]: DEBUG oslo_vmware.api [None req-d2530e60-0841-4b86-b7f0-a5706c864f23 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Waiting for the task: (returnval){ [ 1022.739904] env[63028]: value = "task-2736099" [ 1022.739904] 
env[63028]: _type = "Task" [ 1022.739904] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.747875] env[63028]: DEBUG oslo_vmware.api [None req-d2530e60-0841-4b86-b7f0-a5706c864f23 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Task: {'id': task-2736099, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.072222] env[63028]: DEBUG oslo_vmware.api [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2736097, 'name': CopyVirtualDisk_Task} progress is 15%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.077973] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91f9e2e5-4e61-4a2d-bd0f-0c18f8525c09 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.087243] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-261446b2-4b36-4957-bcc7-d3a88645ed0b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.121051] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02a738cc-0fee-4176-97af-0250c6d36db0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.130170] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8839365-f69d-463c-b064-296c8824e26b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.143385] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7e0e6b37-3dae-4280-9cd6-a7e57ab79152 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Acquiring lock "refresh_cache-c492dea4-9779-4460-a559-5b82fb0643f0" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1023.143549] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7e0e6b37-3dae-4280-9cd6-a7e57ab79152 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Acquired lock "refresh_cache-c492dea4-9779-4460-a559-5b82fb0643f0" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1023.143702] env[63028]: DEBUG nova.network.neutron [None req-7e0e6b37-3dae-4280-9cd6-a7e57ab79152 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: c492dea4-9779-4460-a559-5b82fb0643f0] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1023.145186] env[63028]: DEBUG nova.compute.provider_tree [None req-82e835a5-f72a-41b4-bb91-607fdececc16 tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1023.240533] env[63028]: DEBUG 
nova.compute.manager [None req-5d13b0db-b56d-43b7-9f3a-f0646936a40d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 60d18f14-536a-4b0f-912b-21f3f5a30d28] Starting instance... {{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1023.253037] env[63028]: DEBUG oslo_vmware.api [None req-d2530e60-0841-4b86-b7f0-a5706c864f23 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Task: {'id': task-2736099, 'name': ReconfigVM_Task, 'duration_secs': 0.42064} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.253325] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-d2530e60-0841-4b86-b7f0-a5706c864f23 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] [instance: b16d85d7-13f3-4be0-8495-2fd2c1476f01] Reconfigured VM instance instance-0000005e to attach disk [datastore2] b16d85d7-13f3-4be0-8495-2fd2c1476f01/b16d85d7-13f3-4be0-8495-2fd2c1476f01.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1023.254090] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a54253c0-ec9f-45e1-a63e-5fe28e9000d5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.261331] env[63028]: DEBUG oslo_vmware.api [None req-d2530e60-0841-4b86-b7f0-a5706c864f23 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Waiting for the task: (returnval){ [ 1023.261331] env[63028]: value = "task-2736100" [ 1023.261331] env[63028]: _type = "Task" [ 1023.261331] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.270937] env[63028]: DEBUG oslo_vmware.api [None req-d2530e60-0841-4b86-b7f0-a5706c864f23 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Task: {'id': task-2736100, 'name': Rename_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.571814] env[63028]: DEBUG oslo_vmware.api [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2736097, 'name': CopyVirtualDisk_Task} progress is 35%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.654966] env[63028]: DEBUG nova.scheduler.client.report [None req-82e835a5-f72a-41b4-bb91-607fdececc16 tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1023.712940] env[63028]: DEBUG nova.network.neutron [None req-7e0e6b37-3dae-4280-9cd6-a7e57ab79152 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: c492dea4-9779-4460-a559-5b82fb0643f0] Instance cache missing network info. {{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1023.772512] env[63028]: DEBUG oslo_vmware.api [None req-d2530e60-0841-4b86-b7f0-a5706c864f23 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Task: {'id': task-2736100, 'name': Rename_Task, 'duration_secs': 0.356847} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.774056] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5d13b0db-b56d-43b7-9f3a-f0646936a40d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1023.774056] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2530e60-0841-4b86-b7f0-a5706c864f23 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] [instance: b16d85d7-13f3-4be0-8495-2fd2c1476f01] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1023.774548] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fa18f5fa-5f3e-495f-9e07-eb91e3b48dc6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.787334] env[63028]: DEBUG oslo_vmware.api [None req-d2530e60-0841-4b86-b7f0-a5706c864f23 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Waiting for the task: (returnval){ [ 1023.787334] env[63028]: value = "task-2736101" [ 1023.787334] env[63028]: _type = "Task" [ 1023.787334] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.794855] env[63028]: DEBUG oslo_vmware.api [None req-d2530e60-0841-4b86-b7f0-a5706c864f23 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Task: {'id': task-2736101, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.948747] env[63028]: DEBUG nova.network.neutron [None req-7e0e6b37-3dae-4280-9cd6-a7e57ab79152 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: c492dea4-9779-4460-a559-5b82fb0643f0] Updating instance_info_cache with network_info: [{"id": "7a1202c7-93f9-43c0-ab7e-9c1c8a578e37", "address": "fa:16:3e:a6:7a:72", "network": {"id": "9c32515c-66bf-4a5f-9011-975fe2f6b264", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1101331936-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8efc6d89903c454eb39136a76e0adef5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92fe29b3-0907-453d-aabb-5559c4bd7c0f", "external-id": "nsx-vlan-transportzone-482", "segmentation_id": 482, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7a1202c7-93", "ovs_interfaceid": "7a1202c7-93f9-43c0-ab7e-9c1c8a578e37", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1024.069833] env[63028]: DEBUG oslo_vmware.api [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2736097, 'name': CopyVirtualDisk_Task} progress is 54%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.160932] env[63028]: DEBUG oslo_concurrency.lockutils [None req-82e835a5-f72a-41b4-bb91-607fdececc16 tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.436s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1024.161508] env[63028]: DEBUG nova.compute.manager [None req-82e835a5-f72a-41b4-bb91-607fdececc16 tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] [instance: da23282a-bbda-47bf-9d9c-337ee9996779] Start building networks asynchronously for instance. 
{{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1024.165132] env[63028]: DEBUG oslo_concurrency.lockutils [None req-700d1c14-0bbf-4780-a97c-2e177b5c2496 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.843s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1024.166139] env[63028]: INFO nova.compute.claims [None req-700d1c14-0bbf-4780-a97c-2e177b5c2496 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: b3930760-1888-4f80-85d8-65120a25f275] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1024.297200] env[63028]: DEBUG oslo_vmware.api [None req-d2530e60-0841-4b86-b7f0-a5706c864f23 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Task: {'id': task-2736101, 'name': PowerOnVM_Task} progress is 87%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.456872] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7e0e6b37-3dae-4280-9cd6-a7e57ab79152 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Releasing lock "refresh_cache-c492dea4-9779-4460-a559-5b82fb0643f0" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1024.457243] env[63028]: DEBUG nova.compute.manager [None req-7e0e6b37-3dae-4280-9cd6-a7e57ab79152 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: c492dea4-9779-4460-a559-5b82fb0643f0] Instance network_info: |[{"id": "7a1202c7-93f9-43c0-ab7e-9c1c8a578e37", "address": "fa:16:3e:a6:7a:72", "network": {"id": "9c32515c-66bf-4a5f-9011-975fe2f6b264", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1101331936-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8efc6d89903c454eb39136a76e0adef5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92fe29b3-0907-453d-aabb-5559c4bd7c0f", "external-id": "nsx-vlan-transportzone-482", "segmentation_id": 482, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7a1202c7-93", "ovs_interfaceid": "7a1202c7-93f9-43c0-ab7e-9c1c8a578e37", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1024.457722] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-7e0e6b37-3dae-4280-9cd6-a7e57ab79152 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: c492dea4-9779-4460-a559-5b82fb0643f0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a6:7a:72', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '92fe29b3-0907-453d-aabb-5559c4bd7c0f', 'network-type': 'nsx.LogicalSwitch', 
'use-external-id': True}, 'iface_id': '7a1202c7-93f9-43c0-ab7e-9c1c8a578e37', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1024.466425] env[63028]: DEBUG oslo.service.loopingcall [None req-7e0e6b37-3dae-4280-9cd6-a7e57ab79152 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1024.466693] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c492dea4-9779-4460-a559-5b82fb0643f0] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1024.466930] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7c8b88b9-8c2a-49d9-bb2b-d8a6aabdb7bd {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.487696] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1024.487696] env[63028]: value = "task-2736102" [ 1024.487696] env[63028]: _type = "Task" [ 1024.487696] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.501329] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2736102, 'name': CreateVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.521645] env[63028]: DEBUG nova.compute.manager [req-fa698424-e2bc-4ca8-8464-c6089fb3b975 req-4c965865-cfe1-4001-baf2-66d4958d8cb4 service nova] [instance: c492dea4-9779-4460-a559-5b82fb0643f0] Received event network-changed-7a1202c7-93f9-43c0-ab7e-9c1c8a578e37 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1024.521927] env[63028]: DEBUG nova.compute.manager [req-fa698424-e2bc-4ca8-8464-c6089fb3b975 req-4c965865-cfe1-4001-baf2-66d4958d8cb4 service nova] [instance: c492dea4-9779-4460-a559-5b82fb0643f0] Refreshing instance network info cache due to event network-changed-7a1202c7-93f9-43c0-ab7e-9c1c8a578e37. 
{{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1024.522545] env[63028]: DEBUG oslo_concurrency.lockutils [req-fa698424-e2bc-4ca8-8464-c6089fb3b975 req-4c965865-cfe1-4001-baf2-66d4958d8cb4 service nova] Acquiring lock "refresh_cache-c492dea4-9779-4460-a559-5b82fb0643f0" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1024.522842] env[63028]: DEBUG oslo_concurrency.lockutils [req-fa698424-e2bc-4ca8-8464-c6089fb3b975 req-4c965865-cfe1-4001-baf2-66d4958d8cb4 service nova] Acquired lock "refresh_cache-c492dea4-9779-4460-a559-5b82fb0643f0" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1024.523064] env[63028]: DEBUG nova.network.neutron [req-fa698424-e2bc-4ca8-8464-c6089fb3b975 req-4c965865-cfe1-4001-baf2-66d4958d8cb4 service nova] [instance: c492dea4-9779-4460-a559-5b82fb0643f0] Refreshing network info cache for port 7a1202c7-93f9-43c0-ab7e-9c1c8a578e37 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1024.569923] env[63028]: DEBUG oslo_vmware.api [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2736097, 'name': CopyVirtualDisk_Task} progress is 74%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.671383] env[63028]: DEBUG nova.compute.utils [None req-82e835a5-f72a-41b4-bb91-607fdececc16 tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1024.676027] env[63028]: DEBUG nova.compute.manager [None req-82e835a5-f72a-41b4-bb91-607fdececc16 tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] [instance: da23282a-bbda-47bf-9d9c-337ee9996779] Allocating IP information in the background. {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1024.676027] env[63028]: DEBUG nova.network.neutron [None req-82e835a5-f72a-41b4-bb91-607fdececc16 tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] [instance: da23282a-bbda-47bf-9d9c-337ee9996779] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1024.716745] env[63028]: DEBUG nova.policy [None req-82e835a5-f72a-41b4-bb91-607fdececc16 tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'dd6ceab2ce694d1cb9708725b9d98e85', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '15fbb152d6224fc3928fc0b0cc9c0a29', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 1024.797254] env[63028]: DEBUG oslo_vmware.api [None req-d2530e60-0841-4b86-b7f0-a5706c864f23 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Task: {'id': task-2736101, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.000424] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2736102, 'name': CreateVM_Task} progress is 99%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.077775] env[63028]: DEBUG oslo_vmware.api [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2736097, 'name': CopyVirtualDisk_Task} progress is 94%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.168858] env[63028]: DEBUG nova.network.neutron [None req-82e835a5-f72a-41b4-bb91-607fdececc16 tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] [instance: da23282a-bbda-47bf-9d9c-337ee9996779] Successfully created port: 677d4bb7-eef2-4be9-9671-9c92ee555906 {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1025.175600] env[63028]: DEBUG nova.compute.manager [None req-82e835a5-f72a-41b4-bb91-607fdececc16 tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] [instance: da23282a-bbda-47bf-9d9c-337ee9996779] Start building block device mappings for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1025.297549] env[63028]: DEBUG oslo_vmware.api [None req-d2530e60-0841-4b86-b7f0-a5706c864f23 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Task: {'id': task-2736101, 'name': PowerOnVM_Task, 'duration_secs': 1.280486} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.300028] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2530e60-0841-4b86-b7f0-a5706c864f23 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] [instance: b16d85d7-13f3-4be0-8495-2fd2c1476f01] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1025.301224] env[63028]: INFO nova.compute.manager [None req-d2530e60-0841-4b86-b7f0-a5706c864f23 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] [instance: b16d85d7-13f3-4be0-8495-2fd2c1476f01] Took 9.89 seconds to spawn the instance on the hypervisor. [ 1025.301460] env[63028]: DEBUG nova.compute.manager [None req-d2530e60-0841-4b86-b7f0-a5706c864f23 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] [instance: b16d85d7-13f3-4be0-8495-2fd2c1476f01] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1025.302519] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86a7a23a-d5c6-4092-b2b3-5432ce5f5fe1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.460591] env[63028]: DEBUG nova.network.neutron [req-fa698424-e2bc-4ca8-8464-c6089fb3b975 req-4c965865-cfe1-4001-baf2-66d4958d8cb4 service nova] [instance: c492dea4-9779-4460-a559-5b82fb0643f0] Updated VIF entry in instance network info cache for port 7a1202c7-93f9-43c0-ab7e-9c1c8a578e37. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1025.460706] env[63028]: DEBUG nova.network.neutron [req-fa698424-e2bc-4ca8-8464-c6089fb3b975 req-4c965865-cfe1-4001-baf2-66d4958d8cb4 service nova] [instance: c492dea4-9779-4460-a559-5b82fb0643f0] Updating instance_info_cache with network_info: [{"id": "7a1202c7-93f9-43c0-ab7e-9c1c8a578e37", "address": "fa:16:3e:a6:7a:72", "network": {"id": "9c32515c-66bf-4a5f-9011-975fe2f6b264", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1101331936-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8efc6d89903c454eb39136a76e0adef5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92fe29b3-0907-453d-aabb-5559c4bd7c0f", "external-id": "nsx-vlan-transportzone-482", "segmentation_id": 482, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7a1202c7-93", "ovs_interfaceid": "7a1202c7-93f9-43c0-ab7e-9c1c8a578e37", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1025.498827] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2736102, 'name': CreateVM_Task, 'duration_secs': 0.619989} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.501376] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c492dea4-9779-4460-a559-5b82fb0643f0] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1025.502928] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7e0e6b37-3dae-4280-9cd6-a7e57ab79152 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1025.503100] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7e0e6b37-3dae-4280-9cd6-a7e57ab79152 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1025.503439] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7e0e6b37-3dae-4280-9cd6-a7e57ab79152 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1025.503662] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ced2f9f5-4af0-47e7-82b2-fd7af8637002 {{(pid=63028) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.508742] env[63028]: DEBUG oslo_vmware.api [None req-7e0e6b37-3dae-4280-9cd6-a7e57ab79152 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Waiting for the task: (returnval){ [ 1025.508742] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5288f258-bb53-36a9-1ada-569da7bb73eb" [ 1025.508742] env[63028]: _type = "Task" [ 1025.508742] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.522603] env[63028]: DEBUG oslo_vmware.api [None req-7e0e6b37-3dae-4280-9cd6-a7e57ab79152 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5288f258-bb53-36a9-1ada-569da7bb73eb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.554875] env[63028]: DEBUG oslo_concurrency.lockutils [None req-26d9c995-1761-41dc-941a-dd0d7237cc1c tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Acquiring lock "c0693e4c-30b2-4eda-be1e-f6186d78038b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1025.555241] env[63028]: DEBUG oslo_concurrency.lockutils [None req-26d9c995-1761-41dc-941a-dd0d7237cc1c tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Lock "c0693e4c-30b2-4eda-be1e-f6186d78038b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1025.555465] env[63028]: DEBUG oslo_concurrency.lockutils [None req-26d9c995-1761-41dc-941a-dd0d7237cc1c tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Acquiring lock "c0693e4c-30b2-4eda-be1e-f6186d78038b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1025.555682] env[63028]: DEBUG oslo_concurrency.lockutils [None req-26d9c995-1761-41dc-941a-dd0d7237cc1c tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Lock "c0693e4c-30b2-4eda-be1e-f6186d78038b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1025.555862] env[63028]: DEBUG oslo_concurrency.lockutils [None req-26d9c995-1761-41dc-941a-dd0d7237cc1c tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Lock "c0693e4c-30b2-4eda-be1e-f6186d78038b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1025.563451] env[63028]: INFO nova.compute.manager [None req-26d9c995-1761-41dc-941a-dd0d7237cc1c tempest-AttachVolumeNegativeTest-1243436832 
tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: c0693e4c-30b2-4eda-be1e-f6186d78038b] Terminating instance [ 1025.574884] env[63028]: DEBUG oslo_vmware.api [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2736097, 'name': CopyVirtualDisk_Task, 'duration_secs': 3.205901} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.576131] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/89f02435-fa22-4275-ab99-73170c1e53cf/89f02435-fa22-4275-ab99-73170c1e53cf.vmdk to [datastore2] 85aafadb-81d6-4687-aed1-fbe829e5f95f/85aafadb-81d6-4687-aed1-fbe829e5f95f.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1025.576916] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f7f1837-4d60-4dda-821c-5fcdfd120da7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.582140] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-982bb9eb-793a-43d4-8a19-62781fcd6c70 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.600569] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-989a1c55-0c4c-4673-ba10-4cc0d63532d9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.616291] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Reconfiguring VM instance instance-00000036 to attach disk [datastore2] 85aafadb-81d6-4687-aed1-fbe829e5f95f/85aafadb-81d6-4687-aed1-fbe829e5f95f.vmdk or device None with type streamOptimized {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1025.617013] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6c1ad880-1a80-4d9c-9382-10daa9eadce7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.662073] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-021adbfc-b06c-48a9-aaaa-db1d6ccdc59b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.664965] env[63028]: DEBUG oslo_vmware.api [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Waiting for the task: (returnval){ [ 1025.664965] env[63028]: value = "task-2736103" [ 1025.664965] env[63028]: _type = "Task" [ 1025.664965] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.672663] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e047306-f2a9-4997-b3d3-1be07d809231 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.679581] env[63028]: DEBUG oslo_vmware.api [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2736103, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.691823] env[63028]: DEBUG nova.compute.provider_tree [None req-700d1c14-0bbf-4780-a97c-2e177b5c2496 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1025.818175] env[63028]: INFO nova.compute.manager [None req-d2530e60-0841-4b86-b7f0-a5706c864f23 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] [instance: b16d85d7-13f3-4be0-8495-2fd2c1476f01] Took 23.35 seconds to build instance. [ 1025.963427] env[63028]: DEBUG oslo_concurrency.lockutils [req-fa698424-e2bc-4ca8-8464-c6089fb3b975 req-4c965865-cfe1-4001-baf2-66d4958d8cb4 service nova] Releasing lock "refresh_cache-c492dea4-9779-4460-a559-5b82fb0643f0" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1026.020251] env[63028]: DEBUG oslo_vmware.api [None req-7e0e6b37-3dae-4280-9cd6-a7e57ab79152 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5288f258-bb53-36a9-1ada-569da7bb73eb, 'name': SearchDatastore_Task, 'duration_secs': 0.072387} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.020564] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7e0e6b37-3dae-4280-9cd6-a7e57ab79152 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1026.020799] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-7e0e6b37-3dae-4280-9cd6-a7e57ab79152 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: c492dea4-9779-4460-a559-5b82fb0643f0] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1026.021131] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7e0e6b37-3dae-4280-9cd6-a7e57ab79152 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1026.021203] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7e0e6b37-3dae-4280-9cd6-a7e57ab79152 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1026.021381] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e0e6b37-3dae-4280-9cd6-a7e57ab79152 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1026.021645] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c194b3a1-48fb-4292-a682-c275d91d22b1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.032692] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e0e6b37-3dae-4280-9cd6-a7e57ab79152 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1026.033272] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-7e0e6b37-3dae-4280-9cd6-a7e57ab79152 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1026.033786] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-809651c2-cc6e-47f7-90f5-da943a850d45 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.038934] env[63028]: DEBUG oslo_vmware.api [None req-7e0e6b37-3dae-4280-9cd6-a7e57ab79152 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Waiting for the task: (returnval){ [ 1026.038934] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5240d3ce-a185-6cb2-33c8-2c1840bb3eaa" [ 1026.038934] env[63028]: _type = "Task" [ 1026.038934] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.046352] env[63028]: DEBUG oslo_vmware.api [None req-7e0e6b37-3dae-4280-9cd6-a7e57ab79152 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5240d3ce-a185-6cb2-33c8-2c1840bb3eaa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.072238] env[63028]: DEBUG nova.compute.manager [None req-26d9c995-1761-41dc-941a-dd0d7237cc1c tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: c0693e4c-30b2-4eda-be1e-f6186d78038b] Start destroying the instance on the hypervisor. {{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1026.072457] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-26d9c995-1761-41dc-941a-dd0d7237cc1c tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: c0693e4c-30b2-4eda-be1e-f6186d78038b] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1026.073325] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-969a1078-a414-40fd-92b1-26cbdba9def6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.080712] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-26d9c995-1761-41dc-941a-dd0d7237cc1c tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: c0693e4c-30b2-4eda-be1e-f6186d78038b] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1026.080947] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-18865e8f-849c-4164-950a-caf746e598af {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.087190] env[63028]: DEBUG oslo_vmware.api [None req-26d9c995-1761-41dc-941a-dd0d7237cc1c tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Waiting for the task: (returnval){ [ 1026.087190] env[63028]: value = "task-2736104" [ 1026.087190] env[63028]: _type = "Task" [ 1026.087190] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.094219] env[63028]: DEBUG oslo_vmware.api [None req-26d9c995-1761-41dc-941a-dd0d7237cc1c tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': task-2736104, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.174746] env[63028]: DEBUG oslo_vmware.api [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2736103, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.200157] env[63028]: DEBUG nova.compute.manager [None req-82e835a5-f72a-41b4-bb91-607fdececc16 tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] [instance: da23282a-bbda-47bf-9d9c-337ee9996779] Start spawning the instance on the hypervisor. {{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1026.202835] env[63028]: DEBUG nova.scheduler.client.report [None req-700d1c14-0bbf-4780-a97c-2e177b5c2496 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1026.232416] env[63028]: DEBUG nova.virt.hardware [None req-82e835a5-f72a-41b4-bb91-607fdececc16 tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1026.232736] env[63028]: DEBUG nova.virt.hardware [None req-82e835a5-f72a-41b4-bb91-607fdececc16 tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1026.232916] env[63028]: DEBUG nova.virt.hardware [None req-82e835a5-f72a-41b4-bb91-607fdececc16 tempest-ServerAddressesTestJSON-2051703973 
tempest-ServerAddressesTestJSON-2051703973-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1026.233184] env[63028]: DEBUG nova.virt.hardware [None req-82e835a5-f72a-41b4-bb91-607fdececc16 tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1026.233354] env[63028]: DEBUG nova.virt.hardware [None req-82e835a5-f72a-41b4-bb91-607fdececc16 tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1026.233507] env[63028]: DEBUG nova.virt.hardware [None req-82e835a5-f72a-41b4-bb91-607fdececc16 tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1026.233759] env[63028]: DEBUG nova.virt.hardware [None req-82e835a5-f72a-41b4-bb91-607fdececc16 tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1026.233926] env[63028]: DEBUG nova.virt.hardware [None req-82e835a5-f72a-41b4-bb91-607fdececc16 tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1026.234175] env[63028]: DEBUG nova.virt.hardware [None req-82e835a5-f72a-41b4-bb91-607fdececc16 tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1026.234379] env[63028]: DEBUG nova.virt.hardware [None req-82e835a5-f72a-41b4-bb91-607fdececc16 tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1026.234643] env[63028]: DEBUG nova.virt.hardware [None req-82e835a5-f72a-41b4-bb91-607fdececc16 tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1026.235620] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f4e700c-f7ae-4bd9-a9b6-df61a56d106a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.244721] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ed89175-f993-4256-91da-ca3759db341c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.321897] env[63028]: DEBUG oslo_concurrency.lockutils [None 
req-d2530e60-0841-4b86-b7f0-a5706c864f23 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Lock "b16d85d7-13f3-4be0-8495-2fd2c1476f01" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.876s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1026.549131] env[63028]: DEBUG oslo_vmware.api [None req-7e0e6b37-3dae-4280-9cd6-a7e57ab79152 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5240d3ce-a185-6cb2-33c8-2c1840bb3eaa, 'name': SearchDatastore_Task, 'duration_secs': 0.061363} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.549919] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b28d8f19-c751-4eed-983b-903a28319cbc {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.555038] env[63028]: DEBUG oslo_vmware.api [None req-7e0e6b37-3dae-4280-9cd6-a7e57ab79152 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Waiting for the task: (returnval){ [ 1026.555038] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]529833f2-5ca9-8d39-3b16-32127c3754bb" [ 1026.555038] env[63028]: _type = "Task" [ 1026.555038] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.561106] env[63028]: DEBUG nova.compute.manager [req-b8f2e6a0-30f0-4229-84d8-789b5f3fc61e req-6fe673e3-68b5-4c3b-956f-336d85345dc6 service nova] [instance: b16d85d7-13f3-4be0-8495-2fd2c1476f01] Received event network-changed-47a80dc6-e819-410f-8257-cfc2cce0d3d9 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1026.561106] env[63028]: DEBUG nova.compute.manager [req-b8f2e6a0-30f0-4229-84d8-789b5f3fc61e req-6fe673e3-68b5-4c3b-956f-336d85345dc6 service nova] [instance: b16d85d7-13f3-4be0-8495-2fd2c1476f01] Refreshing instance network info cache due to event network-changed-47a80dc6-e819-410f-8257-cfc2cce0d3d9. 
{{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1026.561106] env[63028]: DEBUG oslo_concurrency.lockutils [req-b8f2e6a0-30f0-4229-84d8-789b5f3fc61e req-6fe673e3-68b5-4c3b-956f-336d85345dc6 service nova] Acquiring lock "refresh_cache-b16d85d7-13f3-4be0-8495-2fd2c1476f01" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1026.561106] env[63028]: DEBUG oslo_concurrency.lockutils [req-b8f2e6a0-30f0-4229-84d8-789b5f3fc61e req-6fe673e3-68b5-4c3b-956f-336d85345dc6 service nova] Acquired lock "refresh_cache-b16d85d7-13f3-4be0-8495-2fd2c1476f01" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1026.561106] env[63028]: DEBUG nova.network.neutron [req-b8f2e6a0-30f0-4229-84d8-789b5f3fc61e req-6fe673e3-68b5-4c3b-956f-336d85345dc6 service nova] [instance: b16d85d7-13f3-4be0-8495-2fd2c1476f01] Refreshing network info cache for port 47a80dc6-e819-410f-8257-cfc2cce0d3d9 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1026.571024] env[63028]: DEBUG oslo_vmware.api [None req-7e0e6b37-3dae-4280-9cd6-a7e57ab79152 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]529833f2-5ca9-8d39-3b16-32127c3754bb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.596346] env[63028]: DEBUG oslo_vmware.api [None req-26d9c995-1761-41dc-941a-dd0d7237cc1c tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': task-2736104, 'name': PowerOffVM_Task, 'duration_secs': 0.363145} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.596638] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-26d9c995-1761-41dc-941a-dd0d7237cc1c tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: c0693e4c-30b2-4eda-be1e-f6186d78038b] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1026.596803] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-26d9c995-1761-41dc-941a-dd0d7237cc1c tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: c0693e4c-30b2-4eda-be1e-f6186d78038b] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1026.597177] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d90563db-ebe4-4c34-b025-635a53006c8f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.676653] env[63028]: DEBUG oslo_vmware.api [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2736103, 'name': ReconfigVM_Task, 'duration_secs': 0.971462} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.677106] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Reconfigured VM instance instance-00000036 to attach disk [datastore2] 85aafadb-81d6-4687-aed1-fbe829e5f95f/85aafadb-81d6-4687-aed1-fbe829e5f95f.vmdk or device None with type streamOptimized {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1026.678278] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Block device information present: {'root_device_name': '/dev/sda', 'image': [{'encryption_secret_uuid': None, 'guest_format': None, 'size': 0, 'disk_bus': None, 'encryption_format': None, 'boot_index': 0, 'device_name': '/dev/sda', 'device_type': 'disk', 'encrypted': False, 'encryption_options': None, 'image_id': 'f2ba2026-3f4b-431c-97c1-c4ba582a9907'}], 'ephemerals': [], 'block_device_mapping': [{'delete_on_termination': False, 'guest_format': None, 'disk_bus': None, 'mount_device': '/dev/sdb', 'attachment_id': 'ec214d8b-a579-41a9-8205-e58ea8474d64', 'boot_index': None, 'device_type': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-550837', 'volume_id': '996ec8ec-5318-4963-9384-330f1e01190d', 'name': 'volume-996ec8ec-5318-4963-9384-330f1e01190d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': '85aafadb-81d6-4687-aed1-fbe829e5f95f', 'attached_at': '', 'detached_at': '', 'volume_id': '996ec8ec-5318-4963-9384-330f1e01190d', 'serial': '996ec8ec-5318-4963-9384-330f1e01190d'}, 'volume_type': None}], 'swap': None} {{(pid=63028) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1026.678603] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Volume attach. 
Driver type: vmdk {{(pid=63028) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1026.678989] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-550837', 'volume_id': '996ec8ec-5318-4963-9384-330f1e01190d', 'name': 'volume-996ec8ec-5318-4963-9384-330f1e01190d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': '85aafadb-81d6-4687-aed1-fbe829e5f95f', 'attached_at': '', 'detached_at': '', 'volume_id': '996ec8ec-5318-4963-9384-330f1e01190d', 'serial': '996ec8ec-5318-4963-9384-330f1e01190d'} {{(pid=63028) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1026.679902] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-583f5f80-a972-4e95-8c31-5e940b24471b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.707232] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54e1affe-ee8b-4fe1-8acf-19ace9b84d88 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.714401] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-26d9c995-1761-41dc-941a-dd0d7237cc1c tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: c0693e4c-30b2-4eda-be1e-f6186d78038b] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1026.714401] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-26d9c995-1761-41dc-941a-dd0d7237cc1c tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: c0693e4c-30b2-4eda-be1e-f6186d78038b] Deleting contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1026.714401] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-26d9c995-1761-41dc-941a-dd0d7237cc1c tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Deleting the datastore file [datastore1] c0693e4c-30b2-4eda-be1e-f6186d78038b {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1026.714401] env[63028]: DEBUG oslo_concurrency.lockutils [None req-700d1c14-0bbf-4780-a97c-2e177b5c2496 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.549s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1026.714401] env[63028]: DEBUG nova.compute.manager [None req-700d1c14-0bbf-4780-a97c-2e177b5c2496 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: b3930760-1888-4f80-85d8-65120a25f275] Start building networks asynchronously for instance. 
{{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1026.717095] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f5e27d26-8392-497b-97a1-fe2dd022c4b5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.721348] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f0751db6-5ce2-45a6-a330-fe5726ff7979 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.430s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1026.722925] env[63028]: INFO nova.compute.claims [None req-f0751db6-5ce2-45a6-a330-fe5726ff7979 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] [instance: f804ec95-0b97-4960-844d-b678b97fc401] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1026.754054] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Reconfiguring VM instance instance-00000036 to attach disk [datastore2] volume-996ec8ec-5318-4963-9384-330f1e01190d/volume-996ec8ec-5318-4963-9384-330f1e01190d.vmdk or device None with type thin {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1026.756338] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-243b2a6e-fe81-49ab-9be2-1e384df81c52 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.770012] env[63028]: DEBUG oslo_vmware.api [None req-26d9c995-1761-41dc-941a-dd0d7237cc1c tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Waiting for the task: (returnval){ [ 1026.770012] env[63028]: value = "task-2736106" [ 1026.770012] env[63028]: _type = "Task" [ 1026.770012] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.776371] env[63028]: DEBUG oslo_vmware.api [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Waiting for the task: (returnval){ [ 1026.776371] env[63028]: value = "task-2736107" [ 1026.776371] env[63028]: _type = "Task" [ 1026.776371] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.783271] env[63028]: DEBUG oslo_vmware.api [None req-26d9c995-1761-41dc-941a-dd0d7237cc1c tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': task-2736106, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.789121] env[63028]: DEBUG oslo_vmware.api [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2736107, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.948854] env[63028]: DEBUG nova.network.neutron [None req-82e835a5-f72a-41b4-bb91-607fdececc16 tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] [instance: da23282a-bbda-47bf-9d9c-337ee9996779] Successfully updated port: 677d4bb7-eef2-4be9-9671-9c92ee555906 {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1027.067900] env[63028]: DEBUG oslo_vmware.api [None req-7e0e6b37-3dae-4280-9cd6-a7e57ab79152 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]529833f2-5ca9-8d39-3b16-32127c3754bb, 'name': SearchDatastore_Task, 'duration_secs': 0.012847} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.068166] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7e0e6b37-3dae-4280-9cd6-a7e57ab79152 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1027.068433] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e0e6b37-3dae-4280-9cd6-a7e57ab79152 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] c492dea4-9779-4460-a559-5b82fb0643f0/c492dea4-9779-4460-a559-5b82fb0643f0.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1027.069042] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fb65cd7c-2b38-488b-8d03-276d9994f35c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.077495] env[63028]: DEBUG oslo_vmware.api [None req-7e0e6b37-3dae-4280-9cd6-a7e57ab79152 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Waiting for the task: (returnval){ [ 1027.077495] env[63028]: value = "task-2736108" [ 1027.077495] env[63028]: _type = "Task" [ 1027.077495] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.086205] env[63028]: DEBUG oslo_vmware.api [None req-7e0e6b37-3dae-4280-9cd6-a7e57ab79152 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2736108, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.130439] env[63028]: DEBUG oslo_concurrency.lockutils [None req-fffae308-21b3-4b5b-a9cb-3d79d782e7ea tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Acquiring lock "63524cd8-81de-419f-bb07-0326f3cb393f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1027.130706] env[63028]: DEBUG oslo_concurrency.lockutils [None req-fffae308-21b3-4b5b-a9cb-3d79d782e7ea tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Lock "63524cd8-81de-419f-bb07-0326f3cb393f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1027.131075] env[63028]: DEBUG oslo_concurrency.lockutils [None req-fffae308-21b3-4b5b-a9cb-3d79d782e7ea tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Acquiring lock "63524cd8-81de-419f-bb07-0326f3cb393f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1027.131172] env[63028]: DEBUG oslo_concurrency.lockutils [None req-fffae308-21b3-4b5b-a9cb-3d79d782e7ea tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Lock "63524cd8-81de-419f-bb07-0326f3cb393f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1027.131292] env[63028]: DEBUG oslo_concurrency.lockutils [None req-fffae308-21b3-4b5b-a9cb-3d79d782e7ea tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Lock "63524cd8-81de-419f-bb07-0326f3cb393f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1027.134113] env[63028]: INFO nova.compute.manager [None req-fffae308-21b3-4b5b-a9cb-3d79d782e7ea tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Terminating instance [ 1027.202774] env[63028]: DEBUG nova.compute.manager [req-0ae5b27d-4324-44ad-b1d1-ddaea29d6c51 req-7b6eff49-f65f-494c-8cdc-cb5784fe902f service nova] [instance: da23282a-bbda-47bf-9d9c-337ee9996779] Received event network-vif-plugged-677d4bb7-eef2-4be9-9671-9c92ee555906 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1027.204200] env[63028]: DEBUG oslo_concurrency.lockutils [req-0ae5b27d-4324-44ad-b1d1-ddaea29d6c51 req-7b6eff49-f65f-494c-8cdc-cb5784fe902f service nova] Acquiring lock "da23282a-bbda-47bf-9d9c-337ee9996779-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1027.204200] env[63028]: DEBUG oslo_concurrency.lockutils [req-0ae5b27d-4324-44ad-b1d1-ddaea29d6c51 
req-7b6eff49-f65f-494c-8cdc-cb5784fe902f service nova] Lock "da23282a-bbda-47bf-9d9c-337ee9996779-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1027.204200] env[63028]: DEBUG oslo_concurrency.lockutils [req-0ae5b27d-4324-44ad-b1d1-ddaea29d6c51 req-7b6eff49-f65f-494c-8cdc-cb5784fe902f service nova] Lock "da23282a-bbda-47bf-9d9c-337ee9996779-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1027.204200] env[63028]: DEBUG nova.compute.manager [req-0ae5b27d-4324-44ad-b1d1-ddaea29d6c51 req-7b6eff49-f65f-494c-8cdc-cb5784fe902f service nova] [instance: da23282a-bbda-47bf-9d9c-337ee9996779] No waiting events found dispatching network-vif-plugged-677d4bb7-eef2-4be9-9671-9c92ee555906 {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1027.204200] env[63028]: WARNING nova.compute.manager [req-0ae5b27d-4324-44ad-b1d1-ddaea29d6c51 req-7b6eff49-f65f-494c-8cdc-cb5784fe902f service nova] [instance: da23282a-bbda-47bf-9d9c-337ee9996779] Received unexpected event network-vif-plugged-677d4bb7-eef2-4be9-9671-9c92ee555906 for instance with vm_state building and task_state spawning. [ 1027.223322] env[63028]: DEBUG nova.compute.utils [None req-700d1c14-0bbf-4780-a97c-2e177b5c2496 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1027.227182] env[63028]: DEBUG nova.compute.manager [None req-700d1c14-0bbf-4780-a97c-2e177b5c2496 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: b3930760-1888-4f80-85d8-65120a25f275] Allocating IP information in the background. {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1027.227374] env[63028]: DEBUG nova.network.neutron [None req-700d1c14-0bbf-4780-a97c-2e177b5c2496 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: b3930760-1888-4f80-85d8-65120a25f275] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1027.287351] env[63028]: DEBUG oslo_vmware.api [None req-26d9c995-1761-41dc-941a-dd0d7237cc1c tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': task-2736106, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.257353} completed successfully. 
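The network-vif-plugged handling in the records above follows a waiter-registry pattern: a build thread registers interest in an event before triggering it, and the thread that receives the Neutron notification pops the matching waiter under a per-instance events lock; if nobody registered (as here, where the instance is still building), the event is reported as unexpected. A rough sketch of that idea with plain threading primitives and hypothetical names, not Nova's actual classes:

```python
import threading

# Illustrative waiter registry: dispatch() wakes a registered waiter, or
# returns False (the "Received unexpected event" case in the log).

class InstanceEventRegistry:
    def __init__(self):
        self._lock = threading.Lock()   # guards the waiter table
        self._waiters = {}              # (instance_id, event_name) -> waiter dict

    def prepare(self, instance_id, event_name):
        """A build thread registers interest before triggering the event."""
        waiter = {'ready': threading.Event(), 'payload': None}
        with self._lock:
            self._waiters[(instance_id, event_name)] = waiter
        return waiter

    def dispatch(self, instance_id, event_name, payload=None):
        """The event-receiving thread pops a waiter, if any, and wakes it."""
        with self._lock:
            waiter = self._waiters.pop((instance_id, event_name), None)
        if waiter is None:
            return False  # no one was waiting: unexpected event
        waiter['payload'] = payload
        waiter['ready'].set()
        return True
```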
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.292479] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-26d9c995-1761-41dc-941a-dd0d7237cc1c tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1027.292889] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-26d9c995-1761-41dc-941a-dd0d7237cc1c tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: c0693e4c-30b2-4eda-be1e-f6186d78038b] Deleted contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1027.292889] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-26d9c995-1761-41dc-941a-dd0d7237cc1c tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: c0693e4c-30b2-4eda-be1e-f6186d78038b] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1027.293082] env[63028]: INFO nova.compute.manager [None req-26d9c995-1761-41dc-941a-dd0d7237cc1c tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: c0693e4c-30b2-4eda-be1e-f6186d78038b] Took 1.22 seconds to destroy the instance on the hypervisor. [ 1027.293331] env[63028]: DEBUG oslo.service.loopingcall [None req-26d9c995-1761-41dc-941a-dd0d7237cc1c tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1027.293539] env[63028]: DEBUG oslo_vmware.api [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2736107, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.295560] env[63028]: DEBUG nova.compute.manager [-] [instance: c0693e4c-30b2-4eda-be1e-f6186d78038b] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1027.295560] env[63028]: DEBUG nova.network.neutron [-] [instance: c0693e4c-30b2-4eda-be1e-f6186d78038b] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1027.326415] env[63028]: DEBUG oslo_concurrency.lockutils [None req-0b092587-9660-410c-9b74-a77157aa502f tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Acquiring lock "b16d85d7-13f3-4be0-8495-2fd2c1476f01" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1027.326614] env[63028]: DEBUG oslo_concurrency.lockutils [None req-0b092587-9660-410c-9b74-a77157aa502f tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Lock "b16d85d7-13f3-4be0-8495-2fd2c1476f01" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1027.326852] env[63028]: INFO nova.compute.manager [None req-0b092587-9660-410c-9b74-a77157aa502f tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] [instance: b16d85d7-13f3-4be0-8495-2fd2c1476f01] Rebooting instance [ 1027.345558] env[63028]: DEBUG nova.policy [None req-700d1c14-0bbf-4780-a97c-2e177b5c2496 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3c022ca18b0a41ce9d790fa25f6ebf8c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ea26842446ec4691a6456a6659188704', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 1027.452026] env[63028]: DEBUG oslo_concurrency.lockutils [None req-82e835a5-f72a-41b4-bb91-607fdececc16 tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] Acquiring lock "refresh_cache-da23282a-bbda-47bf-9d9c-337ee9996779" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1027.452026] env[63028]: DEBUG oslo_concurrency.lockutils [None req-82e835a5-f72a-41b4-bb91-607fdececc16 tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] Acquired lock "refresh_cache-da23282a-bbda-47bf-9d9c-337ee9996779" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1027.452026] env[63028]: DEBUG nova.network.neutron [None req-82e835a5-f72a-41b4-bb91-607fdececc16 tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] [instance: da23282a-bbda-47bf-9d9c-337ee9996779] Building network info cache for instance {{(pid=63028) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2067}} [ 1027.541919] env[63028]: DEBUG nova.network.neutron [req-b8f2e6a0-30f0-4229-84d8-789b5f3fc61e req-6fe673e3-68b5-4c3b-956f-336d85345dc6 service nova] [instance: b16d85d7-13f3-4be0-8495-2fd2c1476f01] Updated VIF entry in instance network info cache for port 47a80dc6-e819-410f-8257-cfc2cce0d3d9. {{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1027.541919] env[63028]: DEBUG nova.network.neutron [req-b8f2e6a0-30f0-4229-84d8-789b5f3fc61e req-6fe673e3-68b5-4c3b-956f-336d85345dc6 service nova] [instance: b16d85d7-13f3-4be0-8495-2fd2c1476f01] Updating instance_info_cache with network_info: [{"id": "47a80dc6-e819-410f-8257-cfc2cce0d3d9", "address": "fa:16:3e:b7:57:e4", "network": {"id": "8fd5dd35-416e-45e2-aea5-8c2c22752ef7", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1819490475-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bae448aa28a84aa6863fffc24a5448fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "489b2441-7132-4942-8b61-49cf0ad4400e", "external-id": "nsx-vlan-transportzone-971", "segmentation_id": 971, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47a80dc6-e8", "ovs_interfaceid": "47a80dc6-e819-410f-8257-cfc2cce0d3d9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1027.594907] env[63028]: DEBUG oslo_vmware.api [None req-7e0e6b37-3dae-4280-9cd6-a7e57ab79152 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2736108, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.638682] env[63028]: DEBUG nova.compute.manager [None req-fffae308-21b3-4b5b-a9cb-3d79d782e7ea tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Start destroying the instance on the hypervisor. 
{{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1027.638682] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-fffae308-21b3-4b5b-a9cb-3d79d782e7ea tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1027.639594] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12dffb1c-f9da-4cbe-ba1a-ae6570444b33 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.650595] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-fffae308-21b3-4b5b-a9cb-3d79d782e7ea tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1027.651674] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c7767da9-0734-49ad-ba50-bae30b5fbd58 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.658837] env[63028]: DEBUG oslo_vmware.api [None req-fffae308-21b3-4b5b-a9cb-3d79d782e7ea tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Waiting for the task: (returnval){ [ 1027.658837] env[63028]: value = "task-2736109" [ 1027.658837] env[63028]: _type = "Task" [ 1027.658837] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.674783] env[63028]: DEBUG oslo_vmware.api [None req-fffae308-21b3-4b5b-a9cb-3d79d782e7ea tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Task: {'id': task-2736109, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.732517] env[63028]: DEBUG nova.compute.manager [None req-700d1c14-0bbf-4780-a97c-2e177b5c2496 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: b3930760-1888-4f80-85d8-65120a25f275] Start building block device mappings for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1027.793084] env[63028]: DEBUG oslo_vmware.api [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2736107, 'name': ReconfigVM_Task, 'duration_secs': 0.544043} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.796546] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Reconfigured VM instance instance-00000036 to attach disk [datastore2] volume-996ec8ec-5318-4963-9384-330f1e01190d/volume-996ec8ec-5318-4963-9384-330f1e01190d.vmdk or device None with type thin {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1027.802438] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b098d2e1-4a12-45c9-92fa-3b1d484b8965 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.819047] env[63028]: DEBUG oslo_vmware.api [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Waiting for the task: (returnval){ [ 1027.819047] env[63028]: value = "task-2736110" [ 1027.819047] env[63028]: _type = "Task" [ 1027.819047] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.828328] env[63028]: DEBUG oslo_vmware.api [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2736110, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.848996] env[63028]: DEBUG oslo_concurrency.lockutils [None req-0b092587-9660-410c-9b74-a77157aa502f tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Acquiring lock "refresh_cache-b16d85d7-13f3-4be0-8495-2fd2c1476f01" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1028.047102] env[63028]: DEBUG oslo_concurrency.lockutils [req-b8f2e6a0-30f0-4229-84d8-789b5f3fc61e req-6fe673e3-68b5-4c3b-956f-336d85345dc6 service nova] Releasing lock "refresh_cache-b16d85d7-13f3-4be0-8495-2fd2c1476f01" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1028.054817] env[63028]: DEBUG oslo_concurrency.lockutils [None req-0b092587-9660-410c-9b74-a77157aa502f tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Acquired lock "refresh_cache-b16d85d7-13f3-4be0-8495-2fd2c1476f01" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1028.054817] env[63028]: DEBUG nova.network.neutron [None req-0b092587-9660-410c-9b74-a77157aa502f tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] [instance: b16d85d7-13f3-4be0-8495-2fd2c1476f01] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1028.055428] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f2e14d1a-0ade-4b1f-a23f-8bb884e7b3df tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Acquiring lock "46dc76bc-854f-46ad-9db5-21cf6f40fb21" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" 
{{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1028.055688] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f2e14d1a-0ade-4b1f-a23f-8bb884e7b3df tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Lock "46dc76bc-854f-46ad-9db5-21cf6f40fb21" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1028.088764] env[63028]: DEBUG oslo_vmware.api [None req-7e0e6b37-3dae-4280-9cd6-a7e57ab79152 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2736108, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.602272} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.089056] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e0e6b37-3dae-4280-9cd6-a7e57ab79152 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] c492dea4-9779-4460-a559-5b82fb0643f0/c492dea4-9779-4460-a559-5b82fb0643f0.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1028.089279] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-7e0e6b37-3dae-4280-9cd6-a7e57ab79152 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: c492dea4-9779-4460-a559-5b82fb0643f0] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1028.089538] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5af7fdda-6ae9-4eaa-ac35-f3235ad512cb {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.096483] env[63028]: DEBUG oslo_vmware.api [None req-7e0e6b37-3dae-4280-9cd6-a7e57ab79152 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Waiting for the task: (returnval){ [ 1028.096483] env[63028]: value = "task-2736111" [ 1028.096483] env[63028]: _type = "Task" [ 1028.096483] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.110531] env[63028]: DEBUG oslo_vmware.api [None req-7e0e6b37-3dae-4280-9cd6-a7e57ab79152 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2736111, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.166117] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e03e2cd9-d346-425a-99bc-474f632bb5ed {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.174301] env[63028]: DEBUG oslo_vmware.api [None req-fffae308-21b3-4b5b-a9cb-3d79d782e7ea tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Task: {'id': task-2736109, 'name': PowerOffVM_Task, 'duration_secs': 0.213983} completed successfully. 
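The DeleteServersTestJSON spawn traced above proceeds in three task-driven steps: copy the cached image VMDK into the instance directory (task-2736108), extend the copy to the flavor's root size (task-2736111), then reconfigure the VM so the prepared disk is attached. A compact outline of that sequence, assuming a `driver` object whose methods stand in for the real calls; every name here is illustrative only.

```python
# Illustrative outline of the root-disk preparation visible in the log:
# copy cached image VMDK -> extend to root_gb -> attach via VM reconfigure.
# `driver` is a hypothetical object exposing copy_disk, extend_disk,
# attach_disk and wait_for_task; none of these are real Nova APIs.

def prepare_root_disk(driver, cache_vmdk, instance_vmdk, root_size_kb, vm_ref):
    # 1. Copy the per-image cache VMDK into the instance's datastore folder.
    task = driver.copy_disk(src=cache_vmdk, dst=instance_vmdk)
    driver.wait_for_task(task)

    # 2. Grow the copy to the flavor's root size (the log shows 1048576 KB).
    task = driver.extend_disk(instance_vmdk, size_kb=root_size_kb)
    driver.wait_for_task(task)

    # 3. Reconfigure the VM so the prepared VMDK becomes its root disk.
    task = driver.attach_disk(vm_ref, instance_vmdk, disk_type='sparse')
    driver.wait_for_task(task)
```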
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.176276] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-fffae308-21b3-4b5b-a9cb-3d79d782e7ea tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1028.176464] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-fffae308-21b3-4b5b-a9cb-3d79d782e7ea tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1028.176763] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e317739b-86e0-4a2c-9975-ffff5d421b10 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.179030] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-206dae78-a70c-4caa-b102-45142fa25bea {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.210401] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d1262f0-f132-42f7-b4d0-47c1a1781a46 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.219918] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c34a200-b7ba-476c-9066-2fe51352e6e8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.232354] env[63028]: DEBUG nova.compute.provider_tree [None req-f0751db6-5ce2-45a6-a330-fe5726ff7979 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1028.257152] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-fffae308-21b3-4b5b-a9cb-3d79d782e7ea tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1028.257385] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-fffae308-21b3-4b5b-a9cb-3d79d782e7ea tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Deleting contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1028.257451] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-fffae308-21b3-4b5b-a9cb-3d79d782e7ea tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Deleting the datastore file [datastore2] 63524cd8-81de-419f-bb07-0326f3cb393f {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1028.259435] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with 
opID=oslo.vmware-86a4835a-9ba1-4225-8006-3c56a81260e8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.263954] env[63028]: DEBUG oslo_vmware.api [None req-fffae308-21b3-4b5b-a9cb-3d79d782e7ea tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Waiting for the task: (returnval){ [ 1028.263954] env[63028]: value = "task-2736113" [ 1028.263954] env[63028]: _type = "Task" [ 1028.263954] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.272784] env[63028]: DEBUG oslo_vmware.api [None req-fffae308-21b3-4b5b-a9cb-3d79d782e7ea tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Task: {'id': task-2736113, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.320646] env[63028]: DEBUG nova.network.neutron [None req-82e835a5-f72a-41b4-bb91-607fdececc16 tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] [instance: da23282a-bbda-47bf-9d9c-337ee9996779] Instance cache missing network info. {{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1028.331975] env[63028]: DEBUG oslo_vmware.api [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2736110, 'name': ReconfigVM_Task, 'duration_secs': 0.174428} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.332350] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-550837', 'volume_id': '996ec8ec-5318-4963-9384-330f1e01190d', 'name': 'volume-996ec8ec-5318-4963-9384-330f1e01190d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': '85aafadb-81d6-4687-aed1-fbe829e5f95f', 'attached_at': '', 'detached_at': '', 'volume_id': '996ec8ec-5318-4963-9384-330f1e01190d', 'serial': '996ec8ec-5318-4963-9384-330f1e01190d'} {{(pid=63028) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1028.333073] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2a9643ad-228d-4fad-8fca-da59a2be8ffd {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.339403] env[63028]: DEBUG oslo_vmware.api [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Waiting for the task: (returnval){ [ 1028.339403] env[63028]: value = "task-2736114" [ 1028.339403] env[63028]: _type = "Task" [ 1028.339403] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.351563] env[63028]: DEBUG oslo_vmware.api [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2736114, 'name': Rename_Task} progress is 6%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.439764] env[63028]: DEBUG nova.network.neutron [None req-700d1c14-0bbf-4780-a97c-2e177b5c2496 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: b3930760-1888-4f80-85d8-65120a25f275] Successfully created port: f5040918-0c62-4758-8f50-01303c2a0180 {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1028.561945] env[63028]: INFO nova.compute.manager [None req-f2e14d1a-0ade-4b1f-a23f-8bb884e7b3df tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Detaching volume 15326c33-7e0b-41be-bf2e-5b82153cea0d [ 1028.610375] env[63028]: DEBUG oslo_vmware.api [None req-7e0e6b37-3dae-4280-9cd6-a7e57ab79152 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2736111, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074695} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.615173] env[63028]: INFO nova.virt.block_device [None req-f2e14d1a-0ade-4b1f-a23f-8bb884e7b3df tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Attempting to driver detach volume 15326c33-7e0b-41be-bf2e-5b82153cea0d from mountpoint /dev/sdb [ 1028.615421] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-f2e14d1a-0ade-4b1f-a23f-8bb884e7b3df tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Volume detach. 
Driver type: vmdk {{(pid=63028) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1028.615629] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-f2e14d1a-0ade-4b1f-a23f-8bb884e7b3df tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-550816', 'volume_id': '15326c33-7e0b-41be-bf2e-5b82153cea0d', 'name': 'volume-15326c33-7e0b-41be-bf2e-5b82153cea0d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '46dc76bc-854f-46ad-9db5-21cf6f40fb21', 'attached_at': '', 'detached_at': '', 'volume_id': '15326c33-7e0b-41be-bf2e-5b82153cea0d', 'serial': '15326c33-7e0b-41be-bf2e-5b82153cea0d'} {{(pid=63028) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1028.615914] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-7e0e6b37-3dae-4280-9cd6-a7e57ab79152 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: c492dea4-9779-4460-a559-5b82fb0643f0] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1028.616692] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c631d53a-b51b-4544-b313-ec4b1415151c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.619633] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c210348f-798d-40a1-ba6f-f83ce980a173 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.669877] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-7e0e6b37-3dae-4280-9cd6-a7e57ab79152 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: c492dea4-9779-4460-a559-5b82fb0643f0] Reconfiguring VM instance instance-0000005f to attach disk [datastore1] c492dea4-9779-4460-a559-5b82fb0643f0/c492dea4-9779-4460-a559-5b82fb0643f0.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1028.675657] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72f82852-054a-4694-bab2-d580a8206127 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.679203] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ffe687b7-4034-48dd-af72-49394428bf2e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.701662] env[63028]: DEBUG oslo_vmware.api [None req-7e0e6b37-3dae-4280-9cd6-a7e57ab79152 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Waiting for the task: (returnval){ [ 1028.701662] env[63028]: value = "task-2736115" [ 1028.701662] env[63028]: _type = "Task" [ 1028.701662] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.702505] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e945369-69cd-4291-b358-6ab5720c8a1e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.730967] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03387b2d-5d42-4175-a57a-fb54d208916c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.734149] env[63028]: DEBUG oslo_vmware.api [None req-7e0e6b37-3dae-4280-9cd6-a7e57ab79152 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2736115, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.736090] env[63028]: DEBUG nova.scheduler.client.report [None req-f0751db6-5ce2-45a6-a330-fe5726ff7979 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1028.750526] env[63028]: DEBUG nova.compute.manager [None req-700d1c14-0bbf-4780-a97c-2e177b5c2496 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: b3930760-1888-4f80-85d8-65120a25f275] Start spawning the instance on the hypervisor. {{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1028.752586] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-f2e14d1a-0ade-4b1f-a23f-8bb884e7b3df tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] The volume has not been displaced from its original location: [datastore2] volume-15326c33-7e0b-41be-bf2e-5b82153cea0d/volume-15326c33-7e0b-41be-bf2e-5b82153cea0d.vmdk. No consolidation needed. 
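"The volume has not been displaced from its original location ... No consolidation needed" further down reflects a pre-detach check: compare the disk's current backing path with the path recorded when the volume was attached, and only consolidate or relocate when they differ. A simplified sketch of that comparison; the path normalisation is deliberately naive and the function name is invented.

```python
# Illustrative displacement check: detach can skip consolidation when the
# backing file is still where the attach left it.

def needs_consolidation(current_backing_path, original_backing_path):
    def norm(path):
        # Datastore paths look like "[datastore2] dir/volume-....vmdk";
        # normalise case and whitespace so equivalent paths compare equal.
        return path.strip().lower()
    return norm(current_backing_path) != norm(original_backing_path)
```

For the case logged here, both paths point at the same volume VMDK, so the check returns False and the detach goes straight to the VM reconfigure.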
{{(pid=63028) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1028.758029] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-f2e14d1a-0ade-4b1f-a23f-8bb884e7b3df tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Reconfiguring VM instance instance-0000004e to detach disk 2001 {{(pid=63028) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1028.759746] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2a14b6a9-f1d5-4fe0-bb10-f73f49950b1f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.782488] env[63028]: DEBUG oslo_vmware.api [None req-fffae308-21b3-4b5b-a9cb-3d79d782e7ea tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Task: {'id': task-2736113, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.783911] env[63028]: DEBUG oslo_vmware.api [None req-f2e14d1a-0ade-4b1f-a23f-8bb884e7b3df tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Waiting for the task: (returnval){ [ 1028.783911] env[63028]: value = "task-2736116" [ 1028.783911] env[63028]: _type = "Task" [ 1028.783911] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.798541] env[63028]: DEBUG oslo_vmware.api [None req-f2e14d1a-0ade-4b1f-a23f-8bb884e7b3df tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2736116, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.800653] env[63028]: DEBUG nova.virt.hardware [None req-700d1c14-0bbf-4780-a97c-2e177b5c2496 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1028.800884] env[63028]: DEBUG nova.virt.hardware [None req-700d1c14-0bbf-4780-a97c-2e177b5c2496 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1028.801066] env[63028]: DEBUG nova.virt.hardware [None req-700d1c14-0bbf-4780-a97c-2e177b5c2496 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1028.801325] env[63028]: DEBUG nova.virt.hardware [None req-700d1c14-0bbf-4780-a97c-2e177b5c2496 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1028.801562] env[63028]: DEBUG nova.virt.hardware [None req-700d1c14-0bbf-4780-a97c-2e177b5c2496 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1028.801689] env[63028]: DEBUG nova.virt.hardware [None req-700d1c14-0bbf-4780-a97c-2e177b5c2496 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1028.801863] env[63028]: DEBUG nova.virt.hardware [None req-700d1c14-0bbf-4780-a97c-2e177b5c2496 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1028.802060] env[63028]: DEBUG nova.virt.hardware [None req-700d1c14-0bbf-4780-a97c-2e177b5c2496 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1028.803038] env[63028]: DEBUG nova.virt.hardware [None req-700d1c14-0bbf-4780-a97c-2e177b5c2496 tempest-ServersTestJSON-699810364 
tempest-ServersTestJSON-699810364-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1028.803038] env[63028]: DEBUG nova.virt.hardware [None req-700d1c14-0bbf-4780-a97c-2e177b5c2496 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1028.803038] env[63028]: DEBUG nova.virt.hardware [None req-700d1c14-0bbf-4780-a97c-2e177b5c2496 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1028.804231] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84ca3ec9-b84b-4c1b-9eb9-d4cbe8951c7f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.807770] env[63028]: DEBUG nova.network.neutron [None req-82e835a5-f72a-41b4-bb91-607fdececc16 tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] [instance: da23282a-bbda-47bf-9d9c-337ee9996779] Updating instance_info_cache with network_info: [{"id": "677d4bb7-eef2-4be9-9671-9c92ee555906", "address": "fa:16:3e:7d:ea:86", "network": {"id": "a43c44c1-7a2d-490a-8521-a9880776ffa1", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-2018288945-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "15fbb152d6224fc3928fc0b0cc9c0a29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4cb37d4-2060-48b6-9e60-156a71fc7ee3", "external-id": "nsx-vlan-transportzone-819", "segmentation_id": 819, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap677d4bb7-ee", "ovs_interfaceid": "677d4bb7-eef2-4be9-9671-9c92ee555906", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1028.817503] env[63028]: DEBUG oslo_concurrency.lockutils [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Acquiring lock "3566ab6f-1f8a-472d-9efb-47fa2520a215" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1028.817503] env[63028]: DEBUG oslo_concurrency.lockutils [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Lock "3566ab6f-1f8a-472d-9efb-47fa2520a215" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63028) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1028.823034] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-785aaeae-3963-4aca-ac31-49f05060b39f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.851430] env[63028]: DEBUG oslo_vmware.api [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2736114, 'name': Rename_Task} progress is 99%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.857637] env[63028]: DEBUG oslo_concurrency.lockutils [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Acquiring lock "1cf111f2-df5e-48a6-905a-bc2d3ea45202" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1028.857873] env[63028]: DEBUG oslo_concurrency.lockutils [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Lock "1cf111f2-df5e-48a6-905a-bc2d3ea45202" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1029.099626] env[63028]: DEBUG nova.network.neutron [None req-0b092587-9660-410c-9b74-a77157aa502f tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] [instance: b16d85d7-13f3-4be0-8495-2fd2c1476f01] Updating instance_info_cache with network_info: [{"id": "47a80dc6-e819-410f-8257-cfc2cce0d3d9", "address": "fa:16:3e:b7:57:e4", "network": {"id": "8fd5dd35-416e-45e2-aea5-8c2c22752ef7", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1819490475-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bae448aa28a84aa6863fffc24a5448fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "489b2441-7132-4942-8b61-49cf0ad4400e", "external-id": "nsx-vlan-transportzone-971", "segmentation_id": 971, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47a80dc6-e8", "ovs_interfaceid": "47a80dc6-e819-410f-8257-cfc2cce0d3d9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1029.216038] env[63028]: DEBUG oslo_vmware.api [None req-7e0e6b37-3dae-4280-9cd6-a7e57ab79152 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2736115, 'name': ReconfigVM_Task} progress is 14%. 
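The hardware lines a few records back ("Build topologies for 1 vcpu(s) 1:1:1" through "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]") amount to enumerating sockets/cores/threads combinations whose product equals the vCPU count, bounded by the flavor and image maxima. A simplified, self-contained sketch of that search follows; the class and function are illustrative, not Nova's own.

```python
from dataclasses import dataclass
from itertools import product

# Illustrative topology enumeration: keep every (sockets, cores, threads)
# triple within the caps whose product equals the requested vCPU count.

@dataclass(frozen=True)
class CPUTopology:
    sockets: int
    cores: int
    threads: int

def possible_topologies(vcpus, max_sockets, max_cores, max_threads):
    found = []
    for s, c, t in product(range(1, max_sockets + 1),
                           range(1, max_cores + 1),
                           range(1, max_threads + 1)):
        if s * c * t == vcpus:
            found.append(CPUTopology(sockets=s, cores=c, threads=t))
    return found
```

Scanning all the way up to the 65536 caps seen in the log would be wasteful for a brute-force sketch like this; with small caps, possible_topologies(1, 4, 4, 2) returns just [CPUTopology(sockets=1, cores=1, threads=1)], matching the single topology chosen above.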
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.238926] env[63028]: DEBUG nova.compute.manager [req-ce40d6e9-1e5a-442e-b20f-23d1a449248e req-86848474-ba70-4da6-a70a-86909d598f47 service nova] [instance: da23282a-bbda-47bf-9d9c-337ee9996779] Received event network-changed-677d4bb7-eef2-4be9-9671-9c92ee555906 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1029.239128] env[63028]: DEBUG nova.compute.manager [req-ce40d6e9-1e5a-442e-b20f-23d1a449248e req-86848474-ba70-4da6-a70a-86909d598f47 service nova] [instance: da23282a-bbda-47bf-9d9c-337ee9996779] Refreshing instance network info cache due to event network-changed-677d4bb7-eef2-4be9-9671-9c92ee555906. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1029.239331] env[63028]: DEBUG oslo_concurrency.lockutils [req-ce40d6e9-1e5a-442e-b20f-23d1a449248e req-86848474-ba70-4da6-a70a-86909d598f47 service nova] Acquiring lock "refresh_cache-da23282a-bbda-47bf-9d9c-337ee9996779" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1029.261735] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f0751db6-5ce2-45a6-a330-fe5726ff7979 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.538s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1029.261735] env[63028]: DEBUG nova.compute.manager [None req-f0751db6-5ce2-45a6-a330-fe5726ff7979 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] [instance: f804ec95-0b97-4960-844d-b678b97fc401] Start building networks asynchronously for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1029.263607] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7877c216-7073-4971-8e3d-6be7dc600ebf tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.847s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1029.264364] env[63028]: DEBUG nova.objects.instance [None req-7877c216-7073-4971-8e3d-6be7dc600ebf tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Lazy-loading 'resources' on Instance uuid b77ba7d6-305e-4b60-a4b7-9353c12c3920 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1029.296625] env[63028]: DEBUG oslo_vmware.api [None req-fffae308-21b3-4b5b-a9cb-3d79d782e7ea tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Task: {'id': task-2736113, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.630486} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.297957] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-fffae308-21b3-4b5b-a9cb-3d79d782e7ea tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1029.298063] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-fffae308-21b3-4b5b-a9cb-3d79d782e7ea tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Deleted contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1029.298288] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-fffae308-21b3-4b5b-a9cb-3d79d782e7ea tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1029.298541] env[63028]: INFO nova.compute.manager [None req-fffae308-21b3-4b5b-a9cb-3d79d782e7ea tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Took 1.66 seconds to destroy the instance on the hypervisor. [ 1029.298881] env[63028]: DEBUG oslo.service.loopingcall [None req-fffae308-21b3-4b5b-a9cb-3d79d782e7ea tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1029.303772] env[63028]: DEBUG nova.compute.manager [-] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1029.303913] env[63028]: DEBUG nova.network.neutron [-] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1029.306981] env[63028]: DEBUG oslo_vmware.api [None req-f2e14d1a-0ade-4b1f-a23f-8bb884e7b3df tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2736116, 'name': ReconfigVM_Task, 'duration_secs': 0.250095} completed successfully. 
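The "_deallocate_network_with_retries" wait above wraps network cleanup in a retry loop: call the cleanup function, and if it raises, sleep and try again a bounded number of times before giving up. A generic sketch of that wrapper; this shows the pattern only, not oslo.service's looping-call implementation.

```python
import time

# Illustrative bounded-retry wrapper for a cleanup callable.

def call_with_retries(func, attempts=3, delay=1.0, retry_on=(Exception,)):
    if attempts < 1:
        raise ValueError("attempts must be at least 1")
    last_exc = None
    for attempt in range(1, attempts + 1):
        try:
            return func()
        except retry_on as exc:
            last_exc = exc
            if attempt < attempts:
                time.sleep(delay)  # back off before retrying
    raise last_exc
```

A caller would pass the network deallocation as `func`, e.g. `call_with_retries(lambda: deallocate(instance))`, and the surrounding service loop simply waits for the wrapper to return, which is what the "Waiting for function ... to return" records describe.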
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.307677] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-f2e14d1a-0ade-4b1f-a23f-8bb884e7b3df tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Reconfigured VM instance instance-0000004e to detach disk 2001 {{(pid=63028) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1029.316612] env[63028]: DEBUG oslo_concurrency.lockutils [None req-82e835a5-f72a-41b4-bb91-607fdececc16 tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] Releasing lock "refresh_cache-da23282a-bbda-47bf-9d9c-337ee9996779" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1029.317960] env[63028]: DEBUG nova.compute.manager [None req-82e835a5-f72a-41b4-bb91-607fdececc16 tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] [instance: da23282a-bbda-47bf-9d9c-337ee9996779] Instance network_info: |[{"id": "677d4bb7-eef2-4be9-9671-9c92ee555906", "address": "fa:16:3e:7d:ea:86", "network": {"id": "a43c44c1-7a2d-490a-8521-a9880776ffa1", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-2018288945-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "15fbb152d6224fc3928fc0b0cc9c0a29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4cb37d4-2060-48b6-9e60-156a71fc7ee3", "external-id": "nsx-vlan-transportzone-819", "segmentation_id": 819, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap677d4bb7-ee", "ovs_interfaceid": "677d4bb7-eef2-4be9-9671-9c92ee555906", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1029.318413] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-13869682-d810-45dc-a7b2-bcbdc2d51deb {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.329087] env[63028]: DEBUG oslo_concurrency.lockutils [req-ce40d6e9-1e5a-442e-b20f-23d1a449248e req-86848474-ba70-4da6-a70a-86909d598f47 service nova] Acquired lock "refresh_cache-da23282a-bbda-47bf-9d9c-337ee9996779" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1029.329310] env[63028]: DEBUG nova.network.neutron [req-ce40d6e9-1e5a-442e-b20f-23d1a449248e req-86848474-ba70-4da6-a70a-86909d598f47 service nova] [instance: da23282a-bbda-47bf-9d9c-337ee9996779] Refreshing network info cache for port 677d4bb7-eef2-4be9-9671-9c92ee555906 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1029.331281] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-82e835a5-f72a-41b4-bb91-607fdececc16 tempest-ServerAddressesTestJSON-2051703973 
tempest-ServerAddressesTestJSON-2051703973-project-member] [instance: da23282a-bbda-47bf-9d9c-337ee9996779] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7d:ea:86', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd4cb37d4-2060-48b6-9e60-156a71fc7ee3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '677d4bb7-eef2-4be9-9671-9c92ee555906', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1029.338382] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-82e835a5-f72a-41b4-bb91-607fdececc16 tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] Creating folder: Project (15fbb152d6224fc3928fc0b0cc9c0a29). Parent ref: group-v550570. {{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1029.338703] env[63028]: DEBUG nova.network.neutron [-] [instance: c0693e4c-30b2-4eda-be1e-f6186d78038b] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1029.343553] env[63028]: DEBUG nova.compute.manager [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 3566ab6f-1f8a-472d-9efb-47fa2520a215] Starting instance... {{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1029.345327] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4fe4cef4-7971-400d-a6ae-ed1f4c92f496 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.356805] env[63028]: DEBUG oslo_vmware.api [None req-f2e14d1a-0ade-4b1f-a23f-8bb884e7b3df tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Waiting for the task: (returnval){ [ 1029.356805] env[63028]: value = "task-2736117" [ 1029.356805] env[63028]: _type = "Task" [ 1029.356805] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.363836] env[63028]: DEBUG nova.compute.manager [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 1cf111f2-df5e-48a6-905a-bc2d3ea45202] Starting instance... {{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1029.366538] env[63028]: DEBUG oslo_vmware.api [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2736114, 'name': Rename_Task, 'duration_secs': 0.535454} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.374023] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1029.374023] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-82e835a5-f72a-41b4-bb91-607fdececc16 tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] Created folder: Project (15fbb152d6224fc3928fc0b0cc9c0a29) in parent group-v550570. [ 1029.374023] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-82e835a5-f72a-41b4-bb91-607fdececc16 tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] Creating folder: Instances. Parent ref: group-v550848. {{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1029.375032] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d960d39d-c9e6-435a-8ec3-78e638a9e4fe {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.379251] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-90881471-4aae-40bc-9886-3bd86949372a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.379566] env[63028]: DEBUG oslo_vmware.api [None req-f2e14d1a-0ade-4b1f-a23f-8bb884e7b3df tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2736117, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.384784] env[63028]: DEBUG oslo_vmware.api [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Waiting for the task: (returnval){ [ 1029.384784] env[63028]: value = "task-2736119" [ 1029.384784] env[63028]: _type = "Task" [ 1029.384784] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.390655] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-82e835a5-f72a-41b4-bb91-607fdececc16 tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] Created folder: Instances in parent group-v550848. [ 1029.390906] env[63028]: DEBUG oslo.service.loopingcall [None req-82e835a5-f72a-41b4-bb91-607fdececc16 tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1029.391499] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: da23282a-bbda-47bf-9d9c-337ee9996779] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1029.391740] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e87c54ec-61ae-476e-a6c4-ae671aa2b94a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.410291] env[63028]: DEBUG oslo_vmware.api [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2736119, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.416359] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1029.416359] env[63028]: value = "task-2736121" [ 1029.416359] env[63028]: _type = "Task" [ 1029.416359] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.429412] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2736121, 'name': CreateVM_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.602562] env[63028]: DEBUG oslo_concurrency.lockutils [None req-0b092587-9660-410c-9b74-a77157aa502f tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Releasing lock "refresh_cache-b16d85d7-13f3-4be0-8495-2fd2c1476f01" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1029.717663] env[63028]: DEBUG oslo_vmware.api [None req-7e0e6b37-3dae-4280-9cd6-a7e57ab79152 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2736115, 'name': ReconfigVM_Task, 'duration_secs': 0.766207} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.717951] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-7e0e6b37-3dae-4280-9cd6-a7e57ab79152 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: c492dea4-9779-4460-a559-5b82fb0643f0] Reconfigured VM instance instance-0000005f to attach disk [datastore1] c492dea4-9779-4460-a559-5b82fb0643f0/c492dea4-9779-4460-a559-5b82fb0643f0.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1029.718691] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3657acb0-3a0e-475b-9f6c-7cf9476135a2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.725510] env[63028]: DEBUG oslo_vmware.api [None req-7e0e6b37-3dae-4280-9cd6-a7e57ab79152 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Waiting for the task: (returnval){ [ 1029.725510] env[63028]: value = "task-2736122" [ 1029.725510] env[63028]: _type = "Task" [ 1029.725510] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.734962] env[63028]: DEBUG oslo_vmware.api [None req-7e0e6b37-3dae-4280-9cd6-a7e57ab79152 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2736122, 'name': Rename_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.768700] env[63028]: DEBUG nova.compute.utils [None req-f0751db6-5ce2-45a6-a330-fe5726ff7979 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1029.776110] env[63028]: DEBUG nova.compute.manager [None req-f0751db6-5ce2-45a6-a330-fe5726ff7979 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] [instance: f804ec95-0b97-4960-844d-b678b97fc401] Allocating IP information in the background. {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1029.776338] env[63028]: DEBUG nova.network.neutron [None req-f0751db6-5ce2-45a6-a330-fe5726ff7979 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] [instance: f804ec95-0b97-4960-844d-b678b97fc401] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1029.785909] env[63028]: DEBUG nova.network.neutron [req-ce40d6e9-1e5a-442e-b20f-23d1a449248e req-86848474-ba70-4da6-a70a-86909d598f47 service nova] [instance: da23282a-bbda-47bf-9d9c-337ee9996779] Updated VIF entry in instance network info cache for port 677d4bb7-eef2-4be9-9671-9c92ee555906. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1029.789695] env[63028]: DEBUG nova.network.neutron [req-ce40d6e9-1e5a-442e-b20f-23d1a449248e req-86848474-ba70-4da6-a70a-86909d598f47 service nova] [instance: da23282a-bbda-47bf-9d9c-337ee9996779] Updating instance_info_cache with network_info: [{"id": "677d4bb7-eef2-4be9-9671-9c92ee555906", "address": "fa:16:3e:7d:ea:86", "network": {"id": "a43c44c1-7a2d-490a-8521-a9880776ffa1", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-2018288945-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "15fbb152d6224fc3928fc0b0cc9c0a29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4cb37d4-2060-48b6-9e60-156a71fc7ee3", "external-id": "nsx-vlan-transportzone-819", "segmentation_id": 819, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap677d4bb7-ee", "ovs_interfaceid": "677d4bb7-eef2-4be9-9671-9c92ee555906", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1029.848478] env[63028]: INFO nova.compute.manager [-] [instance: c0693e4c-30b2-4eda-be1e-f6186d78038b] Took 2.55 seconds to deallocate network for instance. [ 1029.879304] env[63028]: DEBUG nova.policy [None req-f0751db6-5ce2-45a6-a330-fe5726ff7979 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ebee998098894863bb772bd6b77fe4df', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '91a3f78ba4514500bfd4ed81b74526e3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 1029.900801] env[63028]: DEBUG oslo_vmware.api [None req-f2e14d1a-0ade-4b1f-a23f-8bb884e7b3df tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2736117, 'name': ReconfigVM_Task, 'duration_secs': 0.188418} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.901093] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-f2e14d1a-0ade-4b1f-a23f-8bb884e7b3df tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-550816', 'volume_id': '15326c33-7e0b-41be-bf2e-5b82153cea0d', 'name': 'volume-15326c33-7e0b-41be-bf2e-5b82153cea0d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '46dc76bc-854f-46ad-9db5-21cf6f40fb21', 'attached_at': '', 'detached_at': '', 'volume_id': '15326c33-7e0b-41be-bf2e-5b82153cea0d', 'serial': '15326c33-7e0b-41be-bf2e-5b82153cea0d'} {{(pid=63028) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1029.909927] env[63028]: DEBUG oslo_vmware.api [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2736119, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.914256] env[63028]: DEBUG oslo_concurrency.lockutils [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1029.934017] env[63028]: DEBUG oslo_concurrency.lockutils [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1029.943050] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2736121, 'name': CreateVM_Task, 'duration_secs': 0.457529} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.946063] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: da23282a-bbda-47bf-9d9c-337ee9996779] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1029.946063] env[63028]: DEBUG oslo_concurrency.lockutils [None req-82e835a5-f72a-41b4-bb91-607fdececc16 tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1029.946063] env[63028]: DEBUG oslo_concurrency.lockutils [None req-82e835a5-f72a-41b4-bb91-607fdececc16 tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1029.946063] env[63028]: DEBUG oslo_concurrency.lockutils [None req-82e835a5-f72a-41b4-bb91-607fdececc16 tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1029.946063] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aba9e515-4f54-42b8-a7ab-2fade3bd82f1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.950147] env[63028]: DEBUG oslo_vmware.api [None req-82e835a5-f72a-41b4-bb91-607fdececc16 tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] Waiting for the task: (returnval){ [ 1029.950147] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]528debc4-0b6d-623a-e342-d74939dcb6db" [ 1029.950147] env[63028]: _type = "Task" [ 1029.950147] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.963028] env[63028]: DEBUG oslo_vmware.api [None req-82e835a5-f72a-41b4-bb91-607fdececc16 tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]528debc4-0b6d-623a-e342-d74939dcb6db, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.107182] env[63028]: DEBUG nova.compute.manager [None req-0b092587-9660-410c-9b74-a77157aa502f tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] [instance: b16d85d7-13f3-4be0-8495-2fd2c1476f01] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1030.108081] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4793f8dc-ee9d-4a18-b847-b35a385e22d8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.236531] env[63028]: DEBUG oslo_vmware.api [None req-7e0e6b37-3dae-4280-9cd6-a7e57ab79152 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2736122, 'name': Rename_Task, 'duration_secs': 0.167194} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1030.239713] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e0e6b37-3dae-4280-9cd6-a7e57ab79152 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: c492dea4-9779-4460-a559-5b82fb0643f0] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1030.240212] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-17ea563d-3af6-4181-a4a3-2a08810d9b90 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.247251] env[63028]: DEBUG oslo_vmware.api [None req-7e0e6b37-3dae-4280-9cd6-a7e57ab79152 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Waiting for the task: (returnval){ [ 1030.247251] env[63028]: value = "task-2736123" [ 1030.247251] env[63028]: _type = "Task" [ 1030.247251] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1030.259781] env[63028]: DEBUG oslo_vmware.api [None req-7e0e6b37-3dae-4280-9cd6-a7e57ab79152 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2736123, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.276985] env[63028]: DEBUG nova.compute.manager [None req-f0751db6-5ce2-45a6-a330-fe5726ff7979 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] [instance: f804ec95-0b97-4960-844d-b678b97fc401] Start building block device mappings for instance. 
{{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1030.283028] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e2fb2ee-a1d2-4f4f-8f8c-c6a7d4905114 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.288660] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2747c45c-c425-4b31-a570-0ffa051e7d57 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.294299] env[63028]: DEBUG oslo_concurrency.lockutils [req-ce40d6e9-1e5a-442e-b20f-23d1a449248e req-86848474-ba70-4da6-a70a-86909d598f47 service nova] Releasing lock "refresh_cache-da23282a-bbda-47bf-9d9c-337ee9996779" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1030.294299] env[63028]: DEBUG nova.compute.manager [req-ce40d6e9-1e5a-442e-b20f-23d1a449248e req-86848474-ba70-4da6-a70a-86909d598f47 service nova] [instance: c0693e4c-30b2-4eda-be1e-f6186d78038b] Received event network-vif-deleted-7c42e931-e162-4201-8483-8606a86e0dff {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1030.294299] env[63028]: INFO nova.compute.manager [req-ce40d6e9-1e5a-442e-b20f-23d1a449248e req-86848474-ba70-4da6-a70a-86909d598f47 service nova] [instance: c0693e4c-30b2-4eda-be1e-f6186d78038b] Neutron deleted interface 7c42e931-e162-4201-8483-8606a86e0dff; detaching it from the instance and deleting it from the info cache [ 1030.294299] env[63028]: DEBUG nova.network.neutron [req-ce40d6e9-1e5a-442e-b20f-23d1a449248e req-86848474-ba70-4da6-a70a-86909d598f47 service nova] [instance: c0693e4c-30b2-4eda-be1e-f6186d78038b] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1030.324179] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a12cb3b-79ca-4858-b713-0f83d0becf4a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.333922] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-876247a1-195a-4d65-a757-b9b510febcce {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.348882] env[63028]: DEBUG nova.compute.provider_tree [None req-7877c216-7073-4971-8e3d-6be7dc600ebf tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1030.368128] env[63028]: DEBUG oslo_concurrency.lockutils [None req-26d9c995-1761-41dc-941a-dd0d7237cc1c tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1030.388335] env[63028]: DEBUG nova.network.neutron [-] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 1030.402342] env[63028]: DEBUG oslo_vmware.api [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2736119, 'name': PowerOnVM_Task, 'duration_secs': 0.583847} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1030.404135] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1030.460873] env[63028]: DEBUG oslo_vmware.api [None req-82e835a5-f72a-41b4-bb91-607fdececc16 tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]528debc4-0b6d-623a-e342-d74939dcb6db, 'name': SearchDatastore_Task, 'duration_secs': 0.017645} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1030.461211] env[63028]: DEBUG oslo_concurrency.lockutils [None req-82e835a5-f72a-41b4-bb91-607fdececc16 tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1030.461448] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-82e835a5-f72a-41b4-bb91-607fdececc16 tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] [instance: da23282a-bbda-47bf-9d9c-337ee9996779] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1030.461683] env[63028]: DEBUG oslo_concurrency.lockutils [None req-82e835a5-f72a-41b4-bb91-607fdececc16 tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1030.461831] env[63028]: DEBUG oslo_concurrency.lockutils [None req-82e835a5-f72a-41b4-bb91-607fdececc16 tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1030.462015] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-82e835a5-f72a-41b4-bb91-607fdececc16 tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1030.462283] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ce709095-deef-41f2-9956-c4c60ee5234c {{(pid=63028) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.471694] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-82e835a5-f72a-41b4-bb91-607fdececc16 tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1030.471910] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-82e835a5-f72a-41b4-bb91-607fdececc16 tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1030.473042] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d727bc67-8f4a-44a9-962f-637af73dc1a7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.479037] env[63028]: DEBUG oslo_vmware.api [None req-82e835a5-f72a-41b4-bb91-607fdececc16 tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] Waiting for the task: (returnval){ [ 1030.479037] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52b6b031-310e-fa89-f1a7-bb01188ebb9d" [ 1030.479037] env[63028]: _type = "Task" [ 1030.479037] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1030.484197] env[63028]: DEBUG nova.objects.instance [None req-f2e14d1a-0ade-4b1f-a23f-8bb884e7b3df tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Lazy-loading 'flavor' on Instance uuid 46dc76bc-854f-46ad-9db5-21cf6f40fb21 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1030.486449] env[63028]: DEBUG nova.network.neutron [None req-f0751db6-5ce2-45a6-a330-fe5726ff7979 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] [instance: f804ec95-0b97-4960-844d-b678b97fc401] Successfully created port: 33f3a6b3-ea0e-4b6c-a0c5-80d9100ff94e {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1030.493696] env[63028]: DEBUG oslo_vmware.api [None req-82e835a5-f72a-41b4-bb91-607fdececc16 tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52b6b031-310e-fa89-f1a7-bb01188ebb9d, 'name': SearchDatastore_Task, 'duration_secs': 0.010627} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1030.494717] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2763aaa5-f8d4-447e-bccb-2bb262c5ffad {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.499876] env[63028]: DEBUG oslo_vmware.api [None req-82e835a5-f72a-41b4-bb91-607fdececc16 tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] Waiting for the task: (returnval){ [ 1030.499876] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5209421d-9b5e-3fd3-9119-dbcb5f971086" [ 1030.499876] env[63028]: _type = "Task" [ 1030.499876] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1030.509032] env[63028]: DEBUG oslo_vmware.api [None req-82e835a5-f72a-41b4-bb91-607fdececc16 tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5209421d-9b5e-3fd3-9119-dbcb5f971086, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.541578] env[63028]: DEBUG nova.compute.manager [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1030.542546] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7638c165-0c19-4b5f-b5b3-44a0bfd8ae6e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.716573] env[63028]: DEBUG nova.network.neutron [None req-700d1c14-0bbf-4780-a97c-2e177b5c2496 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: b3930760-1888-4f80-85d8-65120a25f275] Successfully updated port: f5040918-0c62-4758-8f50-01303c2a0180 {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1030.760436] env[63028]: DEBUG oslo_vmware.api [None req-7e0e6b37-3dae-4280-9cd6-a7e57ab79152 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2736123, 'name': PowerOnVM_Task} progress is 78%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.796138] env[63028]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f7069ee6-a281-4db9-9c90-d434b3778cc4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.805637] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bd1d27f-076c-4026-9d99-2939c14d0ec0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.840164] env[63028]: DEBUG nova.compute.manager [req-ce40d6e9-1e5a-442e-b20f-23d1a449248e req-86848474-ba70-4da6-a70a-86909d598f47 service nova] [instance: c0693e4c-30b2-4eda-be1e-f6186d78038b] Detach interface failed, port_id=7c42e931-e162-4201-8483-8606a86e0dff, reason: Instance c0693e4c-30b2-4eda-be1e-f6186d78038b could not be found. 
{{(pid=63028) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1030.852765] env[63028]: DEBUG nova.scheduler.client.report [None req-7877c216-7073-4971-8e3d-6be7dc600ebf tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1030.897566] env[63028]: INFO nova.compute.manager [-] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Took 1.59 seconds to deallocate network for instance. [ 1031.011285] env[63028]: DEBUG oslo_vmware.api [None req-82e835a5-f72a-41b4-bb91-607fdececc16 tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5209421d-9b5e-3fd3-9119-dbcb5f971086, 'name': SearchDatastore_Task, 'duration_secs': 0.009367} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1031.011564] env[63028]: DEBUG oslo_concurrency.lockutils [None req-82e835a5-f72a-41b4-bb91-607fdececc16 tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1031.011823] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-82e835a5-f72a-41b4-bb91-607fdececc16 tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] da23282a-bbda-47bf-9d9c-337ee9996779/da23282a-bbda-47bf-9d9c-337ee9996779.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1031.012173] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-693b8cfd-7700-4f92-8c0b-54aae564ed84 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.019198] env[63028]: DEBUG oslo_vmware.api [None req-82e835a5-f72a-41b4-bb91-607fdececc16 tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] Waiting for the task: (returnval){ [ 1031.019198] env[63028]: value = "task-2736124" [ 1031.019198] env[63028]: _type = "Task" [ 1031.019198] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.028058] env[63028]: DEBUG oslo_vmware.api [None req-82e835a5-f72a-41b4-bb91-607fdececc16 tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] Task: {'id': task-2736124, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.061092] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f9a982f3-cd3d-42a6-b07f-e8c7604311ee tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Lock "85aafadb-81d6-4687-aed1-fbe829e5f95f" "released" by "nova.compute.manager.ComputeManager.unshelve_instance.<locals>.do_unshelve_instance" :: held 38.380s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1031.129173] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92087f7d-8c3b-43d9-ae9b-627a363c0d37 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.136258] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-0b092587-9660-410c-9b74-a77157aa502f tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] [instance: b16d85d7-13f3-4be0-8495-2fd2c1476f01] Doing hard reboot of VM {{(pid=63028) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 1031.136510] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-fab56edf-c844-462a-863e-68fd33ae384d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.141890] env[63028]: DEBUG oslo_vmware.api [None req-0b092587-9660-410c-9b74-a77157aa502f tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Waiting for the task: (returnval){ [ 1031.141890] env[63028]: value = "task-2736125" [ 1031.141890] env[63028]: _type = "Task" [ 1031.141890] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.149062] env[63028]: DEBUG oslo_vmware.api [None req-0b092587-9660-410c-9b74-a77157aa502f tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Task: {'id': task-2736125, 'name': ResetVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.220234] env[63028]: DEBUG oslo_concurrency.lockutils [None req-700d1c14-0bbf-4780-a97c-2e177b5c2496 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Acquiring lock "refresh_cache-b3930760-1888-4f80-85d8-65120a25f275" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1031.220234] env[63028]: DEBUG oslo_concurrency.lockutils [None req-700d1c14-0bbf-4780-a97c-2e177b5c2496 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Acquired lock "refresh_cache-b3930760-1888-4f80-85d8-65120a25f275" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1031.220234] env[63028]: DEBUG nova.network.neutron [None req-700d1c14-0bbf-4780-a97c-2e177b5c2496 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: b3930760-1888-4f80-85d8-65120a25f275] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1031.259891] env[63028]: DEBUG oslo_vmware.api [None req-7e0e6b37-3dae-4280-9cd6-a7e57ab79152 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2736123, 'name': PowerOnVM_Task, 'duration_secs': 0.906579} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1031.260232] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e0e6b37-3dae-4280-9cd6-a7e57ab79152 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: c492dea4-9779-4460-a559-5b82fb0643f0] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1031.260970] env[63028]: INFO nova.compute.manager [None req-7e0e6b37-3dae-4280-9cd6-a7e57ab79152 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: c492dea4-9779-4460-a559-5b82fb0643f0] Took 9.53 seconds to spawn the instance on the hypervisor. 
[ 1031.260970] env[63028]: DEBUG nova.compute.manager [None req-7e0e6b37-3dae-4280-9cd6-a7e57ab79152 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: c492dea4-9779-4460-a559-5b82fb0643f0] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1031.261749] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdf5c9c2-4f49-439e-8936-d4562165d329 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.274761] env[63028]: DEBUG nova.compute.manager [req-53f66148-db46-4291-bb4d-4d5bab83afcc req-424d7e59-7ae1-4660-81b8-47df7aa958a2 service nova] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Received event network-vif-deleted-296dfd9e-84e1-4ea8-bd17-28920a6a048b {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1031.275165] env[63028]: DEBUG nova.compute.manager [req-53f66148-db46-4291-bb4d-4d5bab83afcc req-424d7e59-7ae1-4660-81b8-47df7aa958a2 service nova] [instance: b3930760-1888-4f80-85d8-65120a25f275] Received event network-vif-plugged-f5040918-0c62-4758-8f50-01303c2a0180 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1031.275415] env[63028]: DEBUG oslo_concurrency.lockutils [req-53f66148-db46-4291-bb4d-4d5bab83afcc req-424d7e59-7ae1-4660-81b8-47df7aa958a2 service nova] Acquiring lock "b3930760-1888-4f80-85d8-65120a25f275-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1031.275676] env[63028]: DEBUG oslo_concurrency.lockutils [req-53f66148-db46-4291-bb4d-4d5bab83afcc req-424d7e59-7ae1-4660-81b8-47df7aa958a2 service nova] Lock "b3930760-1888-4f80-85d8-65120a25f275-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1031.275879] env[63028]: DEBUG oslo_concurrency.lockutils [req-53f66148-db46-4291-bb4d-4d5bab83afcc req-424d7e59-7ae1-4660-81b8-47df7aa958a2 service nova] Lock "b3930760-1888-4f80-85d8-65120a25f275-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1031.276088] env[63028]: DEBUG nova.compute.manager [req-53f66148-db46-4291-bb4d-4d5bab83afcc req-424d7e59-7ae1-4660-81b8-47df7aa958a2 service nova] [instance: b3930760-1888-4f80-85d8-65120a25f275] No waiting events found dispatching network-vif-plugged-f5040918-0c62-4758-8f50-01303c2a0180 {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1031.276318] env[63028]: WARNING nova.compute.manager [req-53f66148-db46-4291-bb4d-4d5bab83afcc req-424d7e59-7ae1-4660-81b8-47df7aa958a2 service nova] [instance: b3930760-1888-4f80-85d8-65120a25f275] Received unexpected event network-vif-plugged-f5040918-0c62-4758-8f50-01303c2a0180 for instance with vm_state building and task_state spawning. 
[ 1031.276519] env[63028]: DEBUG nova.compute.manager [req-53f66148-db46-4291-bb4d-4d5bab83afcc req-424d7e59-7ae1-4660-81b8-47df7aa958a2 service nova] [instance: b3930760-1888-4f80-85d8-65120a25f275] Received event network-changed-f5040918-0c62-4758-8f50-01303c2a0180 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1031.276706] env[63028]: DEBUG nova.compute.manager [req-53f66148-db46-4291-bb4d-4d5bab83afcc req-424d7e59-7ae1-4660-81b8-47df7aa958a2 service nova] [instance: b3930760-1888-4f80-85d8-65120a25f275] Refreshing instance network info cache due to event network-changed-f5040918-0c62-4758-8f50-01303c2a0180. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1031.276916] env[63028]: DEBUG oslo_concurrency.lockutils [req-53f66148-db46-4291-bb4d-4d5bab83afcc req-424d7e59-7ae1-4660-81b8-47df7aa958a2 service nova] Acquiring lock "refresh_cache-b3930760-1888-4f80-85d8-65120a25f275" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1031.296467] env[63028]: DEBUG nova.compute.manager [None req-f0751db6-5ce2-45a6-a330-fe5726ff7979 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] [instance: f804ec95-0b97-4960-844d-b678b97fc401] Start spawning the instance on the hypervisor. {{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1031.331781] env[63028]: DEBUG nova.virt.hardware [None req-f0751db6-5ce2-45a6-a330-fe5726ff7979 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1031.331781] env[63028]: DEBUG nova.virt.hardware [None req-f0751db6-5ce2-45a6-a330-fe5726ff7979 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1031.331781] env[63028]: DEBUG nova.virt.hardware [None req-f0751db6-5ce2-45a6-a330-fe5726ff7979 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1031.331781] env[63028]: DEBUG nova.virt.hardware [None req-f0751db6-5ce2-45a6-a330-fe5726ff7979 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1031.331781] env[63028]: DEBUG nova.virt.hardware [None req-f0751db6-5ce2-45a6-a330-fe5726ff7979 
tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1031.332042] env[63028]: DEBUG nova.virt.hardware [None req-f0751db6-5ce2-45a6-a330-fe5726ff7979 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1031.332263] env[63028]: DEBUG nova.virt.hardware [None req-f0751db6-5ce2-45a6-a330-fe5726ff7979 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1031.332477] env[63028]: DEBUG nova.virt.hardware [None req-f0751db6-5ce2-45a6-a330-fe5726ff7979 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1031.332697] env[63028]: DEBUG nova.virt.hardware [None req-f0751db6-5ce2-45a6-a330-fe5726ff7979 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1031.333023] env[63028]: DEBUG nova.virt.hardware [None req-f0751db6-5ce2-45a6-a330-fe5726ff7979 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1031.333275] env[63028]: DEBUG nova.virt.hardware [None req-f0751db6-5ce2-45a6-a330-fe5726ff7979 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1031.334885] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7703e58d-d264-45b6-84fd-952364ded7c3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.346221] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1afb2344-8036-44e7-a9f9-7728d88b1f5a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.367689] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7877c216-7073-4971-8e3d-6be7dc600ebf tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.104s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1031.370389] env[63028]: DEBUG oslo_concurrency.lockutils [None req-55a1c518-d590-4078-972f-2184f70fe3bf tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Lock "compute_resources" acquired by 
"nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 10.002s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1031.399340] env[63028]: INFO nova.scheduler.client.report [None req-7877c216-7073-4971-8e3d-6be7dc600ebf tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Deleted allocations for instance b77ba7d6-305e-4b60-a4b7-9353c12c3920 [ 1031.403897] env[63028]: DEBUG oslo_concurrency.lockutils [None req-fffae308-21b3-4b5b-a9cb-3d79d782e7ea tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1031.494807] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f2e14d1a-0ade-4b1f-a23f-8bb884e7b3df tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Lock "46dc76bc-854f-46ad-9db5-21cf6f40fb21" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.439s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1031.533541] env[63028]: DEBUG oslo_vmware.api [None req-82e835a5-f72a-41b4-bb91-607fdececc16 tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] Task: {'id': task-2736124, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.651414] env[63028]: DEBUG oslo_vmware.api [None req-0b092587-9660-410c-9b74-a77157aa502f tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Task: {'id': task-2736125, 'name': ResetVM_Task, 'duration_secs': 0.11675} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1031.651694] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-0b092587-9660-410c-9b74-a77157aa502f tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] [instance: b16d85d7-13f3-4be0-8495-2fd2c1476f01] Did hard reboot of VM {{(pid=63028) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 1031.651886] env[63028]: DEBUG nova.compute.manager [None req-0b092587-9660-410c-9b74-a77157aa502f tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] [instance: b16d85d7-13f3-4be0-8495-2fd2c1476f01] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1031.652687] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d435008a-a4d1-4ad2-ab28-136f5a16d1ab {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.761817] env[63028]: DEBUG nova.network.neutron [None req-700d1c14-0bbf-4780-a97c-2e177b5c2496 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: b3930760-1888-4f80-85d8-65120a25f275] Instance cache missing network info. 
{{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1031.786882] env[63028]: INFO nova.compute.manager [None req-7e0e6b37-3dae-4280-9cd6-a7e57ab79152 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: c492dea4-9779-4460-a559-5b82fb0643f0] Took 26.68 seconds to build instance. [ 1031.878947] env[63028]: INFO nova.compute.claims [None req-55a1c518-d590-4078-972f-2184f70fe3bf tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 56e6ade9-893b-4c85-b0b8-e9f7b12cbad6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1031.909018] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7877c216-7073-4971-8e3d-6be7dc600ebf tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Lock "b77ba7d6-305e-4b60-a4b7-9353c12c3920" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.848s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1031.910493] env[63028]: DEBUG oslo_concurrency.lockutils [req-76fb598a-587e-4cd6-bdcf-5e759089563c req-ff420fb6-6a3f-480c-aba2-2852092b4662 service nova] Acquired lock "b77ba7d6-305e-4b60-a4b7-9353c12c3920" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1031.911584] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60624281-e645-4d29-9c17-373b667a7975 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.920954] env[63028]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. 
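The WARNING above and the fault records that follow show how vSphere reports SOAP faults: the server answers with HTTP 200, suds raises a WebFault, and oslo.vmware re-raises it as ManagedObjectNotFoundException, which the caller then maps onto its own "not found" error (here Nova's InstanceNotFound for the already-deleted vm-550794). A minimal sketch of that handling pattern, assuming a hypothetical helper name; only invoke_api, vim_util.get_object_property and the exception class are taken from the traceback below.

    from oslo_vmware import exceptions as vexc
    from oslo_vmware import vim_util


    def get_vm_power_state(session, vm_ref):
        """Return runtime.powerState, or None if the VM object is already gone."""
        try:
            # Same oslo.vmware helper that raises in the traceback below.
            return session.invoke_api(vim_util, 'get_object_property',
                                      session.vim, vm_ref, 'runtime.powerState')
        except vexc.ManagedObjectNotFoundException:
            # vCenter put a ManagedObjectNotFound fault inside an HTTP 200
            # response (hence the suds WARNING); the caller decides what
            # "missing" means -- Nova translates it to InstanceNotFound.
            return None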
[ 1031.921165] env[63028]: DEBUG oslo_vmware.api [-] Fault list: [ManagedObjectNotFound] {{(pid=63028) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 1031.921804] env[63028]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0ae3420a-af49-4c6d-b892-477819261a0f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.926238] env[63028]: DEBUG nova.network.neutron [None req-700d1c14-0bbf-4780-a97c-2e177b5c2496 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: b3930760-1888-4f80-85d8-65120a25f275] Updating instance_info_cache with network_info: [{"id": "f5040918-0c62-4758-8f50-01303c2a0180", "address": "fa:16:3e:fb:0e:d5", "network": {"id": "1fdce9cd-bf47-402a-8e16-4c01a25ad481", "bridge": "br-int", "label": "tempest-ServersTestJSON-1363423183-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ea26842446ec4691a6456a6659188704", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "706c9762-1cf8-4770-897d-377d0d927773", "external-id": "nsx-vlan-transportzone-402", "segmentation_id": 402, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf5040918-0c", "ovs_interfaceid": "f5040918-0c62-4758-8f50-01303c2a0180", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1031.933954] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e722296f-3540-48c0-b256-e336adf94b32 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.974017] env[63028]: ERROR root [req-76fb598a-587e-4cd6-bdcf-5e759089563c req-ff420fb6-6a3f-480c-aba2-2852092b4662 service nova] Original exception being dropped: ['Traceback (most recent call last):\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py", line 377, in request_handler\n response = request(managed_object, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 586, in __call__\n return client.invoke(args, kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 728, in invoke\n result = self.send(soapenv, timeout=timeout)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 777, in send\n return self.process_reply(reply.message, None, None)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 840, in process_reply\n raise WebFault(fault, replyroot)\n', "suds.WebFault: Server raised fault: 'The object 'vim.VirtualMachine:vm-550794' has already been deleted or has not been completely created'\n", '\nDuring handling of the above exception, another exception occurred:\n\n', 'Traceback (most recent call last):\n', ' File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 301, in _invoke_api\n return api_method(*args, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/vim_util.py", line 480, in get_object_property\n props = get_object_properties(vim, moref, [property_name],\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/vim_util.py", line 360, in get_object_properties\n retrieve_result = vim.RetrievePropertiesEx(\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py", line 413, in request_handler\n raise exceptions.VimFaultException(fault_list, fault_string,\n', "oslo_vmware.exceptions.VimFaultException: The object 'vim.VirtualMachine:vm-550794' has already been deleted or has not been completely created\nCause: Server raised fault: 'The object 'vim.VirtualMachine:vm-550794' has already been deleted or has not been completely created'\nFaults: [ManagedObjectNotFound]\nDetails: {'obj': 'vm-550794'}\n", '\nDuring handling of the above exception, another exception occurred:\n\n', 'Traceback (most recent call last):\n', ' File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 123, in _call_method\n return self.invoke_api(module, method, self.vim, *args,\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 358, in invoke_api\n return _invoke_api(module, method, *args, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 122, in func\n return evt.wait()\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait\n result = hub.switch()\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch\n return self.greenlet.switch()\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 122, in _inner\n idle = self.f(*self.args, **self.kw)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 96, in _func\n result = f(*args, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 341, in _invoke_api\n raise clazz(str(excep),\n', "oslo_vmware.exceptions.ManagedObjectNotFoundException: The object 'vim.VirtualMachine:vm-550794' has already been deleted or has not been completely created\nCause: Server raised fault: 'The object 'vim.VirtualMachine:vm-550794' has already been deleted or has not been completely created'\nFaults: [ManagedObjectNotFound]\nDetails: {'obj': 'vm-550794'}\n"]: nova.exception.InstanceNotFound: Instance b77ba7d6-305e-4b60-a4b7-9353c12c3920 could not be found. [ 1031.974243] env[63028]: DEBUG oslo_concurrency.lockutils [req-76fb598a-587e-4cd6-bdcf-5e759089563c req-ff420fb6-6a3f-480c-aba2-2852092b4662 service nova] Releasing lock "b77ba7d6-305e-4b60-a4b7-9353c12c3920" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1031.974464] env[63028]: DEBUG nova.compute.manager [req-76fb598a-587e-4cd6-bdcf-5e759089563c req-ff420fb6-6a3f-480c-aba2-2852092b4662 service nova] [instance: b77ba7d6-305e-4b60-a4b7-9353c12c3920] Detach interface failed, port_id=d0308a48-57ab-41f7-bbab-6871ed89c5f2, reason: Instance b77ba7d6-305e-4b60-a4b7-9353c12c3920 could not be found. 
{{(pid=63028) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1032.029982] env[63028]: DEBUG oslo_vmware.api [None req-82e835a5-f72a-41b4-bb91-607fdececc16 tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] Task: {'id': task-2736124, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.647219} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1032.030323] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-82e835a5-f72a-41b4-bb91-607fdececc16 tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] da23282a-bbda-47bf-9d9c-337ee9996779/da23282a-bbda-47bf-9d9c-337ee9996779.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1032.030541] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-82e835a5-f72a-41b4-bb91-607fdececc16 tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] [instance: da23282a-bbda-47bf-9d9c-337ee9996779] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1032.030788] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-781c563e-d660-447f-8f68-21ba315ce854 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.036928] env[63028]: DEBUG oslo_vmware.api [None req-82e835a5-f72a-41b4-bb91-607fdececc16 tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] Waiting for the task: (returnval){ [ 1032.036928] env[63028]: value = "task-2736126" [ 1032.036928] env[63028]: _type = "Task" [ 1032.036928] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.044250] env[63028]: DEBUG oslo_vmware.api [None req-82e835a5-f72a-41b4-bb91-607fdececc16 tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] Task: {'id': task-2736126, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.165670] env[63028]: DEBUG oslo_concurrency.lockutils [None req-0b092587-9660-410c-9b74-a77157aa502f tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Lock "b16d85d7-13f3-4be0-8495-2fd2c1476f01" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 4.838s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1032.260854] env[63028]: DEBUG nova.network.neutron [None req-f0751db6-5ce2-45a6-a330-fe5726ff7979 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] [instance: f804ec95-0b97-4960-844d-b678b97fc401] Successfully updated port: 33f3a6b3-ea0e-4b6c-a0c5-80d9100ff94e {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1032.288998] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7e0e6b37-3dae-4280-9cd6-a7e57ab79152 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Lock "c492dea4-9779-4460-a559-5b82fb0643f0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.196s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1032.386957] env[63028]: INFO nova.compute.resource_tracker [None req-55a1c518-d590-4078-972f-2184f70fe3bf tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 56e6ade9-893b-4c85-b0b8-e9f7b12cbad6] Updating resource usage from migration 17ac8e61-a974-40df-9f8f-cfcc2c503ee0 [ 1032.430395] env[63028]: DEBUG oslo_concurrency.lockutils [None req-700d1c14-0bbf-4780-a97c-2e177b5c2496 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Releasing lock "refresh_cache-b3930760-1888-4f80-85d8-65120a25f275" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1032.430922] env[63028]: DEBUG nova.compute.manager [None req-700d1c14-0bbf-4780-a97c-2e177b5c2496 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: b3930760-1888-4f80-85d8-65120a25f275] Instance network_info: |[{"id": "f5040918-0c62-4758-8f50-01303c2a0180", "address": "fa:16:3e:fb:0e:d5", "network": {"id": "1fdce9cd-bf47-402a-8e16-4c01a25ad481", "bridge": "br-int", "label": "tempest-ServersTestJSON-1363423183-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ea26842446ec4691a6456a6659188704", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "706c9762-1cf8-4770-897d-377d0d927773", "external-id": "nsx-vlan-transportzone-402", "segmentation_id": 402, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf5040918-0c", "ovs_interfaceid": "f5040918-0c62-4758-8f50-01303c2a0180", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": 
{}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1032.431317] env[63028]: DEBUG oslo_concurrency.lockutils [req-53f66148-db46-4291-bb4d-4d5bab83afcc req-424d7e59-7ae1-4660-81b8-47df7aa958a2 service nova] Acquired lock "refresh_cache-b3930760-1888-4f80-85d8-65120a25f275" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1032.431508] env[63028]: DEBUG nova.network.neutron [req-53f66148-db46-4291-bb4d-4d5bab83afcc req-424d7e59-7ae1-4660-81b8-47df7aa958a2 service nova] [instance: b3930760-1888-4f80-85d8-65120a25f275] Refreshing network info cache for port f5040918-0c62-4758-8f50-01303c2a0180 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1032.433298] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-700d1c14-0bbf-4780-a97c-2e177b5c2496 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: b3930760-1888-4f80-85d8-65120a25f275] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fb:0e:d5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '706c9762-1cf8-4770-897d-377d0d927773', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f5040918-0c62-4758-8f50-01303c2a0180', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1032.440874] env[63028]: DEBUG oslo.service.loopingcall [None req-700d1c14-0bbf-4780-a97c-2e177b5c2496 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1032.444579] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b3930760-1888-4f80-85d8-65120a25f275] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1032.445036] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a986e755-9ff3-42e0-8dbb-ba3bac497675 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.466757] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1032.466757] env[63028]: value = "task-2736127" [ 1032.466757] env[63028]: _type = "Task" [ 1032.466757] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.474758] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2736127, 'name': CreateVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.557110] env[63028]: DEBUG oslo_vmware.api [None req-82e835a5-f72a-41b4-bb91-607fdececc16 tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] Task: {'id': task-2736126, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.082978} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1032.557110] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-82e835a5-f72a-41b4-bb91-607fdececc16 tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] [instance: da23282a-bbda-47bf-9d9c-337ee9996779] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1032.561584] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1348c096-ff99-4966-961e-33628ed10373 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.588642] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-82e835a5-f72a-41b4-bb91-607fdececc16 tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] [instance: da23282a-bbda-47bf-9d9c-337ee9996779] Reconfiguring VM instance instance-00000060 to attach disk [datastore1] da23282a-bbda-47bf-9d9c-337ee9996779/da23282a-bbda-47bf-9d9c-337ee9996779.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1032.591377] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-79a25b40-916a-4e91-8564-92d3d1520066 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.612378] env[63028]: DEBUG oslo_vmware.api [None req-82e835a5-f72a-41b4-bb91-607fdececc16 tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] Waiting for the task: (returnval){ [ 1032.612378] env[63028]: value = "task-2736128" [ 1032.612378] env[63028]: _type = "Task" [ 1032.612378] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.620771] env[63028]: DEBUG oslo_vmware.api [None req-82e835a5-f72a-41b4-bb91-607fdececc16 tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] Task: {'id': task-2736128, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.717720] env[63028]: DEBUG oslo_concurrency.lockutils [None req-e65901e3-61b0-412f-8338-4fae1d455bce tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Acquiring lock "c492dea4-9779-4460-a559-5b82fb0643f0" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1032.717901] env[63028]: DEBUG oslo_concurrency.lockutils [None req-e65901e3-61b0-412f-8338-4fae1d455bce tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Lock "c492dea4-9779-4460-a559-5b82fb0643f0" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1032.718148] env[63028]: DEBUG nova.compute.manager [None req-e65901e3-61b0-412f-8338-4fae1d455bce tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: c492dea4-9779-4460-a559-5b82fb0643f0] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1032.719268] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a01e6ff6-35a2-46cc-a0a3-2dab903cc61e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.725813] env[63028]: DEBUG nova.compute.manager [None req-e65901e3-61b0-412f-8338-4fae1d455bce tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: c492dea4-9779-4460-a559-5b82fb0643f0] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=63028) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1032.726421] env[63028]: DEBUG nova.objects.instance [None req-e65901e3-61b0-412f-8338-4fae1d455bce tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Lazy-loading 'flavor' on Instance uuid c492dea4-9779-4460-a559-5b82fb0643f0 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1032.763599] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f0751db6-5ce2-45a6-a330-fe5726ff7979 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Acquiring lock "refresh_cache-f804ec95-0b97-4960-844d-b678b97fc401" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1032.764214] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f0751db6-5ce2-45a6-a330-fe5726ff7979 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Acquired lock "refresh_cache-f804ec95-0b97-4960-844d-b678b97fc401" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1032.764214] env[63028]: DEBUG nova.network.neutron [None req-f0751db6-5ce2-45a6-a330-fe5726ff7979 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] [instance: f804ec95-0b97-4960-844d-b678b97fc401] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1032.797577] 
env[63028]: DEBUG oslo_concurrency.lockutils [None req-87d04e4f-fe7d-4082-b35e-2aecbc7aed47 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Acquiring lock "46dc76bc-854f-46ad-9db5-21cf6f40fb21" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1032.797577] env[63028]: DEBUG oslo_concurrency.lockutils [None req-87d04e4f-fe7d-4082-b35e-2aecbc7aed47 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Lock "46dc76bc-854f-46ad-9db5-21cf6f40fb21" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1032.797577] env[63028]: DEBUG oslo_concurrency.lockutils [None req-87d04e4f-fe7d-4082-b35e-2aecbc7aed47 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Acquiring lock "46dc76bc-854f-46ad-9db5-21cf6f40fb21-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1032.797577] env[63028]: DEBUG oslo_concurrency.lockutils [None req-87d04e4f-fe7d-4082-b35e-2aecbc7aed47 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Lock "46dc76bc-854f-46ad-9db5-21cf6f40fb21-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1032.797577] env[63028]: DEBUG oslo_concurrency.lockutils [None req-87d04e4f-fe7d-4082-b35e-2aecbc7aed47 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Lock "46dc76bc-854f-46ad-9db5-21cf6f40fb21-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1032.800762] env[63028]: INFO nova.compute.manager [None req-87d04e4f-fe7d-4082-b35e-2aecbc7aed47 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Terminating instance [ 1032.813913] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7699347f-82a2-4de6-92c1-d32a573e3389 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.825346] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5a3530d-b874-46d0-8f00-88a381f9a0bc {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.860257] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d829f42b-7d28-4ed1-920e-c5aacb8222f9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.867007] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a21a5b5-cca9-4d3a-8471-804ec1377480 
{{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.887007] env[63028]: DEBUG nova.compute.provider_tree [None req-55a1c518-d590-4078-972f-2184f70fe3bf tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1032.977465] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2736127, 'name': CreateVM_Task} progress is 99%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.123649] env[63028]: DEBUG oslo_vmware.api [None req-82e835a5-f72a-41b4-bb91-607fdececc16 tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] Task: {'id': task-2736128, 'name': ReconfigVM_Task, 'duration_secs': 0.474973} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.123982] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-82e835a5-f72a-41b4-bb91-607fdececc16 tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] [instance: da23282a-bbda-47bf-9d9c-337ee9996779] Reconfigured VM instance instance-00000060 to attach disk [datastore1] da23282a-bbda-47bf-9d9c-337ee9996779/da23282a-bbda-47bf-9d9c-337ee9996779.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1033.124736] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3e5c9ad6-a0ff-45aa-9542-1479cccf98bf {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.131395] env[63028]: DEBUG oslo_vmware.api [None req-82e835a5-f72a-41b4-bb91-607fdececc16 tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] Waiting for the task: (returnval){ [ 1033.131395] env[63028]: value = "task-2736129" [ 1033.131395] env[63028]: _type = "Task" [ 1033.131395] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.140096] env[63028]: DEBUG oslo_vmware.api [None req-82e835a5-f72a-41b4-bb91-607fdececc16 tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] Task: {'id': task-2736129, 'name': Rename_Task} progress is 5%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.172323] env[63028]: DEBUG oslo_concurrency.lockutils [None req-258de24d-ebbe-4ab7-bc13-3ac54b89ecef tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Acquiring lock "d41a1eae-bb89-4222-9466-d86af891c654" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1033.172522] env[63028]: DEBUG oslo_concurrency.lockutils [None req-258de24d-ebbe-4ab7-bc13-3ac54b89ecef tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Lock "d41a1eae-bb89-4222-9466-d86af891c654" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1033.213504] env[63028]: DEBUG nova.network.neutron [req-53f66148-db46-4291-bb4d-4d5bab83afcc req-424d7e59-7ae1-4660-81b8-47df7aa958a2 service nova] [instance: b3930760-1888-4f80-85d8-65120a25f275] Updated VIF entry in instance network info cache for port f5040918-0c62-4758-8f50-01303c2a0180. {{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1033.213872] env[63028]: DEBUG nova.network.neutron [req-53f66148-db46-4291-bb4d-4d5bab83afcc req-424d7e59-7ae1-4660-81b8-47df7aa958a2 service nova] [instance: b3930760-1888-4f80-85d8-65120a25f275] Updating instance_info_cache with network_info: [{"id": "f5040918-0c62-4758-8f50-01303c2a0180", "address": "fa:16:3e:fb:0e:d5", "network": {"id": "1fdce9cd-bf47-402a-8e16-4c01a25ad481", "bridge": "br-int", "label": "tempest-ServersTestJSON-1363423183-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ea26842446ec4691a6456a6659188704", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "706c9762-1cf8-4770-897d-377d0d927773", "external-id": "nsx-vlan-transportzone-402", "segmentation_id": 402, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf5040918-0c", "ovs_interfaceid": "f5040918-0c62-4758-8f50-01303c2a0180", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1033.298875] env[63028]: DEBUG nova.network.neutron [None req-f0751db6-5ce2-45a6-a330-fe5726ff7979 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] [instance: f804ec95-0b97-4960-844d-b678b97fc401] Instance cache missing network info. 
{{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1033.304859] env[63028]: DEBUG nova.compute.manager [None req-87d04e4f-fe7d-4082-b35e-2aecbc7aed47 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Start destroying the instance on the hypervisor. {{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1033.305104] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-87d04e4f-fe7d-4082-b35e-2aecbc7aed47 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1033.306626] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84eef503-b2c8-4d96-8a59-66b92b12f48a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.316768] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-87d04e4f-fe7d-4082-b35e-2aecbc7aed47 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1033.316768] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3cfd716f-f315-487d-bcc6-b60d472fe300 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.323592] env[63028]: DEBUG oslo_vmware.api [None req-87d04e4f-fe7d-4082-b35e-2aecbc7aed47 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Waiting for the task: (returnval){ [ 1033.323592] env[63028]: value = "task-2736130" [ 1033.323592] env[63028]: _type = "Task" [ 1033.323592] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.332118] env[63028]: DEBUG oslo_vmware.api [None req-87d04e4f-fe7d-4082-b35e-2aecbc7aed47 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2736130, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.392986] env[63028]: DEBUG nova.scheduler.client.report [None req-55a1c518-d590-4078-972f-2184f70fe3bf tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1033.478452] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2736127, 'name': CreateVM_Task, 'duration_secs': 0.515375} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.478908] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b3930760-1888-4f80-85d8-65120a25f275] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1033.479647] env[63028]: DEBUG oslo_concurrency.lockutils [None req-700d1c14-0bbf-4780-a97c-2e177b5c2496 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1033.479922] env[63028]: DEBUG oslo_concurrency.lockutils [None req-700d1c14-0bbf-4780-a97c-2e177b5c2496 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1033.480268] env[63028]: DEBUG oslo_concurrency.lockutils [None req-700d1c14-0bbf-4780-a97c-2e177b5c2496 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1033.481354] env[63028]: DEBUG nova.network.neutron [None req-f0751db6-5ce2-45a6-a330-fe5726ff7979 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] [instance: f804ec95-0b97-4960-844d-b678b97fc401] Updating instance_info_cache with network_info: [{"id": "33f3a6b3-ea0e-4b6c-a0c5-80d9100ff94e", "address": "fa:16:3e:ee:58:09", "network": {"id": "e3a8845b-9fc6-46bd-8272-501135c875ad", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-2047431351-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91a3f78ba4514500bfd4ed81b74526e3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f68ebd2a-3c68-48db-8c32-8a01497fc2e7", "external-id": "nsx-vlan-transportzone-49", "segmentation_id": 49, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap33f3a6b3-ea", "ovs_interfaceid": "33f3a6b3-ea0e-4b6c-a0c5-80d9100ff94e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1033.482569] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-09221124-8aa4-4dca-80e5-9e01081a4f86 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.489383] env[63028]: DEBUG oslo_vmware.api [None req-700d1c14-0bbf-4780-a97c-2e177b5c2496 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Waiting for the task: 
(returnval){ [ 1033.489383] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]526aa7d0-6dcd-464e-1f6a-05cad452cbe1" [ 1033.489383] env[63028]: _type = "Task" [ 1033.489383] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.499364] env[63028]: DEBUG oslo_vmware.api [None req-700d1c14-0bbf-4780-a97c-2e177b5c2496 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]526aa7d0-6dcd-464e-1f6a-05cad452cbe1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.555882] env[63028]: DEBUG nova.compute.manager [req-6029a122-195f-4006-bed6-82fc79a0e906 req-c6970dee-e58a-4368-9ee3-e2c2b9be3085 service nova] [instance: f804ec95-0b97-4960-844d-b678b97fc401] Received event network-vif-plugged-33f3a6b3-ea0e-4b6c-a0c5-80d9100ff94e {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1033.555882] env[63028]: DEBUG oslo_concurrency.lockutils [req-6029a122-195f-4006-bed6-82fc79a0e906 req-c6970dee-e58a-4368-9ee3-e2c2b9be3085 service nova] Acquiring lock "f804ec95-0b97-4960-844d-b678b97fc401-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1033.555882] env[63028]: DEBUG oslo_concurrency.lockutils [req-6029a122-195f-4006-bed6-82fc79a0e906 req-c6970dee-e58a-4368-9ee3-e2c2b9be3085 service nova] Lock "f804ec95-0b97-4960-844d-b678b97fc401-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1033.555882] env[63028]: DEBUG oslo_concurrency.lockutils [req-6029a122-195f-4006-bed6-82fc79a0e906 req-c6970dee-e58a-4368-9ee3-e2c2b9be3085 service nova] Lock "f804ec95-0b97-4960-844d-b678b97fc401-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1033.556071] env[63028]: DEBUG nova.compute.manager [req-6029a122-195f-4006-bed6-82fc79a0e906 req-c6970dee-e58a-4368-9ee3-e2c2b9be3085 service nova] [instance: f804ec95-0b97-4960-844d-b678b97fc401] No waiting events found dispatching network-vif-plugged-33f3a6b3-ea0e-4b6c-a0c5-80d9100ff94e {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1033.556099] env[63028]: WARNING nova.compute.manager [req-6029a122-195f-4006-bed6-82fc79a0e906 req-c6970dee-e58a-4368-9ee3-e2c2b9be3085 service nova] [instance: f804ec95-0b97-4960-844d-b678b97fc401] Received unexpected event network-vif-plugged-33f3a6b3-ea0e-4b6c-a0c5-80d9100ff94e for instance with vm_state building and task_state spawning. 
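The interleaved "Acquiring lock ... by ...", "Lock ... acquired ... :: waited" and "Lock ... \"released\" ... :: held" records throughout this trace come from oslo.concurrency's lockutils, which logs wait and hold times around every named lock. A short sketch of the two usual call patterns, with a placeholder lock body; the decorator and context manager are the library calls, everything else is illustrative.

    from oslo_concurrency import lockutils


    @lockutils.synchronized('compute_resources')
    def update_usage_example():
        """Runs only while the in-process 'compute_resources' lock is held;
        lockutils logs the waited/held durations seen in this trace."""


    def clear_events_example(instance_uuid):
        # The '<uuid>-events' locks above follow the same pattern, keyed by
        # the instance UUID with an '-events' suffix.
        with lockutils.lock('%s-events' % instance_uuid):
            pass  # pop or clear queued external events for this instance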
[ 1033.556251] env[63028]: DEBUG nova.compute.manager [req-6029a122-195f-4006-bed6-82fc79a0e906 req-c6970dee-e58a-4368-9ee3-e2c2b9be3085 service nova] [instance: f804ec95-0b97-4960-844d-b678b97fc401] Received event network-changed-33f3a6b3-ea0e-4b6c-a0c5-80d9100ff94e {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1033.556398] env[63028]: DEBUG nova.compute.manager [req-6029a122-195f-4006-bed6-82fc79a0e906 req-c6970dee-e58a-4368-9ee3-e2c2b9be3085 service nova] [instance: f804ec95-0b97-4960-844d-b678b97fc401] Refreshing instance network info cache due to event network-changed-33f3a6b3-ea0e-4b6c-a0c5-80d9100ff94e. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1033.556557] env[63028]: DEBUG oslo_concurrency.lockutils [req-6029a122-195f-4006-bed6-82fc79a0e906 req-c6970dee-e58a-4368-9ee3-e2c2b9be3085 service nova] Acquiring lock "refresh_cache-f804ec95-0b97-4960-844d-b678b97fc401" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1033.564240] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2498bff7-1e43-4867-b0b7-b0701db51cf1 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Acquiring lock "b16d85d7-13f3-4be0-8495-2fd2c1476f01" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1033.564639] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2498bff7-1e43-4867-b0b7-b0701db51cf1 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Lock "b16d85d7-13f3-4be0-8495-2fd2c1476f01" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1033.564763] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2498bff7-1e43-4867-b0b7-b0701db51cf1 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Acquiring lock "b16d85d7-13f3-4be0-8495-2fd2c1476f01-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1033.564949] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2498bff7-1e43-4867-b0b7-b0701db51cf1 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Lock "b16d85d7-13f3-4be0-8495-2fd2c1476f01-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1033.565186] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2498bff7-1e43-4867-b0b7-b0701db51cf1 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Lock "b16d85d7-13f3-4be0-8495-2fd2c1476f01-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1033.567201] env[63028]: INFO nova.compute.manager [None req-2498bff7-1e43-4867-b0b7-b0701db51cf1 tempest-SecurityGroupsTestJSON-350708257 
tempest-SecurityGroupsTestJSON-350708257-project-member] [instance: b16d85d7-13f3-4be0-8495-2fd2c1476f01] Terminating instance [ 1033.641396] env[63028]: DEBUG oslo_vmware.api [None req-82e835a5-f72a-41b4-bb91-607fdececc16 tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] Task: {'id': task-2736129, 'name': Rename_Task, 'duration_secs': 0.308218} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.641674] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-82e835a5-f72a-41b4-bb91-607fdececc16 tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] [instance: da23282a-bbda-47bf-9d9c-337ee9996779] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1033.641961] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-22f3e0fe-a32f-420b-9996-197ec7dd114c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.648818] env[63028]: DEBUG oslo_vmware.api [None req-82e835a5-f72a-41b4-bb91-607fdececc16 tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] Waiting for the task: (returnval){ [ 1033.648818] env[63028]: value = "task-2736131" [ 1033.648818] env[63028]: _type = "Task" [ 1033.648818] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.660320] env[63028]: DEBUG oslo_vmware.api [None req-82e835a5-f72a-41b4-bb91-607fdececc16 tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] Task: {'id': task-2736131, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.675719] env[63028]: DEBUG nova.compute.utils [None req-258de24d-ebbe-4ab7-bc13-3ac54b89ecef tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1033.717086] env[63028]: DEBUG oslo_concurrency.lockutils [req-53f66148-db46-4291-bb4d-4d5bab83afcc req-424d7e59-7ae1-4660-81b8-47df7aa958a2 service nova] Releasing lock "refresh_cache-b3930760-1888-4f80-85d8-65120a25f275" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1033.733173] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-e65901e3-61b0-412f-8338-4fae1d455bce tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: c492dea4-9779-4460-a559-5b82fb0643f0] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1033.733771] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-07846b5f-5b86-4f51-9c90-3af06a19fc34 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.740337] env[63028]: DEBUG oslo_vmware.api [None req-e65901e3-61b0-412f-8338-4fae1d455bce tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Waiting for the task: (returnval){ [ 1033.740337] env[63028]: value = "task-2736132" [ 1033.740337] env[63028]: _type = "Task" [ 1033.740337] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.748891] env[63028]: DEBUG oslo_vmware.api [None req-e65901e3-61b0-412f-8338-4fae1d455bce tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2736132, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.835519] env[63028]: DEBUG oslo_vmware.api [None req-87d04e4f-fe7d-4082-b35e-2aecbc7aed47 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2736130, 'name': PowerOffVM_Task, 'duration_secs': 0.187353} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.835838] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-87d04e4f-fe7d-4082-b35e-2aecbc7aed47 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1033.836018] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-87d04e4f-fe7d-4082-b35e-2aecbc7aed47 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1033.836272] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1befc04a-8e04-442a-a015-b00d10623e7a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.898100] env[63028]: DEBUG oslo_concurrency.lockutils [None req-55a1c518-d590-4078-972f-2184f70fe3bf tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.528s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1033.898428] env[63028]: INFO nova.compute.manager [None req-55a1c518-d590-4078-972f-2184f70fe3bf tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 56e6ade9-893b-4c85-b0b8-e9f7b12cbad6] Migrating [ 1033.906354] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5d13b0db-b56d-43b7-9f3a-f0646936a40d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.133s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1033.907934] env[63028]: INFO nova.compute.claims [None req-5d13b0db-b56d-43b7-9f3a-f0646936a40d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 60d18f14-536a-4b0f-912b-21f3f5a30d28] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1033.910420] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-87d04e4f-fe7d-4082-b35e-2aecbc7aed47 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1033.910617] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-87d04e4f-fe7d-4082-b35e-2aecbc7aed47 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Deleting contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1033.910800] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-87d04e4f-fe7d-4082-b35e-2aecbc7aed47 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Deleting the datastore file [datastore1] 
46dc76bc-854f-46ad-9db5-21cf6f40fb21 {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1033.913624] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-149f3159-fd19-4953-8957-3893624affdc {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.927167] env[63028]: DEBUG oslo_vmware.api [None req-87d04e4f-fe7d-4082-b35e-2aecbc7aed47 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Waiting for the task: (returnval){ [ 1033.927167] env[63028]: value = "task-2736134" [ 1033.927167] env[63028]: _type = "Task" [ 1033.927167] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.936221] env[63028]: DEBUG oslo_vmware.api [None req-87d04e4f-fe7d-4082-b35e-2aecbc7aed47 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2736134, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.985793] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f0751db6-5ce2-45a6-a330-fe5726ff7979 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Releasing lock "refresh_cache-f804ec95-0b97-4960-844d-b678b97fc401" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1033.986131] env[63028]: DEBUG nova.compute.manager [None req-f0751db6-5ce2-45a6-a330-fe5726ff7979 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] [instance: f804ec95-0b97-4960-844d-b678b97fc401] Instance network_info: |[{"id": "33f3a6b3-ea0e-4b6c-a0c5-80d9100ff94e", "address": "fa:16:3e:ee:58:09", "network": {"id": "e3a8845b-9fc6-46bd-8272-501135c875ad", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-2047431351-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91a3f78ba4514500bfd4ed81b74526e3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f68ebd2a-3c68-48db-8c32-8a01497fc2e7", "external-id": "nsx-vlan-transportzone-49", "segmentation_id": 49, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap33f3a6b3-ea", "ovs_interfaceid": "33f3a6b3-ea0e-4b6c-a0c5-80d9100ff94e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1033.986469] env[63028]: DEBUG oslo_concurrency.lockutils [req-6029a122-195f-4006-bed6-82fc79a0e906 req-c6970dee-e58a-4368-9ee3-e2c2b9be3085 service nova] Acquired lock "refresh_cache-f804ec95-0b97-4960-844d-b678b97fc401" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1033.986703] env[63028]: DEBUG nova.network.neutron 
[req-6029a122-195f-4006-bed6-82fc79a0e906 req-c6970dee-e58a-4368-9ee3-e2c2b9be3085 service nova] [instance: f804ec95-0b97-4960-844d-b678b97fc401] Refreshing network info cache for port 33f3a6b3-ea0e-4b6c-a0c5-80d9100ff94e {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1033.988400] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-f0751db6-5ce2-45a6-a330-fe5726ff7979 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] [instance: f804ec95-0b97-4960-844d-b678b97fc401] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ee:58:09', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f68ebd2a-3c68-48db-8c32-8a01497fc2e7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '33f3a6b3-ea0e-4b6c-a0c5-80d9100ff94e', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1033.996273] env[63028]: DEBUG oslo.service.loopingcall [None req-f0751db6-5ce2-45a6-a330-fe5726ff7979 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1033.997534] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f804ec95-0b97-4960-844d-b678b97fc401] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1034.001268] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2b930970-b2ac-4cef-ac8e-0058c8fcb962 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.023246] env[63028]: DEBUG oslo_vmware.api [None req-700d1c14-0bbf-4780-a97c-2e177b5c2496 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]526aa7d0-6dcd-464e-1f6a-05cad452cbe1, 'name': SearchDatastore_Task, 'duration_secs': 0.021903} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1034.024582] env[63028]: DEBUG oslo_concurrency.lockutils [None req-700d1c14-0bbf-4780-a97c-2e177b5c2496 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1034.024831] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-700d1c14-0bbf-4780-a97c-2e177b5c2496 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: b3930760-1888-4f80-85d8-65120a25f275] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1034.025125] env[63028]: DEBUG oslo_concurrency.lockutils [None req-700d1c14-0bbf-4780-a97c-2e177b5c2496 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1034.025279] env[63028]: DEBUG oslo_concurrency.lockutils [None req-700d1c14-0bbf-4780-a97c-2e177b5c2496 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1034.025460] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-700d1c14-0bbf-4780-a97c-2e177b5c2496 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1034.025723] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1034.025723] env[63028]: value = "task-2736135" [ 1034.025723] env[63028]: _type = "Task" [ 1034.025723] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.025918] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7b1d55b1-be80-4845-95ce-ba2eb9cff698 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.036613] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2736135, 'name': CreateVM_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.044971] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-700d1c14-0bbf-4780-a97c-2e177b5c2496 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1034.045183] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-700d1c14-0bbf-4780-a97c-2e177b5c2496 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1034.045980] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8a612f18-c08a-497b-a7bc-45dc9abad3da {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.051860] env[63028]: DEBUG oslo_vmware.api [None req-700d1c14-0bbf-4780-a97c-2e177b5c2496 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Waiting for the task: (returnval){ [ 1034.051860] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52417e5a-5f5d-7b23-4e00-37229a138488" [ 1034.051860] env[63028]: _type = "Task" [ 1034.051860] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.061040] env[63028]: DEBUG oslo_vmware.api [None req-700d1c14-0bbf-4780-a97c-2e177b5c2496 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52417e5a-5f5d-7b23-4e00-37229a138488, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.071258] env[63028]: DEBUG nova.compute.manager [None req-2498bff7-1e43-4867-b0b7-b0701db51cf1 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] [instance: b16d85d7-13f3-4be0-8495-2fd2c1476f01] Start destroying the instance on the hypervisor. {{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1034.071538] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-2498bff7-1e43-4867-b0b7-b0701db51cf1 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] [instance: b16d85d7-13f3-4be0-8495-2fd2c1476f01] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1034.072422] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5281b54a-525b-4ab9-941f-74d889b8caea {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.079253] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-2498bff7-1e43-4867-b0b7-b0701db51cf1 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] [instance: b16d85d7-13f3-4be0-8495-2fd2c1476f01] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1034.079487] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-39ef6434-70ab-40e7-af4f-197b15ebae4f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.085567] env[63028]: DEBUG oslo_vmware.api [None req-2498bff7-1e43-4867-b0b7-b0701db51cf1 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Waiting for the task: (returnval){ [ 1034.085567] env[63028]: value = "task-2736136" [ 1034.085567] env[63028]: _type = "Task" [ 1034.085567] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.095266] env[63028]: DEBUG oslo_vmware.api [None req-2498bff7-1e43-4867-b0b7-b0701db51cf1 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Task: {'id': task-2736136, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.159161] env[63028]: DEBUG oslo_vmware.api [None req-82e835a5-f72a-41b4-bb91-607fdececc16 tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] Task: {'id': task-2736131, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.179011] env[63028]: DEBUG oslo_concurrency.lockutils [None req-258de24d-ebbe-4ab7-bc13-3ac54b89ecef tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Lock "d41a1eae-bb89-4222-9466-d86af891c654" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1034.251553] env[63028]: DEBUG oslo_vmware.api [None req-e65901e3-61b0-412f-8338-4fae1d455bce tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2736132, 'name': PowerOffVM_Task, 'duration_secs': 0.298552} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1034.251906] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-e65901e3-61b0-412f-8338-4fae1d455bce tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: c492dea4-9779-4460-a559-5b82fb0643f0] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1034.252189] env[63028]: DEBUG nova.compute.manager [None req-e65901e3-61b0-412f-8338-4fae1d455bce tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: c492dea4-9779-4460-a559-5b82fb0643f0] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1034.253122] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e922f0a9-0887-4a68-8e1e-a008655d3aaa {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.422675] env[63028]: DEBUG oslo_concurrency.lockutils [None req-55a1c518-d590-4078-972f-2184f70fe3bf tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Acquiring lock "refresh_cache-56e6ade9-893b-4c85-b0b8-e9f7b12cbad6" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1034.422675] env[63028]: DEBUG oslo_concurrency.lockutils [None req-55a1c518-d590-4078-972f-2184f70fe3bf tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Acquired lock "refresh_cache-56e6ade9-893b-4c85-b0b8-e9f7b12cbad6" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1034.422961] env[63028]: DEBUG nova.network.neutron [None req-55a1c518-d590-4078-972f-2184f70fe3bf 
tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 56e6ade9-893b-4c85-b0b8-e9f7b12cbad6] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1034.438364] env[63028]: DEBUG oslo_vmware.api [None req-87d04e4f-fe7d-4082-b35e-2aecbc7aed47 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2736134, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.33809} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1034.438770] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-87d04e4f-fe7d-4082-b35e-2aecbc7aed47 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1034.439917] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-87d04e4f-fe7d-4082-b35e-2aecbc7aed47 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Deleted contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1034.439917] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-87d04e4f-fe7d-4082-b35e-2aecbc7aed47 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1034.439917] env[63028]: INFO nova.compute.manager [None req-87d04e4f-fe7d-4082-b35e-2aecbc7aed47 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1034.439917] env[63028]: DEBUG oslo.service.loopingcall [None req-87d04e4f-fe7d-4082-b35e-2aecbc7aed47 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1034.439917] env[63028]: DEBUG nova.compute.manager [-] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1034.440158] env[63028]: DEBUG nova.network.neutron [-] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1034.540098] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2736135, 'name': CreateVM_Task} progress is 99%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.565176] env[63028]: DEBUG oslo_vmware.api [None req-700d1c14-0bbf-4780-a97c-2e177b5c2496 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52417e5a-5f5d-7b23-4e00-37229a138488, 'name': SearchDatastore_Task, 'duration_secs': 0.012389} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1034.566074] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7ef84421-5c39-4e01-a26c-f83441197f5c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.571420] env[63028]: DEBUG oslo_vmware.api [None req-700d1c14-0bbf-4780-a97c-2e177b5c2496 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Waiting for the task: (returnval){ [ 1034.571420] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52ec293b-bda6-772b-6677-ad1d7a6bb82e" [ 1034.571420] env[63028]: _type = "Task" [ 1034.571420] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.580461] env[63028]: DEBUG oslo_vmware.api [None req-700d1c14-0bbf-4780-a97c-2e177b5c2496 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52ec293b-bda6-772b-6677-ad1d7a6bb82e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.595328] env[63028]: DEBUG oslo_vmware.api [None req-2498bff7-1e43-4867-b0b7-b0701db51cf1 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Task: {'id': task-2736136, 'name': PowerOffVM_Task} progress is 100%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.659440] env[63028]: DEBUG oslo_vmware.api [None req-82e835a5-f72a-41b4-bb91-607fdececc16 tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] Task: {'id': task-2736131, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.766786] env[63028]: DEBUG oslo_concurrency.lockutils [None req-e65901e3-61b0-412f-8338-4fae1d455bce tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Lock "c492dea4-9779-4460-a559-5b82fb0643f0" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.049s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1034.842820] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5eecdbbb-21b2-495d-8c7c-e12748cfd367 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Acquiring lock "e048cadf-9dc1-4eb7-a825-422d0736231c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1034.842928] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5eecdbbb-21b2-495d-8c7c-e12748cfd367 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Lock "e048cadf-9dc1-4eb7-a825-422d0736231c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1034.924309] env[63028]: DEBUG nova.network.neutron [req-6029a122-195f-4006-bed6-82fc79a0e906 req-c6970dee-e58a-4368-9ee3-e2c2b9be3085 service nova] [instance: f804ec95-0b97-4960-844d-b678b97fc401] Updated VIF entry in instance network info cache for port 33f3a6b3-ea0e-4b6c-a0c5-80d9100ff94e. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1034.924763] env[63028]: DEBUG nova.network.neutron [req-6029a122-195f-4006-bed6-82fc79a0e906 req-c6970dee-e58a-4368-9ee3-e2c2b9be3085 service nova] [instance: f804ec95-0b97-4960-844d-b678b97fc401] Updating instance_info_cache with network_info: [{"id": "33f3a6b3-ea0e-4b6c-a0c5-80d9100ff94e", "address": "fa:16:3e:ee:58:09", "network": {"id": "e3a8845b-9fc6-46bd-8272-501135c875ad", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-2047431351-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91a3f78ba4514500bfd4ed81b74526e3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f68ebd2a-3c68-48db-8c32-8a01497fc2e7", "external-id": "nsx-vlan-transportzone-49", "segmentation_id": 49, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap33f3a6b3-ea", "ovs_interfaceid": "33f3a6b3-ea0e-4b6c-a0c5-80d9100ff94e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1035.038029] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2736135, 'name': CreateVM_Task, 'duration_secs': 0.561392} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.038343] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f804ec95-0b97-4960-844d-b678b97fc401] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1035.039190] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f0751db6-5ce2-45a6-a330-fe5726ff7979 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1035.039352] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f0751db6-5ce2-45a6-a330-fe5726ff7979 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1035.040134] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f0751db6-5ce2-45a6-a330-fe5726ff7979 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1035.042227] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-73d2d213-20ed-44bb-bb1d-78a6c8fea643 {{(pid=63028) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.047128] env[63028]: DEBUG oslo_vmware.api [None req-f0751db6-5ce2-45a6-a330-fe5726ff7979 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Waiting for the task: (returnval){ [ 1035.047128] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5244d975-143f-f42f-51a6-2aa56fbbf043" [ 1035.047128] env[63028]: _type = "Task" [ 1035.047128] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.055464] env[63028]: DEBUG oslo_vmware.api [None req-f0751db6-5ce2-45a6-a330-fe5726ff7979 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5244d975-143f-f42f-51a6-2aa56fbbf043, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.086473] env[63028]: DEBUG oslo_vmware.api [None req-700d1c14-0bbf-4780-a97c-2e177b5c2496 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52ec293b-bda6-772b-6677-ad1d7a6bb82e, 'name': SearchDatastore_Task, 'duration_secs': 0.009609} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.089372] env[63028]: DEBUG oslo_concurrency.lockutils [None req-700d1c14-0bbf-4780-a97c-2e177b5c2496 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1035.089628] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-700d1c14-0bbf-4780-a97c-2e177b5c2496 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] b3930760-1888-4f80-85d8-65120a25f275/b3930760-1888-4f80-85d8-65120a25f275.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1035.089944] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-02558a4a-68ed-4520-aa9e-0492a6a4e63b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.108652] env[63028]: DEBUG oslo_vmware.api [None req-2498bff7-1e43-4867-b0b7-b0701db51cf1 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Task: {'id': task-2736136, 'name': PowerOffVM_Task, 'duration_secs': 0.514641} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.108652] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-2498bff7-1e43-4867-b0b7-b0701db51cf1 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] [instance: b16d85d7-13f3-4be0-8495-2fd2c1476f01] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1035.108652] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-2498bff7-1e43-4867-b0b7-b0701db51cf1 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] [instance: b16d85d7-13f3-4be0-8495-2fd2c1476f01] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1035.108652] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-72d61b06-61af-45ba-aea3-f30a72d66d27 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.111416] env[63028]: DEBUG oslo_vmware.api [None req-700d1c14-0bbf-4780-a97c-2e177b5c2496 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Waiting for the task: (returnval){ [ 1035.111416] env[63028]: value = "task-2736137" [ 1035.111416] env[63028]: _type = "Task" [ 1035.111416] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.122357] env[63028]: DEBUG oslo_vmware.api [None req-700d1c14-0bbf-4780-a97c-2e177b5c2496 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2736137, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.166717] env[63028]: DEBUG oslo_vmware.api [None req-82e835a5-f72a-41b4-bb91-607fdececc16 tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] Task: {'id': task-2736131, 'name': PowerOnVM_Task, 'duration_secs': 1.236663} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.166986] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-82e835a5-f72a-41b4-bb91-607fdececc16 tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] [instance: da23282a-bbda-47bf-9d9c-337ee9996779] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1035.167224] env[63028]: INFO nova.compute.manager [None req-82e835a5-f72a-41b4-bb91-607fdececc16 tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] [instance: da23282a-bbda-47bf-9d9c-337ee9996779] Took 8.97 seconds to spawn the instance on the hypervisor. 
[ 1035.167407] env[63028]: DEBUG nova.compute.manager [None req-82e835a5-f72a-41b4-bb91-607fdececc16 tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] [instance: da23282a-bbda-47bf-9d9c-337ee9996779] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1035.168230] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-717d1c4d-0652-41d0-b7b2-3d4e90160368 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.172836] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-2498bff7-1e43-4867-b0b7-b0701db51cf1 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] [instance: b16d85d7-13f3-4be0-8495-2fd2c1476f01] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1035.173070] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-2498bff7-1e43-4867-b0b7-b0701db51cf1 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] [instance: b16d85d7-13f3-4be0-8495-2fd2c1476f01] Deleting contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1035.173252] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-2498bff7-1e43-4867-b0b7-b0701db51cf1 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Deleting the datastore file [datastore2] b16d85d7-13f3-4be0-8495-2fd2c1476f01 {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1035.176483] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4d5e9baa-8446-46f0-a701-db67e16ad0e2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.187735] env[63028]: DEBUG oslo_vmware.api [None req-2498bff7-1e43-4867-b0b7-b0701db51cf1 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Waiting for the task: (returnval){ [ 1035.187735] env[63028]: value = "task-2736139" [ 1035.187735] env[63028]: _type = "Task" [ 1035.187735] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.201385] env[63028]: DEBUG oslo_vmware.api [None req-2498bff7-1e43-4867-b0b7-b0701db51cf1 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Task: {'id': task-2736139, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.250011] env[63028]: DEBUG oslo_concurrency.lockutils [None req-258de24d-ebbe-4ab7-bc13-3ac54b89ecef tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Acquiring lock "d41a1eae-bb89-4222-9466-d86af891c654" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1035.250308] env[63028]: DEBUG oslo_concurrency.lockutils [None req-258de24d-ebbe-4ab7-bc13-3ac54b89ecef tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Lock "d41a1eae-bb89-4222-9466-d86af891c654" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1035.250557] env[63028]: INFO nova.compute.manager [None req-258de24d-ebbe-4ab7-bc13-3ac54b89ecef tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Attaching volume fc4d17f9-1fe2-4d10-a1c6-ede7f71f83a6 to /dev/sdb [ 1035.300622] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb998e54-59a4-46ba-bfb1-65c93bee9543 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.311846] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9258851-a44b-4b7a-8334-df3e2a68866c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.329813] env[63028]: DEBUG nova.virt.block_device [None req-258de24d-ebbe-4ab7-bc13-3ac54b89ecef tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Updating existing volume attachment record: 62b1f113-5153-49b3-8c46-ca63ea84132e {{(pid=63028) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1035.334451] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-674033a2-ef17-4f21-9e41-3309b38f8468 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.343061] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8647965-f211-4cba-a75d-37f7c8486aad {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.347205] env[63028]: DEBUG nova.compute.manager [None req-5eecdbbb-21b2-495d-8c7c-e12748cfd367 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: e048cadf-9dc1-4eb7-a825-422d0736231c] Starting instance... 
{{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1035.379551] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87066db4-09fa-4c97-b88e-e5296d7bd06a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.388917] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8092823c-5e3b-46d8-80da-373fa66cca13 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.407579] env[63028]: DEBUG nova.compute.provider_tree [None req-5d13b0db-b56d-43b7-9f3a-f0646936a40d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1035.434330] env[63028]: DEBUG oslo_concurrency.lockutils [req-6029a122-195f-4006-bed6-82fc79a0e906 req-c6970dee-e58a-4368-9ee3-e2c2b9be3085 service nova] Releasing lock "refresh_cache-f804ec95-0b97-4960-844d-b678b97fc401" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1035.434330] env[63028]: DEBUG nova.compute.manager [req-6029a122-195f-4006-bed6-82fc79a0e906 req-c6970dee-e58a-4368-9ee3-e2c2b9be3085 service nova] [instance: b16d85d7-13f3-4be0-8495-2fd2c1476f01] Received event network-changed-47a80dc6-e819-410f-8257-cfc2cce0d3d9 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1035.434330] env[63028]: DEBUG nova.compute.manager [req-6029a122-195f-4006-bed6-82fc79a0e906 req-c6970dee-e58a-4368-9ee3-e2c2b9be3085 service nova] [instance: b16d85d7-13f3-4be0-8495-2fd2c1476f01] Refreshing instance network info cache due to event network-changed-47a80dc6-e819-410f-8257-cfc2cce0d3d9. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1035.434330] env[63028]: DEBUG oslo_concurrency.lockutils [req-6029a122-195f-4006-bed6-82fc79a0e906 req-c6970dee-e58a-4368-9ee3-e2c2b9be3085 service nova] Acquiring lock "refresh_cache-b16d85d7-13f3-4be0-8495-2fd2c1476f01" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1035.434330] env[63028]: DEBUG oslo_concurrency.lockutils [req-6029a122-195f-4006-bed6-82fc79a0e906 req-c6970dee-e58a-4368-9ee3-e2c2b9be3085 service nova] Acquired lock "refresh_cache-b16d85d7-13f3-4be0-8495-2fd2c1476f01" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1035.434330] env[63028]: DEBUG nova.network.neutron [req-6029a122-195f-4006-bed6-82fc79a0e906 req-c6970dee-e58a-4368-9ee3-e2c2b9be3085 service nova] [instance: b16d85d7-13f3-4be0-8495-2fd2c1476f01] Refreshing network info cache for port 47a80dc6-e819-410f-8257-cfc2cce0d3d9 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1035.558621] env[63028]: DEBUG oslo_vmware.api [None req-f0751db6-5ce2-45a6-a330-fe5726ff7979 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5244d975-143f-f42f-51a6-2aa56fbbf043, 'name': SearchDatastore_Task, 'duration_secs': 0.016921} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.559137] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f0751db6-5ce2-45a6-a330-fe5726ff7979 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1035.559512] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-f0751db6-5ce2-45a6-a330-fe5726ff7979 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] [instance: f804ec95-0b97-4960-844d-b678b97fc401] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1035.559977] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f0751db6-5ce2-45a6-a330-fe5726ff7979 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1035.560390] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f0751db6-5ce2-45a6-a330-fe5726ff7979 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1035.560695] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-f0751db6-5ce2-45a6-a330-fe5726ff7979 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1035.561104] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0ce0dec6-3404-452b-8c42-9a24cf571c77 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.578251] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-f0751db6-5ce2-45a6-a330-fe5726ff7979 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1035.578251] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-f0751db6-5ce2-45a6-a330-fe5726ff7979 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1035.578251] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-124ce976-31e2-4f87-a3ce-9eaefffc990a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.584680] env[63028]: DEBUG oslo_vmware.api [None req-f0751db6-5ce2-45a6-a330-fe5726ff7979 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Waiting for the task: (returnval){ [ 1035.584680] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52693ac7-6dcd-c029-d21f-0f3a19d37322" [ 1035.584680] env[63028]: _type = "Task" [ 1035.584680] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.599785] env[63028]: DEBUG oslo_vmware.api [None req-f0751db6-5ce2-45a6-a330-fe5726ff7979 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52693ac7-6dcd-c029-d21f-0f3a19d37322, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.609415] env[63028]: DEBUG nova.compute.manager [req-142e4884-6b9f-492b-a79b-674a36c32c99 req-4fda0b00-5639-4ba3-be1b-6f500c6ab621 service nova] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Received event network-vif-deleted-11a8272a-a9ff-4d48-860e-8ee1b781a6ab {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1035.609415] env[63028]: INFO nova.compute.manager [req-142e4884-6b9f-492b-a79b-674a36c32c99 req-4fda0b00-5639-4ba3-be1b-6f500c6ab621 service nova] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Neutron deleted interface 11a8272a-a9ff-4d48-860e-8ee1b781a6ab; detaching it from the instance and deleting it from the info cache [ 1035.609415] env[63028]: DEBUG nova.network.neutron [req-142e4884-6b9f-492b-a79b-674a36c32c99 req-4fda0b00-5639-4ba3-be1b-6f500c6ab621 service nova] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1035.618213] env[63028]: DEBUG nova.network.neutron [None req-55a1c518-d590-4078-972f-2184f70fe3bf tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 56e6ade9-893b-4c85-b0b8-e9f7b12cbad6] Updating instance_info_cache with network_info: [{"id": "1f5c01d1-9623-425e-8309-336dd1d961fa", "address": "fa:16:3e:7a:87:25", "network": {"id": "e8109779-7eb7-4751-b6e5-c1e3b007cb9a", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1892394824-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "05118b378b5e4d838962db2378b381bc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5446413d-c3b0-4cd2-a962-62240db178ac", "external-id": "nsx-vlan-transportzone-528", 
"segmentation_id": 528, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1f5c01d1-96", "ovs_interfaceid": "1f5c01d1-9623-425e-8309-336dd1d961fa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1035.627469] env[63028]: DEBUG oslo_vmware.api [None req-700d1c14-0bbf-4780-a97c-2e177b5c2496 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2736137, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.647915] env[63028]: DEBUG oslo_concurrency.lockutils [None req-cb35495d-a0bd-4e98-8bbe-8d2dc2396165 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Acquiring lock "c492dea4-9779-4460-a559-5b82fb0643f0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1035.648266] env[63028]: DEBUG oslo_concurrency.lockutils [None req-cb35495d-a0bd-4e98-8bbe-8d2dc2396165 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Lock "c492dea4-9779-4460-a559-5b82fb0643f0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1035.649185] env[63028]: DEBUG oslo_concurrency.lockutils [None req-cb35495d-a0bd-4e98-8bbe-8d2dc2396165 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Acquiring lock "c492dea4-9779-4460-a559-5b82fb0643f0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1035.649396] env[63028]: DEBUG oslo_concurrency.lockutils [None req-cb35495d-a0bd-4e98-8bbe-8d2dc2396165 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Lock "c492dea4-9779-4460-a559-5b82fb0643f0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1035.649620] env[63028]: DEBUG oslo_concurrency.lockutils [None req-cb35495d-a0bd-4e98-8bbe-8d2dc2396165 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Lock "c492dea4-9779-4460-a559-5b82fb0643f0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1035.652036] env[63028]: INFO nova.compute.manager [None req-cb35495d-a0bd-4e98-8bbe-8d2dc2396165 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: c492dea4-9779-4460-a559-5b82fb0643f0] Terminating instance [ 1035.696136] env[63028]: INFO nova.compute.manager [None req-82e835a5-f72a-41b4-bb91-607fdececc16 tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] [instance: 
da23282a-bbda-47bf-9d9c-337ee9996779] Took 29.52 seconds to build instance. [ 1035.701436] env[63028]: DEBUG oslo_vmware.api [None req-2498bff7-1e43-4867-b0b7-b0701db51cf1 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Task: {'id': task-2736139, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.374708} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.701436] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-2498bff7-1e43-4867-b0b7-b0701db51cf1 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1035.701436] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-2498bff7-1e43-4867-b0b7-b0701db51cf1 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] [instance: b16d85d7-13f3-4be0-8495-2fd2c1476f01] Deleted contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1035.701599] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-2498bff7-1e43-4867-b0b7-b0701db51cf1 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] [instance: b16d85d7-13f3-4be0-8495-2fd2c1476f01] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1035.701835] env[63028]: INFO nova.compute.manager [None req-2498bff7-1e43-4867-b0b7-b0701db51cf1 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] [instance: b16d85d7-13f3-4be0-8495-2fd2c1476f01] Took 1.63 seconds to destroy the instance on the hypervisor. [ 1035.702091] env[63028]: DEBUG oslo.service.loopingcall [None req-2498bff7-1e43-4867-b0b7-b0701db51cf1 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1035.702272] env[63028]: DEBUG nova.compute.manager [-] [instance: b16d85d7-13f3-4be0-8495-2fd2c1476f01] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1035.702363] env[63028]: DEBUG nova.network.neutron [-] [instance: b16d85d7-13f3-4be0-8495-2fd2c1476f01] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1035.883741] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5eecdbbb-21b2-495d-8c7c-e12748cfd367 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1035.914636] env[63028]: DEBUG nova.scheduler.client.report [None req-5d13b0db-b56d-43b7-9f3a-f0646936a40d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1036.093877] env[63028]: DEBUG nova.network.neutron [-] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1036.110613] env[63028]: DEBUG oslo_vmware.api [None req-f0751db6-5ce2-45a6-a330-fe5726ff7979 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52693ac7-6dcd-c029-d21f-0f3a19d37322, 'name': SearchDatastore_Task, 'duration_secs': 0.055875} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.112165] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-49d4e118-fde4-422d-8241-09d855a38553 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.126037] env[63028]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a62e996a-f160-4d38-bd54-f788077b6d57 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.127993] env[63028]: DEBUG oslo_concurrency.lockutils [None req-55a1c518-d590-4078-972f-2184f70fe3bf tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Releasing lock "refresh_cache-56e6ade9-893b-4c85-b0b8-e9f7b12cbad6" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1036.130126] env[63028]: DEBUG oslo_vmware.api [None req-f0751db6-5ce2-45a6-a330-fe5726ff7979 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Waiting for the task: (returnval){ [ 1036.130126] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52c9bbbc-d5d9-87e2-0285-e9ff1769fadc" [ 1036.130126] env[63028]: _type = "Task" [ 1036.130126] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.130126] env[63028]: DEBUG oslo_vmware.api [None req-700d1c14-0bbf-4780-a97c-2e177b5c2496 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2736137, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.60473} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.130418] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-700d1c14-0bbf-4780-a97c-2e177b5c2496 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] b3930760-1888-4f80-85d8-65120a25f275/b3930760-1888-4f80-85d8-65120a25f275.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1036.131227] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-700d1c14-0bbf-4780-a97c-2e177b5c2496 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: b3930760-1888-4f80-85d8-65120a25f275] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1036.134753] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-313a28c5-7e00-466a-a1c9-0f247103410c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.140442] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6b00ca3-ec32-4b26-a16e-d3b6d3419ce7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.163218] env[63028]: DEBUG nova.compute.manager [None req-cb35495d-a0bd-4e98-8bbe-8d2dc2396165 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: c492dea4-9779-4460-a559-5b82fb0643f0] Start destroying the instance on the hypervisor. {{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1036.163459] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-cb35495d-a0bd-4e98-8bbe-8d2dc2396165 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: c492dea4-9779-4460-a559-5b82fb0643f0] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1036.163821] env[63028]: DEBUG oslo_vmware.api [None req-700d1c14-0bbf-4780-a97c-2e177b5c2496 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Waiting for the task: (returnval){ [ 1036.163821] env[63028]: value = "task-2736143" [ 1036.163821] env[63028]: _type = "Task" [ 1036.163821] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.164060] env[63028]: DEBUG oslo_vmware.api [None req-f0751db6-5ce2-45a6-a330-fe5726ff7979 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52c9bbbc-d5d9-87e2-0285-e9ff1769fadc, 'name': SearchDatastore_Task, 'duration_secs': 0.012647} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.165492] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4325bcb-33d0-4d59-bf65-9a8c6c085989 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.169029] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f0751db6-5ce2-45a6-a330-fe5726ff7979 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1036.169029] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0751db6-5ce2-45a6-a330-fe5726ff7979 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] f804ec95-0b97-4960-844d-b678b97fc401/f804ec95-0b97-4960-844d-b678b97fc401.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1036.176018] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-21a78ad9-f49d-474a-85a9-b6f48fa01af9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.190106] env[63028]: DEBUG nova.compute.manager [req-142e4884-6b9f-492b-a79b-674a36c32c99 req-4fda0b00-5639-4ba3-be1b-6f500c6ab621 service nova] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Detach interface failed, port_id=11a8272a-a9ff-4d48-860e-8ee1b781a6ab, reason: Instance 46dc76bc-854f-46ad-9db5-21cf6f40fb21 could not be found. {{(pid=63028) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1036.194721] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-cb35495d-a0bd-4e98-8bbe-8d2dc2396165 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: c492dea4-9779-4460-a559-5b82fb0643f0] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1036.198385] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ec9c63b5-9bdb-4f58-8b6f-a5f208ae6dae {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.199951] env[63028]: DEBUG oslo_vmware.api [None req-700d1c14-0bbf-4780-a97c-2e177b5c2496 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2736143, 'name': ExtendVirtualDisk_Task} progress is 50%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.200331] env[63028]: DEBUG oslo_concurrency.lockutils [None req-82e835a5-f72a-41b4-bb91-607fdececc16 tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] Lock "da23282a-bbda-47bf-9d9c-337ee9996779" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.036s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1036.200923] env[63028]: DEBUG oslo_vmware.api [None req-f0751db6-5ce2-45a6-a330-fe5726ff7979 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Waiting for the task: (returnval){ [ 1036.200923] env[63028]: value = "task-2736144" [ 1036.200923] env[63028]: _type = "Task" [ 1036.200923] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.211965] env[63028]: DEBUG oslo_vmware.api [None req-f0751db6-5ce2-45a6-a330-fe5726ff7979 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Task: {'id': task-2736144, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.271836] env[63028]: DEBUG nova.network.neutron [req-6029a122-195f-4006-bed6-82fc79a0e906 req-c6970dee-e58a-4368-9ee3-e2c2b9be3085 service nova] [instance: b16d85d7-13f3-4be0-8495-2fd2c1476f01] Updated VIF entry in instance network info cache for port 47a80dc6-e819-410f-8257-cfc2cce0d3d9. {{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1036.272297] env[63028]: DEBUG nova.network.neutron [req-6029a122-195f-4006-bed6-82fc79a0e906 req-c6970dee-e58a-4368-9ee3-e2c2b9be3085 service nova] [instance: b16d85d7-13f3-4be0-8495-2fd2c1476f01] Updating instance_info_cache with network_info: [{"id": "47a80dc6-e819-410f-8257-cfc2cce0d3d9", "address": "fa:16:3e:b7:57:e4", "network": {"id": "8fd5dd35-416e-45e2-aea5-8c2c22752ef7", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1819490475-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bae448aa28a84aa6863fffc24a5448fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "489b2441-7132-4942-8b61-49cf0ad4400e", "external-id": "nsx-vlan-transportzone-971", "segmentation_id": 971, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47a80dc6-e8", "ovs_interfaceid": "47a80dc6-e819-410f-8257-cfc2cce0d3d9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1036.283671] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-cb35495d-a0bd-4e98-8bbe-8d2dc2396165 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: 
c492dea4-9779-4460-a559-5b82fb0643f0] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1036.283929] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-cb35495d-a0bd-4e98-8bbe-8d2dc2396165 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: c492dea4-9779-4460-a559-5b82fb0643f0] Deleting contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1036.284106] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-cb35495d-a0bd-4e98-8bbe-8d2dc2396165 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Deleting the datastore file [datastore1] c492dea4-9779-4460-a559-5b82fb0643f0 {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1036.285074] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-927d57e6-2806-4c8d-9b8d-af76eab08158 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.293563] env[63028]: DEBUG oslo_vmware.api [None req-cb35495d-a0bd-4e98-8bbe-8d2dc2396165 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Waiting for the task: (returnval){ [ 1036.293563] env[63028]: value = "task-2736146" [ 1036.293563] env[63028]: _type = "Task" [ 1036.293563] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.302340] env[63028]: DEBUG oslo_vmware.api [None req-cb35495d-a0bd-4e98-8bbe-8d2dc2396165 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2736146, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.325170] env[63028]: DEBUG oslo_concurrency.lockutils [None req-42208f00-4ed8-4f3a-a14c-9aad1270b8df tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] Acquiring lock "da23282a-bbda-47bf-9d9c-337ee9996779" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1036.325437] env[63028]: DEBUG oslo_concurrency.lockutils [None req-42208f00-4ed8-4f3a-a14c-9aad1270b8df tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] Lock "da23282a-bbda-47bf-9d9c-337ee9996779" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1036.325653] env[63028]: DEBUG oslo_concurrency.lockutils [None req-42208f00-4ed8-4f3a-a14c-9aad1270b8df tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] Acquiring lock "da23282a-bbda-47bf-9d9c-337ee9996779-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1036.325839] env[63028]: DEBUG oslo_concurrency.lockutils [None req-42208f00-4ed8-4f3a-a14c-9aad1270b8df tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] Lock "da23282a-bbda-47bf-9d9c-337ee9996779-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1036.326138] env[63028]: DEBUG oslo_concurrency.lockutils [None req-42208f00-4ed8-4f3a-a14c-9aad1270b8df tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] Lock "da23282a-bbda-47bf-9d9c-337ee9996779-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1036.328767] env[63028]: INFO nova.compute.manager [None req-42208f00-4ed8-4f3a-a14c-9aad1270b8df tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] [instance: da23282a-bbda-47bf-9d9c-337ee9996779] Terminating instance [ 1036.416150] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5d13b0db-b56d-43b7-9f3a-f0646936a40d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.510s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1036.416838] env[63028]: DEBUG nova.compute.manager [None req-5d13b0db-b56d-43b7-9f3a-f0646936a40d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 60d18f14-536a-4b0f-912b-21f3f5a30d28] Start building networks asynchronously for instance. 
{{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1036.419496] env[63028]: DEBUG oslo_concurrency.lockutils [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.505s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1036.420921] env[63028]: INFO nova.compute.claims [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 3566ab6f-1f8a-472d-9efb-47fa2520a215] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1036.580618] env[63028]: DEBUG nova.network.neutron [-] [instance: b16d85d7-13f3-4be0-8495-2fd2c1476f01] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1036.604744] env[63028]: INFO nova.compute.manager [-] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Took 2.16 seconds to deallocate network for instance. [ 1036.679940] env[63028]: DEBUG oslo_vmware.api [None req-700d1c14-0bbf-4780-a97c-2e177b5c2496 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2736143, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069769} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.680889] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-700d1c14-0bbf-4780-a97c-2e177b5c2496 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: b3930760-1888-4f80-85d8-65120a25f275] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1036.681250] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c7c89b6-496b-44a7-ba03-a3d15c97c904 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.704976] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-700d1c14-0bbf-4780-a97c-2e177b5c2496 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: b3930760-1888-4f80-85d8-65120a25f275] Reconfiguring VM instance instance-00000061 to attach disk [datastore2] b3930760-1888-4f80-85d8-65120a25f275/b3930760-1888-4f80-85d8-65120a25f275.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1036.704976] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ffeed255-d248-43f0-82f6-dc87c1900767 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.727259] env[63028]: DEBUG oslo_vmware.api [None req-f0751db6-5ce2-45a6-a330-fe5726ff7979 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Task: {'id': task-2736144, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.728563] env[63028]: DEBUG oslo_vmware.api [None req-700d1c14-0bbf-4780-a97c-2e177b5c2496 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Waiting for the task: (returnval){ [ 1036.728563] env[63028]: value = "task-2736147" [ 1036.728563] env[63028]: _type = "Task" [ 1036.728563] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.735892] env[63028]: DEBUG oslo_vmware.api [None req-700d1c14-0bbf-4780-a97c-2e177b5c2496 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2736147, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.775225] env[63028]: DEBUG oslo_concurrency.lockutils [req-6029a122-195f-4006-bed6-82fc79a0e906 req-c6970dee-e58a-4368-9ee3-e2c2b9be3085 service nova] Releasing lock "refresh_cache-b16d85d7-13f3-4be0-8495-2fd2c1476f01" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1036.803658] env[63028]: DEBUG oslo_vmware.api [None req-cb35495d-a0bd-4e98-8bbe-8d2dc2396165 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2736146, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.832450] env[63028]: DEBUG nova.compute.manager [None req-42208f00-4ed8-4f3a-a14c-9aad1270b8df tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] [instance: da23282a-bbda-47bf-9d9c-337ee9996779] Start destroying the instance on the hypervisor. 
{{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1036.832450] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-42208f00-4ed8-4f3a-a14c-9aad1270b8df tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] [instance: da23282a-bbda-47bf-9d9c-337ee9996779] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1036.833344] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c99d0898-a00d-4954-8d3f-3d9b1b0c64eb {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.840576] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-42208f00-4ed8-4f3a-a14c-9aad1270b8df tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] [instance: da23282a-bbda-47bf-9d9c-337ee9996779] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1036.840812] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-09609d8f-a9fa-4b12-b507-fc94b52bce7d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.846738] env[63028]: DEBUG oslo_vmware.api [None req-42208f00-4ed8-4f3a-a14c-9aad1270b8df tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] Waiting for the task: (returnval){ [ 1036.846738] env[63028]: value = "task-2736148" [ 1036.846738] env[63028]: _type = "Task" [ 1036.846738] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.854867] env[63028]: DEBUG oslo_vmware.api [None req-42208f00-4ed8-4f3a-a14c-9aad1270b8df tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] Task: {'id': task-2736148, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.925947] env[63028]: DEBUG nova.compute.utils [None req-5d13b0db-b56d-43b7-9f3a-f0646936a40d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1036.930641] env[63028]: DEBUG nova.compute.manager [None req-5d13b0db-b56d-43b7-9f3a-f0646936a40d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 60d18f14-536a-4b0f-912b-21f3f5a30d28] Allocating IP information in the background. 
{{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1036.930868] env[63028]: DEBUG nova.network.neutron [None req-5d13b0db-b56d-43b7-9f3a-f0646936a40d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 60d18f14-536a-4b0f-912b-21f3f5a30d28] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1036.983672] env[63028]: DEBUG nova.policy [None req-5d13b0db-b56d-43b7-9f3a-f0646936a40d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '25218cd4756d409c9fee41c970fb2d32', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e85128c5c889438bbb1df571b7756c6a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 1037.085071] env[63028]: INFO nova.compute.manager [-] [instance: b16d85d7-13f3-4be0-8495-2fd2c1476f01] Took 1.38 seconds to deallocate network for instance. [ 1037.111440] env[63028]: DEBUG oslo_concurrency.lockutils [None req-87d04e4f-fe7d-4082-b35e-2aecbc7aed47 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1037.214667] env[63028]: DEBUG oslo_vmware.api [None req-f0751db6-5ce2-45a6-a330-fe5726ff7979 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Task: {'id': task-2736144, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.238135] env[63028]: DEBUG oslo_vmware.api [None req-700d1c14-0bbf-4780-a97c-2e177b5c2496 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2736147, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.305565] env[63028]: DEBUG oslo_vmware.api [None req-cb35495d-a0bd-4e98-8bbe-8d2dc2396165 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2736146, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.589618} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.305845] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-cb35495d-a0bd-4e98-8bbe-8d2dc2396165 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1037.305997] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-cb35495d-a0bd-4e98-8bbe-8d2dc2396165 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: c492dea4-9779-4460-a559-5b82fb0643f0] Deleted contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1037.306185] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-cb35495d-a0bd-4e98-8bbe-8d2dc2396165 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: c492dea4-9779-4460-a559-5b82fb0643f0] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1037.306359] env[63028]: INFO nova.compute.manager [None req-cb35495d-a0bd-4e98-8bbe-8d2dc2396165 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: c492dea4-9779-4460-a559-5b82fb0643f0] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1037.306595] env[63028]: DEBUG oslo.service.loopingcall [None req-cb35495d-a0bd-4e98-8bbe-8d2dc2396165 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1037.306782] env[63028]: DEBUG nova.compute.manager [-] [instance: c492dea4-9779-4460-a559-5b82fb0643f0] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1037.306917] env[63028]: DEBUG nova.network.neutron [-] [instance: c492dea4-9779-4460-a559-5b82fb0643f0] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1037.356560] env[63028]: DEBUG oslo_vmware.api [None req-42208f00-4ed8-4f3a-a14c-9aad1270b8df tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] Task: {'id': task-2736148, 'name': PowerOffVM_Task, 'duration_secs': 0.202672} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.356845] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-42208f00-4ed8-4f3a-a14c-9aad1270b8df tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] [instance: da23282a-bbda-47bf-9d9c-337ee9996779] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1037.357046] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-42208f00-4ed8-4f3a-a14c-9aad1270b8df tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] [instance: da23282a-bbda-47bf-9d9c-337ee9996779] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1037.357304] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c4e52a2d-ef45-439b-82f2-6f9687a09aa6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.435021] env[63028]: DEBUG nova.compute.manager [None req-5d13b0db-b56d-43b7-9f3a-f0646936a40d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 60d18f14-536a-4b0f-912b-21f3f5a30d28] Start building block device mappings for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1037.446248] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-42208f00-4ed8-4f3a-a14c-9aad1270b8df tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] [instance: da23282a-bbda-47bf-9d9c-337ee9996779] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1037.446467] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-42208f00-4ed8-4f3a-a14c-9aad1270b8df tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] [instance: da23282a-bbda-47bf-9d9c-337ee9996779] Deleting contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1037.446650] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-42208f00-4ed8-4f3a-a14c-9aad1270b8df tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] Deleting the datastore file [datastore1] da23282a-bbda-47bf-9d9c-337ee9996779 {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1037.446911] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-53c0b1d8-ae48-4fa0-82ee-1f7131e3464d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.455169] env[63028]: DEBUG oslo_vmware.api [None req-42208f00-4ed8-4f3a-a14c-9aad1270b8df tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] Waiting for the task: (returnval){ [ 1037.455169] env[63028]: value = "task-2736150" [ 1037.455169] env[63028]: _type = "Task" [ 1037.455169] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.463445] env[63028]: DEBUG oslo_vmware.api [None req-42208f00-4ed8-4f3a-a14c-9aad1270b8df tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] Task: {'id': task-2736150, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.478893] env[63028]: DEBUG nova.network.neutron [None req-5d13b0db-b56d-43b7-9f3a-f0646936a40d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 60d18f14-536a-4b0f-912b-21f3f5a30d28] Successfully created port: 2e1ed2b6-3ab4-47dc-9247-d7c30d3bc291 {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1037.596007] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2498bff7-1e43-4867-b0b7-b0701db51cf1 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1037.646638] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8212194-5675-45c7-bbed-9bd2c8cdc168 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.666244] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-55a1c518-d590-4078-972f-2184f70fe3bf tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 56e6ade9-893b-4c85-b0b8-e9f7b12cbad6] Updating instance '56e6ade9-893b-4c85-b0b8-e9f7b12cbad6' progress to 0 {{(pid=63028) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1037.673632] env[63028]: DEBUG nova.compute.manager [req-be8d411e-f628-4f1a-9908-d988420e5245 req-213cdac6-4797-49fd-81d6-09eabf34ba37 service nova] [instance: b16d85d7-13f3-4be0-8495-2fd2c1476f01] Received event network-vif-deleted-47a80dc6-e819-410f-8257-cfc2cce0d3d9 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1037.722967] env[63028]: DEBUG oslo_vmware.api [None req-f0751db6-5ce2-45a6-a330-fe5726ff7979 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Task: {'id': task-2736144, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.02061} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.724410] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0751db6-5ce2-45a6-a330-fe5726ff7979 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] f804ec95-0b97-4960-844d-b678b97fc401/f804ec95-0b97-4960-844d-b678b97fc401.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1037.724703] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-f0751db6-5ce2-45a6-a330-fe5726ff7979 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] [instance: f804ec95-0b97-4960-844d-b678b97fc401] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1037.724995] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fde10ab1-c843-4eef-abf5-48a45d67332a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.734760] env[63028]: DEBUG oslo_vmware.api [None req-f0751db6-5ce2-45a6-a330-fe5726ff7979 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Waiting for the task: (returnval){ [ 1037.734760] env[63028]: value = "task-2736151" [ 1037.734760] env[63028]: _type = "Task" [ 1037.734760] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.743956] env[63028]: DEBUG oslo_vmware.api [None req-700d1c14-0bbf-4780-a97c-2e177b5c2496 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2736147, 'name': ReconfigVM_Task, 'duration_secs': 0.672621} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.744975] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-700d1c14-0bbf-4780-a97c-2e177b5c2496 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: b3930760-1888-4f80-85d8-65120a25f275] Reconfigured VM instance instance-00000061 to attach disk [datastore2] b3930760-1888-4f80-85d8-65120a25f275/b3930760-1888-4f80-85d8-65120a25f275.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1037.745653] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2ec9134a-0f44-4873-b904-c197b64590f1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.749837] env[63028]: DEBUG oslo_vmware.api [None req-f0751db6-5ce2-45a6-a330-fe5726ff7979 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Task: {'id': task-2736151, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.755082] env[63028]: DEBUG oslo_vmware.api [None req-700d1c14-0bbf-4780-a97c-2e177b5c2496 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Waiting for the task: (returnval){ [ 1037.755082] env[63028]: value = "task-2736152" [ 1037.755082] env[63028]: _type = "Task" [ 1037.755082] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.765552] env[63028]: DEBUG oslo_vmware.api [None req-700d1c14-0bbf-4780-a97c-2e177b5c2496 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2736152, 'name': Rename_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.846894] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49627c98-9561-40b9-bf67-1ef27bb9a8e3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.854307] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d32968c2-463c-4c3e-836c-b8cd7e2637fe {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.887248] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da45ba6c-a87d-4346-b1c3-3da1cb807f0e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.895362] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ddf9a9a-c96c-4da5-be77-06d906e2fd33 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.910712] env[63028]: DEBUG nova.compute.provider_tree [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1037.964608] env[63028]: DEBUG oslo_vmware.api [None req-42208f00-4ed8-4f3a-a14c-9aad1270b8df tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] Task: {'id': task-2736150, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.460835} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.965620] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-42208f00-4ed8-4f3a-a14c-9aad1270b8df tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1037.965620] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-42208f00-4ed8-4f3a-a14c-9aad1270b8df tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] [instance: da23282a-bbda-47bf-9d9c-337ee9996779] Deleted contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1037.965620] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-42208f00-4ed8-4f3a-a14c-9aad1270b8df tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] [instance: da23282a-bbda-47bf-9d9c-337ee9996779] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1037.969693] env[63028]: INFO nova.compute.manager [None req-42208f00-4ed8-4f3a-a14c-9aad1270b8df tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] [instance: da23282a-bbda-47bf-9d9c-337ee9996779] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1037.969693] env[63028]: DEBUG oslo.service.loopingcall [None req-42208f00-4ed8-4f3a-a14c-9aad1270b8df tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1037.969693] env[63028]: DEBUG nova.compute.manager [-] [instance: da23282a-bbda-47bf-9d9c-337ee9996779] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1037.969693] env[63028]: DEBUG nova.network.neutron [-] [instance: da23282a-bbda-47bf-9d9c-337ee9996779] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1038.176626] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-55a1c518-d590-4078-972f-2184f70fe3bf tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 56e6ade9-893b-4c85-b0b8-e9f7b12cbad6] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1038.176840] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1df32369-4f44-4b94-9b8f-b430bef773bb {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.185435] env[63028]: DEBUG oslo_vmware.api [None req-55a1c518-d590-4078-972f-2184f70fe3bf tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Waiting for the task: (returnval){ [ 1038.185435] env[63028]: value = "task-2736154" [ 1038.185435] env[63028]: _type = "Task" [ 1038.185435] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.194092] env[63028]: DEBUG oslo_vmware.api [None req-55a1c518-d590-4078-972f-2184f70fe3bf tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2736154, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.225034] env[63028]: DEBUG nova.network.neutron [-] [instance: c492dea4-9779-4460-a559-5b82fb0643f0] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1038.247584] env[63028]: DEBUG oslo_vmware.api [None req-f0751db6-5ce2-45a6-a330-fe5726ff7979 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Task: {'id': task-2736151, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06784} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.247988] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-f0751db6-5ce2-45a6-a330-fe5726ff7979 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] [instance: f804ec95-0b97-4960-844d-b678b97fc401] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1038.249099] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-618e2956-65b1-4221-9245-35d841b1dbc1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.272926] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-f0751db6-5ce2-45a6-a330-fe5726ff7979 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] [instance: f804ec95-0b97-4960-844d-b678b97fc401] Reconfiguring VM instance instance-00000062 to attach disk [datastore2] f804ec95-0b97-4960-844d-b678b97fc401/f804ec95-0b97-4960-844d-b678b97fc401.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1038.276612] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d5b9b052-ede1-4f20-b149-83fabdebfc08 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.298355] env[63028]: DEBUG oslo_vmware.api [None req-700d1c14-0bbf-4780-a97c-2e177b5c2496 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2736152, 'name': Rename_Task, 'duration_secs': 0.136838} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.302017] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-700d1c14-0bbf-4780-a97c-2e177b5c2496 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: b3930760-1888-4f80-85d8-65120a25f275] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1038.302017] env[63028]: DEBUG oslo_vmware.api [None req-f0751db6-5ce2-45a6-a330-fe5726ff7979 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Waiting for the task: (returnval){ [ 1038.302017] env[63028]: value = "task-2736155" [ 1038.302017] env[63028]: _type = "Task" [ 1038.302017] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.302017] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2cf5f04b-5ecb-4d83-a5b7-706082d87708 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.314804] env[63028]: DEBUG oslo_vmware.api [None req-f0751db6-5ce2-45a6-a330-fe5726ff7979 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Task: {'id': task-2736155, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.314804] env[63028]: DEBUG oslo_vmware.api [None req-700d1c14-0bbf-4780-a97c-2e177b5c2496 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Waiting for the task: (returnval){ [ 1038.314804] env[63028]: value = "task-2736156" [ 1038.314804] env[63028]: _type = "Task" [ 1038.314804] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.324294] env[63028]: DEBUG oslo_vmware.api [None req-700d1c14-0bbf-4780-a97c-2e177b5c2496 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2736156, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.413752] env[63028]: DEBUG nova.scheduler.client.report [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1039.175341] env[63028]: DEBUG nova.compute.manager [None req-5d13b0db-b56d-43b7-9f3a-f0646936a40d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 60d18f14-536a-4b0f-912b-21f3f5a30d28] Start spawning the instance on the hypervisor. 
{{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1039.178829] env[63028]: DEBUG nova.network.neutron [-] [instance: da23282a-bbda-47bf-9d9c-337ee9996779] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1039.179955] env[63028]: INFO nova.compute.manager [-] [instance: c492dea4-9779-4460-a559-5b82fb0643f0] Took 1.87 seconds to deallocate network for instance. [ 1039.180618] env[63028]: DEBUG oslo_concurrency.lockutils [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.761s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1039.181069] env[63028]: DEBUG nova.compute.manager [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 3566ab6f-1f8a-472d-9efb-47fa2520a215] Start building networks asynchronously for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1039.184526] env[63028]: DEBUG nova.compute.manager [req-3d4f52fb-f6d8-4ff4-aded-7f59bee049f0 req-a30248dc-315c-4bdf-941f-778bcb462414 service nova] [instance: 60d18f14-536a-4b0f-912b-21f3f5a30d28] Received event network-vif-plugged-2e1ed2b6-3ab4-47dc-9247-d7c30d3bc291 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1039.184737] env[63028]: DEBUG oslo_concurrency.lockutils [req-3d4f52fb-f6d8-4ff4-aded-7f59bee049f0 req-a30248dc-315c-4bdf-941f-778bcb462414 service nova] Acquiring lock "60d18f14-536a-4b0f-912b-21f3f5a30d28-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1039.184952] env[63028]: DEBUG oslo_concurrency.lockutils [req-3d4f52fb-f6d8-4ff4-aded-7f59bee049f0 req-a30248dc-315c-4bdf-941f-778bcb462414 service nova] Lock "60d18f14-536a-4b0f-912b-21f3f5a30d28-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1039.185138] env[63028]: DEBUG oslo_concurrency.lockutils [req-3d4f52fb-f6d8-4ff4-aded-7f59bee049f0 req-a30248dc-315c-4bdf-941f-778bcb462414 service nova] Lock "60d18f14-536a-4b0f-912b-21f3f5a30d28-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1039.185304] env[63028]: DEBUG nova.compute.manager [req-3d4f52fb-f6d8-4ff4-aded-7f59bee049f0 req-a30248dc-315c-4bdf-941f-778bcb462414 service nova] [instance: 60d18f14-536a-4b0f-912b-21f3f5a30d28] No waiting events found dispatching network-vif-plugged-2e1ed2b6-3ab4-47dc-9247-d7c30d3bc291 {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1039.185496] env[63028]: WARNING nova.compute.manager [req-3d4f52fb-f6d8-4ff4-aded-7f59bee049f0 req-a30248dc-315c-4bdf-941f-778bcb462414 service nova] [instance: 60d18f14-536a-4b0f-912b-21f3f5a30d28] Received unexpected event network-vif-plugged-2e1ed2b6-3ab4-47dc-9247-d7c30d3bc291 for instance 
with vm_state building and task_state spawning. [ 1039.196382] env[63028]: DEBUG oslo_concurrency.lockutils [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.265s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1039.197818] env[63028]: INFO nova.compute.claims [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 1cf111f2-df5e-48a6-905a-bc2d3ea45202] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1039.211620] env[63028]: DEBUG oslo_vmware.api [None req-700d1c14-0bbf-4780-a97c-2e177b5c2496 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2736156, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.213868] env[63028]: DEBUG nova.virt.hardware [None req-5d13b0db-b56d-43b7-9f3a-f0646936a40d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1039.214109] env[63028]: DEBUG nova.virt.hardware [None req-5d13b0db-b56d-43b7-9f3a-f0646936a40d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1039.214270] env[63028]: DEBUG nova.virt.hardware [None req-5d13b0db-b56d-43b7-9f3a-f0646936a40d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1039.214454] env[63028]: DEBUG nova.virt.hardware [None req-5d13b0db-b56d-43b7-9f3a-f0646936a40d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1039.214667] env[63028]: DEBUG nova.virt.hardware [None req-5d13b0db-b56d-43b7-9f3a-f0646936a40d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1039.214880] env[63028]: DEBUG nova.virt.hardware [None req-5d13b0db-b56d-43b7-9f3a-f0646936a40d tempest-ServerActionsTestJSON-83366727 
tempest-ServerActionsTestJSON-83366727-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1039.215116] env[63028]: DEBUG nova.virt.hardware [None req-5d13b0db-b56d-43b7-9f3a-f0646936a40d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1039.215282] env[63028]: DEBUG nova.virt.hardware [None req-5d13b0db-b56d-43b7-9f3a-f0646936a40d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1039.215453] env[63028]: DEBUG nova.virt.hardware [None req-5d13b0db-b56d-43b7-9f3a-f0646936a40d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1039.215613] env[63028]: DEBUG nova.virt.hardware [None req-5d13b0db-b56d-43b7-9f3a-f0646936a40d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1039.215782] env[63028]: DEBUG nova.virt.hardware [None req-5d13b0db-b56d-43b7-9f3a-f0646936a40d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1039.222457] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15988290-b856-4c92-b4f2-0c91e391161d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.225659] env[63028]: DEBUG oslo_vmware.api [None req-f0751db6-5ce2-45a6-a330-fe5726ff7979 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Task: {'id': task-2736155, 'name': ReconfigVM_Task, 'duration_secs': 0.293131} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.225942] env[63028]: DEBUG oslo_vmware.api [None req-55a1c518-d590-4078-972f-2184f70fe3bf tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2736154, 'name': PowerOffVM_Task, 'duration_secs': 0.226754} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.226292] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-f0751db6-5ce2-45a6-a330-fe5726ff7979 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] [instance: f804ec95-0b97-4960-844d-b678b97fc401] Reconfigured VM instance instance-00000062 to attach disk [datastore2] f804ec95-0b97-4960-844d-b678b97fc401/f804ec95-0b97-4960-844d-b678b97fc401.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1039.226783] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-55a1c518-d590-4078-972f-2184f70fe3bf tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 56e6ade9-893b-4c85-b0b8-e9f7b12cbad6] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1039.226963] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-55a1c518-d590-4078-972f-2184f70fe3bf tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 56e6ade9-893b-4c85-b0b8-e9f7b12cbad6] Updating instance '56e6ade9-893b-4c85-b0b8-e9f7b12cbad6' progress to 17 {{(pid=63028) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1039.230605] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6a246d6c-3ac2-461b-baa5-83715f79c172 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.236826] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1016a241-3baf-4907-b709-cdce59804ef6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.241966] env[63028]: DEBUG oslo_vmware.api [None req-f0751db6-5ce2-45a6-a330-fe5726ff7979 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Waiting for the task: (returnval){ [ 1039.241966] env[63028]: value = "task-2736157" [ 1039.241966] env[63028]: _type = "Task" [ 1039.241966] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.262723] env[63028]: DEBUG oslo_vmware.api [None req-f0751db6-5ce2-45a6-a330-fe5726ff7979 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Task: {'id': task-2736157, 'name': Rename_Task} progress is 10%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.425130] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1039.425255] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1039.630191] env[63028]: DEBUG nova.network.neutron [None req-5d13b0db-b56d-43b7-9f3a-f0646936a40d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 60d18f14-536a-4b0f-912b-21f3f5a30d28] Successfully updated port: 2e1ed2b6-3ab4-47dc-9247-d7c30d3bc291 {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1039.686309] env[63028]: INFO nova.compute.manager [-] [instance: da23282a-bbda-47bf-9d9c-337ee9996779] Took 1.72 seconds to deallocate network for instance. [ 1039.687581] env[63028]: DEBUG nova.compute.utils [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1039.693292] env[63028]: DEBUG nova.compute.manager [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 3566ab6f-1f8a-472d-9efb-47fa2520a215] Allocating IP information in the background. {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1039.693464] env[63028]: DEBUG nova.network.neutron [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 3566ab6f-1f8a-472d-9efb-47fa2520a215] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1039.695246] env[63028]: DEBUG oslo_vmware.api [None req-700d1c14-0bbf-4780-a97c-2e177b5c2496 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2736156, 'name': PowerOnVM_Task, 'duration_secs': 0.995861} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.698828] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-700d1c14-0bbf-4780-a97c-2e177b5c2496 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: b3930760-1888-4f80-85d8-65120a25f275] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1039.699115] env[63028]: INFO nova.compute.manager [None req-700d1c14-0bbf-4780-a97c-2e177b5c2496 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: b3930760-1888-4f80-85d8-65120a25f275] Took 10.95 seconds to spawn the instance on the hypervisor. 
[ 1039.699301] env[63028]: DEBUG nova.compute.manager [None req-700d1c14-0bbf-4780-a97c-2e177b5c2496 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: b3930760-1888-4f80-85d8-65120a25f275] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1039.700807] env[63028]: DEBUG nova.compute.manager [req-33b4e2d0-9d8b-4f68-97f5-63f479d8b1c7 req-887270df-b19e-4905-b3f9-74a38fc8c424 service nova] [instance: c492dea4-9779-4460-a559-5b82fb0643f0] Received event network-vif-deleted-7a1202c7-93f9-43c0-ab7e-9c1c8a578e37 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1039.701050] env[63028]: DEBUG nova.compute.manager [req-33b4e2d0-9d8b-4f68-97f5-63f479d8b1c7 req-887270df-b19e-4905-b3f9-74a38fc8c424 service nova] [instance: da23282a-bbda-47bf-9d9c-337ee9996779] Received event network-vif-deleted-677d4bb7-eef2-4be9-9671-9c92ee555906 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1039.701231] env[63028]: DEBUG nova.compute.manager [req-33b4e2d0-9d8b-4f68-97f5-63f479d8b1c7 req-887270df-b19e-4905-b3f9-74a38fc8c424 service nova] [instance: 60d18f14-536a-4b0f-912b-21f3f5a30d28] Received event network-changed-2e1ed2b6-3ab4-47dc-9247-d7c30d3bc291 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1039.701389] env[63028]: DEBUG nova.compute.manager [req-33b4e2d0-9d8b-4f68-97f5-63f479d8b1c7 req-887270df-b19e-4905-b3f9-74a38fc8c424 service nova] [instance: 60d18f14-536a-4b0f-912b-21f3f5a30d28] Refreshing instance network info cache due to event network-changed-2e1ed2b6-3ab4-47dc-9247-d7c30d3bc291. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1039.701586] env[63028]: DEBUG oslo_concurrency.lockutils [req-33b4e2d0-9d8b-4f68-97f5-63f479d8b1c7 req-887270df-b19e-4905-b3f9-74a38fc8c424 service nova] Acquiring lock "refresh_cache-60d18f14-536a-4b0f-912b-21f3f5a30d28" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1039.701725] env[63028]: DEBUG oslo_concurrency.lockutils [req-33b4e2d0-9d8b-4f68-97f5-63f479d8b1c7 req-887270df-b19e-4905-b3f9-74a38fc8c424 service nova] Acquired lock "refresh_cache-60d18f14-536a-4b0f-912b-21f3f5a30d28" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1039.701876] env[63028]: DEBUG nova.network.neutron [req-33b4e2d0-9d8b-4f68-97f5-63f479d8b1c7 req-887270df-b19e-4905-b3f9-74a38fc8c424 service nova] [instance: 60d18f14-536a-4b0f-912b-21f3f5a30d28] Refreshing network info cache for port 2e1ed2b6-3ab4-47dc-9247-d7c30d3bc291 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1039.703692] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0c0d15f-7810-44d7-a512-00b44e2fe6c7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.710245] env[63028]: DEBUG oslo_concurrency.lockutils [None req-cb35495d-a0bd-4e98-8bbe-8d2dc2396165 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1039.735894] env[63028]: DEBUG nova.virt.hardware [None 
req-55a1c518-d590-4078-972f-2184f70fe3bf tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1039.736166] env[63028]: DEBUG nova.virt.hardware [None req-55a1c518-d590-4078-972f-2184f70fe3bf tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1039.736327] env[63028]: DEBUG nova.virt.hardware [None req-55a1c518-d590-4078-972f-2184f70fe3bf tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1039.736509] env[63028]: DEBUG nova.virt.hardware [None req-55a1c518-d590-4078-972f-2184f70fe3bf tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1039.736656] env[63028]: DEBUG nova.virt.hardware [None req-55a1c518-d590-4078-972f-2184f70fe3bf tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1039.736804] env[63028]: DEBUG nova.virt.hardware [None req-55a1c518-d590-4078-972f-2184f70fe3bf tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1039.737009] env[63028]: DEBUG nova.virt.hardware [None req-55a1c518-d590-4078-972f-2184f70fe3bf tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1039.737177] env[63028]: DEBUG nova.virt.hardware [None req-55a1c518-d590-4078-972f-2184f70fe3bf tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1039.737341] env[63028]: DEBUG nova.virt.hardware [None req-55a1c518-d590-4078-972f-2184f70fe3bf tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 
1039.737502] env[63028]: DEBUG nova.virt.hardware [None req-55a1c518-d590-4078-972f-2184f70fe3bf tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1039.737673] env[63028]: DEBUG nova.virt.hardware [None req-55a1c518-d590-4078-972f-2184f70fe3bf tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1039.744373] env[63028]: DEBUG nova.policy [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2169c5f761b3452bb04fdf14cf6f1ff5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c83df00f440248ca9e84394ce6365144', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 1039.746169] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6e37b779-2e2a-4f76-9743-67973113828d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.766126] env[63028]: DEBUG oslo_vmware.api [None req-f0751db6-5ce2-45a6-a330-fe5726ff7979 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Task: {'id': task-2736157, 'name': Rename_Task, 'duration_secs': 0.21496} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.767332] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0751db6-5ce2-45a6-a330-fe5726ff7979 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] [instance: f804ec95-0b97-4960-844d-b678b97fc401] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1039.767644] env[63028]: DEBUG oslo_vmware.api [None req-55a1c518-d590-4078-972f-2184f70fe3bf tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Waiting for the task: (returnval){ [ 1039.767644] env[63028]: value = "task-2736158" [ 1039.767644] env[63028]: _type = "Task" [ 1039.767644] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.767821] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d5f35ebf-2be2-47aa-a9ef-bb234baafc66 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.777010] env[63028]: DEBUG oslo_vmware.api [None req-55a1c518-d590-4078-972f-2184f70fe3bf tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2736158, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.778137] env[63028]: DEBUG oslo_vmware.api [None req-f0751db6-5ce2-45a6-a330-fe5726ff7979 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Waiting for the task: (returnval){ [ 1039.778137] env[63028]: value = "task-2736159" [ 1039.778137] env[63028]: _type = "Task" [ 1039.778137] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.785619] env[63028]: DEBUG oslo_vmware.api [None req-f0751db6-5ce2-45a6-a330-fe5726ff7979 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Task: {'id': task-2736159, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.935507] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1039.935852] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1039.935952] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1039.936121] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1039.936271] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1039.936414] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1039.936709] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=63028) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}} [ 1039.936709] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1040.046537] env[63028]: DEBUG nova.network.neutron [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 3566ab6f-1f8a-472d-9efb-47fa2520a215] Successfully created port: 4d0af6d0-21b5-49d7-a857-43d30a0a2514 {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1040.132038] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5d13b0db-b56d-43b7-9f3a-f0646936a40d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Acquiring lock "refresh_cache-60d18f14-536a-4b0f-912b-21f3f5a30d28" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1040.196885] env[63028]: DEBUG nova.compute.manager [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 3566ab6f-1f8a-472d-9efb-47fa2520a215] Start building block device mappings for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1040.203761] env[63028]: DEBUG oslo_concurrency.lockutils [None req-42208f00-4ed8-4f3a-a14c-9aad1270b8df tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1040.228457] env[63028]: INFO nova.compute.manager [None req-700d1c14-0bbf-4780-a97c-2e177b5c2496 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: b3930760-1888-4f80-85d8-65120a25f275] Took 33.93 seconds to build instance. [ 1040.281501] env[63028]: DEBUG oslo_vmware.api [None req-55a1c518-d590-4078-972f-2184f70fe3bf tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2736158, 'name': ReconfigVM_Task, 'duration_secs': 0.250098} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.286814] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-55a1c518-d590-4078-972f-2184f70fe3bf tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 56e6ade9-893b-4c85-b0b8-e9f7b12cbad6] Updating instance '56e6ade9-893b-4c85-b0b8-e9f7b12cbad6' progress to 33 {{(pid=63028) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1040.289278] env[63028]: DEBUG nova.network.neutron [req-33b4e2d0-9d8b-4f68-97f5-63f479d8b1c7 req-887270df-b19e-4905-b3f9-74a38fc8c424 service nova] [instance: 60d18f14-536a-4b0f-912b-21f3f5a30d28] Instance cache missing network info. 
{{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1040.297240] env[63028]: DEBUG oslo_vmware.api [None req-f0751db6-5ce2-45a6-a330-fe5726ff7979 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Task: {'id': task-2736159, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.400946] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-258de24d-ebbe-4ab7-bc13-3ac54b89ecef tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Volume attach. Driver type: vmdk {{(pid=63028) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1040.401204] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-258de24d-ebbe-4ab7-bc13-3ac54b89ecef tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: d41a1eae-bb89-4222-9466-d86af891c654] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-550854', 'volume_id': 'fc4d17f9-1fe2-4d10-a1c6-ede7f71f83a6', 'name': 'volume-fc4d17f9-1fe2-4d10-a1c6-ede7f71f83a6', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'd41a1eae-bb89-4222-9466-d86af891c654', 'attached_at': '', 'detached_at': '', 'volume_id': 'fc4d17f9-1fe2-4d10-a1c6-ede7f71f83a6', 'serial': 'fc4d17f9-1fe2-4d10-a1c6-ede7f71f83a6'} {{(pid=63028) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1040.404377] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38708f0f-95ec-4d47-b813-4e5ba1767763 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.407796] env[63028]: DEBUG nova.network.neutron [req-33b4e2d0-9d8b-4f68-97f5-63f479d8b1c7 req-887270df-b19e-4905-b3f9-74a38fc8c424 service nova] [instance: 60d18f14-536a-4b0f-912b-21f3f5a30d28] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1040.424879] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0321f2ef-e7bd-45df-9e5a-4529bb9a09a4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.444628] env[63028]: DEBUG oslo_concurrency.lockutils [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1040.451769] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-258de24d-ebbe-4ab7-bc13-3ac54b89ecef tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Reconfiguring VM instance instance-0000005a to attach disk [datastore2] volume-fc4d17f9-1fe2-4d10-a1c6-ede7f71f83a6/volume-fc4d17f9-1fe2-4d10-a1c6-ede7f71f83a6.vmdk or device None with type thin {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1040.456342] env[63028]: DEBUG oslo_vmware.service [-] Invoking 
VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6a1f5261-806b-4df8-a828-d4e4edae96ca {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.476257] env[63028]: DEBUG oslo_vmware.api [None req-258de24d-ebbe-4ab7-bc13-3ac54b89ecef tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Waiting for the task: (returnval){ [ 1040.476257] env[63028]: value = "task-2736160" [ 1040.476257] env[63028]: _type = "Task" [ 1040.476257] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.488171] env[63028]: DEBUG oslo_vmware.api [None req-258de24d-ebbe-4ab7-bc13-3ac54b89ecef tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2736160, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.616841] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05995a0d-ee9c-4e0c-b711-176bd515da72 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.624021] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2805fa50-579c-4de7-8520-5862f635dacd {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.653829] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c1aabc1-065b-4087-9112-9a55d6fa4faf {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.661250] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6c167b7-fd05-4130-9550-4195f008de7c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.675031] env[63028]: DEBUG nova.compute.provider_tree [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1040.731802] env[63028]: DEBUG oslo_concurrency.lockutils [None req-700d1c14-0bbf-4780-a97c-2e177b5c2496 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Lock "b3930760-1888-4f80-85d8-65120a25f275" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 35.438s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1040.794884] env[63028]: DEBUG nova.virt.hardware [None req-55a1c518-d590-4078-972f-2184f70fe3bf tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1040.795214] env[63028]: DEBUG nova.virt.hardware [None req-55a1c518-d590-4078-972f-2184f70fe3bf tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1040.795398] env[63028]: DEBUG nova.virt.hardware [None req-55a1c518-d590-4078-972f-2184f70fe3bf tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1040.795644] env[63028]: DEBUG nova.virt.hardware [None req-55a1c518-d590-4078-972f-2184f70fe3bf tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1040.795869] env[63028]: DEBUG nova.virt.hardware [None req-55a1c518-d590-4078-972f-2184f70fe3bf tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1040.796123] env[63028]: DEBUG nova.virt.hardware [None req-55a1c518-d590-4078-972f-2184f70fe3bf tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1040.796386] env[63028]: DEBUG nova.virt.hardware [None req-55a1c518-d590-4078-972f-2184f70fe3bf tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1040.796615] env[63028]: DEBUG nova.virt.hardware [None req-55a1c518-d590-4078-972f-2184f70fe3bf tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1040.796964] env[63028]: DEBUG nova.virt.hardware [None req-55a1c518-d590-4078-972f-2184f70fe3bf tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1040.797250] env[63028]: DEBUG nova.virt.hardware [None req-55a1c518-d590-4078-972f-2184f70fe3bf tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1040.797447] env[63028]: DEBUG nova.virt.hardware [None req-55a1c518-d590-4078-972f-2184f70fe3bf tempest-ServerDiskConfigTestJSON-1072642600 
tempest-ServerDiskConfigTestJSON-1072642600-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1040.802675] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-55a1c518-d590-4078-972f-2184f70fe3bf tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 56e6ade9-893b-4c85-b0b8-e9f7b12cbad6] Reconfiguring VM instance instance-0000005d to detach disk 2000 {{(pid=63028) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1040.802976] env[63028]: DEBUG oslo_vmware.api [None req-f0751db6-5ce2-45a6-a330-fe5726ff7979 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Task: {'id': task-2736159, 'name': PowerOnVM_Task, 'duration_secs': 0.653936} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.803548] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-654c7064-c262-471c-b902-4c020066c9ff {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.818452] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0751db6-5ce2-45a6-a330-fe5726ff7979 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] [instance: f804ec95-0b97-4960-844d-b678b97fc401] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1040.818628] env[63028]: INFO nova.compute.manager [None req-f0751db6-5ce2-45a6-a330-fe5726ff7979 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] [instance: f804ec95-0b97-4960-844d-b678b97fc401] Took 9.52 seconds to spawn the instance on the hypervisor. [ 1040.818802] env[63028]: DEBUG nova.compute.manager [None req-f0751db6-5ce2-45a6-a330-fe5726ff7979 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] [instance: f804ec95-0b97-4960-844d-b678b97fc401] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1040.819600] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49c7a422-9f46-487c-ba35-48d2ec52e734 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.830492] env[63028]: DEBUG oslo_vmware.api [None req-55a1c518-d590-4078-972f-2184f70fe3bf tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Waiting for the task: (returnval){ [ 1040.830492] env[63028]: value = "task-2736161" [ 1040.830492] env[63028]: _type = "Task" [ 1040.830492] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.838594] env[63028]: DEBUG oslo_vmware.api [None req-55a1c518-d590-4078-972f-2184f70fe3bf tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2736161, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.910904] env[63028]: DEBUG oslo_concurrency.lockutils [req-33b4e2d0-9d8b-4f68-97f5-63f479d8b1c7 req-887270df-b19e-4905-b3f9-74a38fc8c424 service nova] Releasing lock "refresh_cache-60d18f14-536a-4b0f-912b-21f3f5a30d28" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1040.911347] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5d13b0db-b56d-43b7-9f3a-f0646936a40d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Acquired lock "refresh_cache-60d18f14-536a-4b0f-912b-21f3f5a30d28" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1040.911513] env[63028]: DEBUG nova.network.neutron [None req-5d13b0db-b56d-43b7-9f3a-f0646936a40d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 60d18f14-536a-4b0f-912b-21f3f5a30d28] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1040.985595] env[63028]: DEBUG oslo_vmware.api [None req-258de24d-ebbe-4ab7-bc13-3ac54b89ecef tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2736160, 'name': ReconfigVM_Task, 'duration_secs': 0.359689} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.985871] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-258de24d-ebbe-4ab7-bc13-3ac54b89ecef tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Reconfigured VM instance instance-0000005a to attach disk [datastore2] volume-fc4d17f9-1fe2-4d10-a1c6-ede7f71f83a6/volume-fc4d17f9-1fe2-4d10-a1c6-ede7f71f83a6.vmdk or device None with type thin {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1040.990881] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-832d29aa-ec4e-490c-ab9d-bc106c8c2af1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.004731] env[63028]: DEBUG oslo_vmware.api [None req-258de24d-ebbe-4ab7-bc13-3ac54b89ecef tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Waiting for the task: (returnval){ [ 1041.004731] env[63028]: value = "task-2736162" [ 1041.004731] env[63028]: _type = "Task" [ 1041.004731] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.012710] env[63028]: DEBUG oslo_vmware.api [None req-258de24d-ebbe-4ab7-bc13-3ac54b89ecef tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2736162, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.178788] env[63028]: DEBUG nova.scheduler.client.report [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1041.206496] env[63028]: DEBUG nova.compute.manager [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 3566ab6f-1f8a-472d-9efb-47fa2520a215] Start spawning the instance on the hypervisor. {{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1041.232816] env[63028]: DEBUG nova.virt.hardware [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1041.233066] env[63028]: DEBUG nova.virt.hardware [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1041.233228] env[63028]: DEBUG nova.virt.hardware [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1041.233447] env[63028]: DEBUG nova.virt.hardware [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1041.233606] env[63028]: DEBUG nova.virt.hardware [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1041.233758] env[63028]: 
DEBUG nova.virt.hardware [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1041.233976] env[63028]: DEBUG nova.virt.hardware [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1041.234168] env[63028]: DEBUG nova.virt.hardware [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1041.234337] env[63028]: DEBUG nova.virt.hardware [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1041.234550] env[63028]: DEBUG nova.virt.hardware [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1041.234735] env[63028]: DEBUG nova.virt.hardware [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1041.235607] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f773db54-287c-4fdf-8bb4-8fc4e5a14968 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.243470] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f98e0a83-f19a-4688-a928-b7d8e7f7f71e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.340724] env[63028]: INFO nova.compute.manager [None req-f0751db6-5ce2-45a6-a330-fe5726ff7979 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] [instance: f804ec95-0b97-4960-844d-b678b97fc401] Took 33.07 seconds to build instance. [ 1041.347604] env[63028]: DEBUG oslo_vmware.api [None req-55a1c518-d590-4078-972f-2184f70fe3bf tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2736161, 'name': ReconfigVM_Task, 'duration_secs': 0.195383} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.347855] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-55a1c518-d590-4078-972f-2184f70fe3bf tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 56e6ade9-893b-4c85-b0b8-e9f7b12cbad6] Reconfigured VM instance instance-0000005d to detach disk 2000 {{(pid=63028) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1041.348609] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8c03d76-96c4-433f-b3b1-8986fe90f4d2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.370447] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-55a1c518-d590-4078-972f-2184f70fe3bf tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 56e6ade9-893b-4c85-b0b8-e9f7b12cbad6] Reconfiguring VM instance instance-0000005d to attach disk [datastore2] 56e6ade9-893b-4c85-b0b8-e9f7b12cbad6/56e6ade9-893b-4c85-b0b8-e9f7b12cbad6.vmdk or device None with type thin {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1041.370447] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1b8fb120-3ca2-47c4-b414-12b1c3cf9484 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.390027] env[63028]: DEBUG oslo_vmware.api [None req-55a1c518-d590-4078-972f-2184f70fe3bf tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Waiting for the task: (returnval){ [ 1041.390027] env[63028]: value = "task-2736163" [ 1041.390027] env[63028]: _type = "Task" [ 1041.390027] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.398737] env[63028]: DEBUG oslo_vmware.api [None req-55a1c518-d590-4078-972f-2184f70fe3bf tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2736163, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.460086] env[63028]: DEBUG nova.network.neutron [None req-5d13b0db-b56d-43b7-9f3a-f0646936a40d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 60d18f14-536a-4b0f-912b-21f3f5a30d28] Instance cache missing network info. {{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1041.514355] env[63028]: DEBUG oslo_vmware.api [None req-258de24d-ebbe-4ab7-bc13-3ac54b89ecef tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2736162, 'name': ReconfigVM_Task, 'duration_secs': 0.150729} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.516701] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-258de24d-ebbe-4ab7-bc13-3ac54b89ecef tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-550854', 'volume_id': 'fc4d17f9-1fe2-4d10-a1c6-ede7f71f83a6', 'name': 'volume-fc4d17f9-1fe2-4d10-a1c6-ede7f71f83a6', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'd41a1eae-bb89-4222-9466-d86af891c654', 'attached_at': '', 'detached_at': '', 'volume_id': 'fc4d17f9-1fe2-4d10-a1c6-ede7f71f83a6', 'serial': 'fc4d17f9-1fe2-4d10-a1c6-ede7f71f83a6'} {{(pid=63028) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1041.684400] env[63028]: DEBUG oslo_concurrency.lockutils [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.488s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1041.688152] env[63028]: DEBUG nova.compute.manager [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 1cf111f2-df5e-48a6-905a-bc2d3ea45202] Start building networks asynchronously for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1041.690124] env[63028]: DEBUG oslo_concurrency.lockutils [None req-26d9c995-1761-41dc-941a-dd0d7237cc1c tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.322s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1041.690411] env[63028]: DEBUG nova.objects.instance [None req-26d9c995-1761-41dc-941a-dd0d7237cc1c tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Lazy-loading 'resources' on Instance uuid c0693e4c-30b2-4eda-be1e-f6186d78038b {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1041.705755] env[63028]: DEBUG nova.network.neutron [None req-5d13b0db-b56d-43b7-9f3a-f0646936a40d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 60d18f14-536a-4b0f-912b-21f3f5a30d28] Updating instance_info_cache with network_info: [{"id": "2e1ed2b6-3ab4-47dc-9247-d7c30d3bc291", "address": "fa:16:3e:d1:35:37", "network": {"id": "ec7daf90-9857-4c67-8588-1d8404499409", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-574413795-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e85128c5c889438bbb1df571b7756c6a", "mtu": 8950, "physical_network": "default", 
"tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e1a5c1-4ae7-409b-8de7-d401684ef60d", "external-id": "nsx-vlan-transportzone-740", "segmentation_id": 740, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2e1ed2b6-3a", "ovs_interfaceid": "2e1ed2b6-3ab4-47dc-9247-d7c30d3bc291", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1041.751903] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f70752c5-7328-401a-9ff3-9b6723799af8 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Acquiring lock "2c2fb165-8906-4d42-a839-89ea6c8814ab" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1041.752174] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f70752c5-7328-401a-9ff3-9b6723799af8 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Lock "2c2fb165-8906-4d42-a839-89ea6c8814ab" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1041.778542] env[63028]: DEBUG nova.compute.manager [req-ec3e7de5-028d-40bf-acbf-c9c782dc54bf req-5d2aca17-027d-4191-b74c-eae26e97b6a0 service nova] [instance: 3566ab6f-1f8a-472d-9efb-47fa2520a215] Received event network-vif-plugged-4d0af6d0-21b5-49d7-a857-43d30a0a2514 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1041.778807] env[63028]: DEBUG oslo_concurrency.lockutils [req-ec3e7de5-028d-40bf-acbf-c9c782dc54bf req-5d2aca17-027d-4191-b74c-eae26e97b6a0 service nova] Acquiring lock "3566ab6f-1f8a-472d-9efb-47fa2520a215-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1041.779072] env[63028]: DEBUG oslo_concurrency.lockutils [req-ec3e7de5-028d-40bf-acbf-c9c782dc54bf req-5d2aca17-027d-4191-b74c-eae26e97b6a0 service nova] Lock "3566ab6f-1f8a-472d-9efb-47fa2520a215-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1041.779228] env[63028]: DEBUG oslo_concurrency.lockutils [req-ec3e7de5-028d-40bf-acbf-c9c782dc54bf req-5d2aca17-027d-4191-b74c-eae26e97b6a0 service nova] Lock "3566ab6f-1f8a-472d-9efb-47fa2520a215-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1041.779395] env[63028]: DEBUG nova.compute.manager [req-ec3e7de5-028d-40bf-acbf-c9c782dc54bf req-5d2aca17-027d-4191-b74c-eae26e97b6a0 service nova] [instance: 3566ab6f-1f8a-472d-9efb-47fa2520a215] No waiting events found dispatching network-vif-plugged-4d0af6d0-21b5-49d7-a857-43d30a0a2514 {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1041.779558] env[63028]: WARNING nova.compute.manager 
[req-ec3e7de5-028d-40bf-acbf-c9c782dc54bf req-5d2aca17-027d-4191-b74c-eae26e97b6a0 service nova] [instance: 3566ab6f-1f8a-472d-9efb-47fa2520a215] Received unexpected event network-vif-plugged-4d0af6d0-21b5-49d7-a857-43d30a0a2514 for instance with vm_state building and task_state spawning. [ 1041.843245] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f0751db6-5ce2-45a6-a330-fe5726ff7979 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Lock "f804ec95-0b97-4960-844d-b678b97fc401" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 34.584s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1041.848040] env[63028]: DEBUG nova.network.neutron [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 3566ab6f-1f8a-472d-9efb-47fa2520a215] Successfully updated port: 4d0af6d0-21b5-49d7-a857-43d30a0a2514 {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1041.901352] env[63028]: DEBUG oslo_vmware.api [None req-55a1c518-d590-4078-972f-2184f70fe3bf tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2736163, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.193777] env[63028]: DEBUG nova.compute.utils [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1042.195543] env[63028]: DEBUG nova.compute.manager [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 1cf111f2-df5e-48a6-905a-bc2d3ea45202] Allocating IP information in the background. 
{{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1042.199020] env[63028]: DEBUG nova.network.neutron [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 1cf111f2-df5e-48a6-905a-bc2d3ea45202] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1042.211915] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5d13b0db-b56d-43b7-9f3a-f0646936a40d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Releasing lock "refresh_cache-60d18f14-536a-4b0f-912b-21f3f5a30d28" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1042.212205] env[63028]: DEBUG nova.compute.manager [None req-5d13b0db-b56d-43b7-9f3a-f0646936a40d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 60d18f14-536a-4b0f-912b-21f3f5a30d28] Instance network_info: |[{"id": "2e1ed2b6-3ab4-47dc-9247-d7c30d3bc291", "address": "fa:16:3e:d1:35:37", "network": {"id": "ec7daf90-9857-4c67-8588-1d8404499409", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-574413795-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e85128c5c889438bbb1df571b7756c6a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e1a5c1-4ae7-409b-8de7-d401684ef60d", "external-id": "nsx-vlan-transportzone-740", "segmentation_id": 740, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2e1ed2b6-3a", "ovs_interfaceid": "2e1ed2b6-3ab4-47dc-9247-d7c30d3bc291", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1042.212596] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-5d13b0db-b56d-43b7-9f3a-f0646936a40d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 60d18f14-536a-4b0f-912b-21f3f5a30d28] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d1:35:37', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69e1a5c1-4ae7-409b-8de7-d401684ef60d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2e1ed2b6-3ab4-47dc-9247-d7c30d3bc291', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1042.220286] env[63028]: DEBUG oslo.service.loopingcall [None req-5d13b0db-b56d-43b7-9f3a-f0646936a40d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1042.220822] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 60d18f14-536a-4b0f-912b-21f3f5a30d28] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1042.221109] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3538589d-c199-489c-9bc0-e6a6147a9544 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.244564] env[63028]: DEBUG nova.policy [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2169c5f761b3452bb04fdf14cf6f1ff5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c83df00f440248ca9e84394ce6365144', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 1042.248192] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1042.248192] env[63028]: value = "task-2736164" [ 1042.248192] env[63028]: _type = "Task" [ 1042.248192] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1042.254099] env[63028]: DEBUG nova.compute.manager [None req-f70752c5-7328-401a-9ff3-9b6723799af8 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 2c2fb165-8906-4d42-a839-89ea6c8814ab] Starting instance... {{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1042.260444] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2736164, 'name': CreateVM_Task} progress is 5%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.356505] env[63028]: DEBUG oslo_concurrency.lockutils [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Acquiring lock "refresh_cache-3566ab6f-1f8a-472d-9efb-47fa2520a215" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1042.357980] env[63028]: DEBUG oslo_concurrency.lockutils [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Acquired lock "refresh_cache-3566ab6f-1f8a-472d-9efb-47fa2520a215" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1042.357980] env[63028]: DEBUG nova.network.neutron [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 3566ab6f-1f8a-472d-9efb-47fa2520a215] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1042.404026] env[63028]: DEBUG oslo_vmware.api [None req-55a1c518-d590-4078-972f-2184f70fe3bf tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2736163, 'name': ReconfigVM_Task, 'duration_secs': 0.549462} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1042.404026] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-55a1c518-d590-4078-972f-2184f70fe3bf tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 56e6ade9-893b-4c85-b0b8-e9f7b12cbad6] Reconfigured VM instance instance-0000005d to attach disk [datastore2] 56e6ade9-893b-4c85-b0b8-e9f7b12cbad6/56e6ade9-893b-4c85-b0b8-e9f7b12cbad6.vmdk or device None with type thin {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1042.404026] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-55a1c518-d590-4078-972f-2184f70fe3bf tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 56e6ade9-893b-4c85-b0b8-e9f7b12cbad6] Updating instance '56e6ade9-893b-4c85-b0b8-e9f7b12cbad6' progress to 50 {{(pid=63028) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1042.557721] env[63028]: DEBUG nova.objects.instance [None req-258de24d-ebbe-4ab7-bc13-3ac54b89ecef tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Lazy-loading 'flavor' on Instance uuid d41a1eae-bb89-4222-9466-d86af891c654 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1042.570119] env[63028]: DEBUG nova.network.neutron [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 1cf111f2-df5e-48a6-905a-bc2d3ea45202] Successfully created port: f2d3b19c-6db9-4224-812a-45ec031221f0 {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1042.624317] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e928cdc-c00c-4110-b3b8-9de4f5551cb3 {{(pid=63028) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.636020] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f82d22e-77c3-4f5c-a15b-b0bf4ebb4809 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.664501] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abb2ffa9-2852-4736-a159-1feca5793936 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.674167] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a81ca9ee-2d88-4f2d-9bec-5e0c97b846ff {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.686609] env[63028]: DEBUG nova.compute.provider_tree [None req-26d9c995-1761-41dc-941a-dd0d7237cc1c tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1042.689760] env[63028]: DEBUG nova.compute.manager [req-9759bf2a-4ea3-41f3-ac66-d16e3a1b9894 req-182f8caa-691d-407f-b6ae-5cad5d92c9f1 service nova] [instance: f804ec95-0b97-4960-844d-b678b97fc401] Received event network-changed-33f3a6b3-ea0e-4b6c-a0c5-80d9100ff94e {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1042.689898] env[63028]: DEBUG nova.compute.manager [req-9759bf2a-4ea3-41f3-ac66-d16e3a1b9894 req-182f8caa-691d-407f-b6ae-5cad5d92c9f1 service nova] [instance: f804ec95-0b97-4960-844d-b678b97fc401] Refreshing instance network info cache due to event network-changed-33f3a6b3-ea0e-4b6c-a0c5-80d9100ff94e. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1042.690121] env[63028]: DEBUG oslo_concurrency.lockutils [req-9759bf2a-4ea3-41f3-ac66-d16e3a1b9894 req-182f8caa-691d-407f-b6ae-5cad5d92c9f1 service nova] Acquiring lock "refresh_cache-f804ec95-0b97-4960-844d-b678b97fc401" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1042.690320] env[63028]: DEBUG oslo_concurrency.lockutils [req-9759bf2a-4ea3-41f3-ac66-d16e3a1b9894 req-182f8caa-691d-407f-b6ae-5cad5d92c9f1 service nova] Acquired lock "refresh_cache-f804ec95-0b97-4960-844d-b678b97fc401" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1042.690413] env[63028]: DEBUG nova.network.neutron [req-9759bf2a-4ea3-41f3-ac66-d16e3a1b9894 req-182f8caa-691d-407f-b6ae-5cad5d92c9f1 service nova] [instance: f804ec95-0b97-4960-844d-b678b97fc401] Refreshing network info cache for port 33f3a6b3-ea0e-4b6c-a0c5-80d9100ff94e {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1042.700588] env[63028]: DEBUG nova.compute.manager [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 1cf111f2-df5e-48a6-905a-bc2d3ea45202] Start building block device mappings for instance. 
{{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1042.759024] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2736164, 'name': CreateVM_Task, 'duration_secs': 0.320687} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1042.759842] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 60d18f14-536a-4b0f-912b-21f3f5a30d28] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1042.760585] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5d13b0db-b56d-43b7-9f3a-f0646936a40d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1042.760762] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5d13b0db-b56d-43b7-9f3a-f0646936a40d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1042.761364] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5d13b0db-b56d-43b7-9f3a-f0646936a40d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1042.761592] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d7ec026c-987f-4ec0-8bff-53ee6702c275 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.769680] env[63028]: DEBUG oslo_vmware.api [None req-5d13b0db-b56d-43b7-9f3a-f0646936a40d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Waiting for the task: (returnval){ [ 1042.769680] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5280d6ae-3072-0731-c236-d11313ced285" [ 1042.769680] env[63028]: _type = "Task" [ 1042.769680] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1042.781547] env[63028]: DEBUG oslo_vmware.api [None req-5d13b0db-b56d-43b7-9f3a-f0646936a40d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5280d6ae-3072-0731-c236-d11313ced285, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.783935] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f70752c5-7328-401a-9ff3-9b6723799af8 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1042.905203] env[63028]: DEBUG nova.network.neutron [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 3566ab6f-1f8a-472d-9efb-47fa2520a215] Instance cache missing network info. {{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1042.914768] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52ee9e7c-286d-4426-b074-6651a2b7c541 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.945643] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa9dcba2-9486-4df6-9a29-97542100a0e5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.969163] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-55a1c518-d590-4078-972f-2184f70fe3bf tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 56e6ade9-893b-4c85-b0b8-e9f7b12cbad6] Updating instance '56e6ade9-893b-4c85-b0b8-e9f7b12cbad6' progress to 67 {{(pid=63028) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1043.063284] env[63028]: DEBUG oslo_concurrency.lockutils [None req-258de24d-ebbe-4ab7-bc13-3ac54b89ecef tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Lock "d41a1eae-bb89-4222-9466-d86af891c654" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.813s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1043.091638] env[63028]: DEBUG nova.network.neutron [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 3566ab6f-1f8a-472d-9efb-47fa2520a215] Updating instance_info_cache with network_info: [{"id": "4d0af6d0-21b5-49d7-a857-43d30a0a2514", "address": "fa:16:3e:c8:14:94", "network": {"id": "15d7a776-341f-4ba6-b8c6-f0cd9f0688c1", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1631909054-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c83df00f440248ca9e84394ce6365144", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4fb94adb-cc41-4c16-9830-a3205dbd2bf5", "external-id": "nsx-vlan-transportzone-100", "segmentation_id": 100, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4d0af6d0-21", 
"ovs_interfaceid": "4d0af6d0-21b5-49d7-a857-43d30a0a2514", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1043.193127] env[63028]: DEBUG nova.scheduler.client.report [None req-26d9c995-1761-41dc-941a-dd0d7237cc1c tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1043.282698] env[63028]: DEBUG oslo_vmware.api [None req-5d13b0db-b56d-43b7-9f3a-f0646936a40d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5280d6ae-3072-0731-c236-d11313ced285, 'name': SearchDatastore_Task, 'duration_secs': 0.019353} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1043.283020] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5d13b0db-b56d-43b7-9f3a-f0646936a40d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1043.283873] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-5d13b0db-b56d-43b7-9f3a-f0646936a40d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 60d18f14-536a-4b0f-912b-21f3f5a30d28] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1043.283873] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5d13b0db-b56d-43b7-9f3a-f0646936a40d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1043.283873] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5d13b0db-b56d-43b7-9f3a-f0646936a40d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1043.283873] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-5d13b0db-b56d-43b7-9f3a-f0646936a40d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1043.284051] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5a8a2439-66fd-4f4c-b459-163f4d6c4ed7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.291797] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-5d13b0db-b56d-43b7-9f3a-f0646936a40d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1043.291964] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-5d13b0db-b56d-43b7-9f3a-f0646936a40d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1043.292692] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d504b1b1-1ba8-49a3-b7db-51a6e9532b1d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.299868] env[63028]: DEBUG oslo_vmware.api [None req-5d13b0db-b56d-43b7-9f3a-f0646936a40d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Waiting for the task: (returnval){ [ 1043.299868] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52d9b655-bd01-1327-099e-2beb1833222b" [ 1043.299868] env[63028]: _type = "Task" [ 1043.299868] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1043.307612] env[63028]: DEBUG oslo_vmware.api [None req-5d13b0db-b56d-43b7-9f3a-f0646936a40d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52d9b655-bd01-1327-099e-2beb1833222b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.407292] env[63028]: DEBUG nova.network.neutron [req-9759bf2a-4ea3-41f3-ac66-d16e3a1b9894 req-182f8caa-691d-407f-b6ae-5cad5d92c9f1 service nova] [instance: f804ec95-0b97-4960-844d-b678b97fc401] Updated VIF entry in instance network info cache for port 33f3a6b3-ea0e-4b6c-a0c5-80d9100ff94e. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1043.407857] env[63028]: DEBUG nova.network.neutron [req-9759bf2a-4ea3-41f3-ac66-d16e3a1b9894 req-182f8caa-691d-407f-b6ae-5cad5d92c9f1 service nova] [instance: f804ec95-0b97-4960-844d-b678b97fc401] Updating instance_info_cache with network_info: [{"id": "33f3a6b3-ea0e-4b6c-a0c5-80d9100ff94e", "address": "fa:16:3e:ee:58:09", "network": {"id": "e3a8845b-9fc6-46bd-8272-501135c875ad", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-2047431351-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.223", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91a3f78ba4514500bfd4ed81b74526e3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f68ebd2a-3c68-48db-8c32-8a01497fc2e7", "external-id": "nsx-vlan-transportzone-49", "segmentation_id": 49, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap33f3a6b3-ea", "ovs_interfaceid": "33f3a6b3-ea0e-4b6c-a0c5-80d9100ff94e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1043.508810] env[63028]: DEBUG nova.network.neutron [None req-55a1c518-d590-4078-972f-2184f70fe3bf tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 56e6ade9-893b-4c85-b0b8-e9f7b12cbad6] Port 1f5c01d1-9623-425e-8309-336dd1d961fa binding to destination host cpu-1 is already ACTIVE {{(pid=63028) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1043.526891] env[63028]: DEBUG nova.compute.manager [None req-b28eb651-7c74-4410-891e-a25b642e44b0 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Stashing vm_state: active {{(pid=63028) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1043.595220] env[63028]: DEBUG oslo_concurrency.lockutils [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Releasing lock "refresh_cache-3566ab6f-1f8a-472d-9efb-47fa2520a215" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1043.595653] env[63028]: DEBUG nova.compute.manager [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 3566ab6f-1f8a-472d-9efb-47fa2520a215] Instance network_info: |[{"id": "4d0af6d0-21b5-49d7-a857-43d30a0a2514", "address": "fa:16:3e:c8:14:94", "network": {"id": "15d7a776-341f-4ba6-b8c6-f0cd9f0688c1", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1631909054-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, 
"floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c83df00f440248ca9e84394ce6365144", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4fb94adb-cc41-4c16-9830-a3205dbd2bf5", "external-id": "nsx-vlan-transportzone-100", "segmentation_id": 100, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4d0af6d0-21", "ovs_interfaceid": "4d0af6d0-21b5-49d7-a857-43d30a0a2514", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1043.596208] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 3566ab6f-1f8a-472d-9efb-47fa2520a215] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c8:14:94', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4fb94adb-cc41-4c16-9830-a3205dbd2bf5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4d0af6d0-21b5-49d7-a857-43d30a0a2514', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1043.608796] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Creating folder: Project (c83df00f440248ca9e84394ce6365144). Parent ref: group-v550570. {{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1043.610131] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-97327c3e-a9bc-4ae3-9f9c-7ae94e71ce7f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.622521] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Created folder: Project (c83df00f440248ca9e84394ce6365144) in parent group-v550570. [ 1043.622815] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Creating folder: Instances. Parent ref: group-v550856. {{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1043.623158] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a46ce806-10b3-4e2e-a1b3-0094686a44a9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.632805] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Created folder: Instances in parent group-v550856. [ 1043.633134] env[63028]: DEBUG oslo.service.loopingcall [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1043.633379] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3566ab6f-1f8a-472d-9efb-47fa2520a215] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1043.633762] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8e0f990c-5fbb-4ff0-ad7f-ce0a97b434f4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.659898] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1043.659898] env[63028]: value = "task-2736167" [ 1043.659898] env[63028]: _type = "Task" [ 1043.659898] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1043.669152] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2736167, 'name': CreateVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.699168] env[63028]: DEBUG oslo_concurrency.lockutils [None req-26d9c995-1761-41dc-941a-dd0d7237cc1c tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.009s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1043.701495] env[63028]: DEBUG oslo_concurrency.lockutils [None req-fffae308-21b3-4b5b-a9cb-3d79d782e7ea tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.298s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1043.701729] env[63028]: DEBUG nova.objects.instance [None req-fffae308-21b3-4b5b-a9cb-3d79d782e7ea tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Lazy-loading 'resources' on Instance uuid 63524cd8-81de-419f-bb07-0326f3cb393f {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1043.710312] env[63028]: DEBUG nova.compute.manager [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 1cf111f2-df5e-48a6-905a-bc2d3ea45202] Start spawning the instance on the hypervisor. 
{{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1043.726617] env[63028]: INFO nova.scheduler.client.report [None req-26d9c995-1761-41dc-941a-dd0d7237cc1c tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Deleted allocations for instance c0693e4c-30b2-4eda-be1e-f6186d78038b [ 1043.738302] env[63028]: DEBUG nova.virt.hardware [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1043.738553] env[63028]: DEBUG nova.virt.hardware [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1043.738712] env[63028]: DEBUG nova.virt.hardware [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1043.738890] env[63028]: DEBUG nova.virt.hardware [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1043.739046] env[63028]: DEBUG nova.virt.hardware [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1043.739196] env[63028]: DEBUG nova.virt.hardware [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1043.739404] env[63028]: DEBUG nova.virt.hardware [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1043.739564] env[63028]: DEBUG nova.virt.hardware [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 
tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1043.739729] env[63028]: DEBUG nova.virt.hardware [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1043.739887] env[63028]: DEBUG nova.virt.hardware [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1043.740073] env[63028]: DEBUG nova.virt.hardware [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1043.740898] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c7bf83e-aa70-470c-bb84-411864bc839d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.749380] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-500de0cf-6185-4f09-af30-5b67f55c2f7f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.806846] env[63028]: DEBUG nova.compute.manager [req-fc92960f-b5a0-4924-ade7-c936055c3f86 req-48116e79-3cb4-4083-b41c-9152b52481c3 service nova] [instance: 3566ab6f-1f8a-472d-9efb-47fa2520a215] Received event network-changed-4d0af6d0-21b5-49d7-a857-43d30a0a2514 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1043.807108] env[63028]: DEBUG nova.compute.manager [req-fc92960f-b5a0-4924-ade7-c936055c3f86 req-48116e79-3cb4-4083-b41c-9152b52481c3 service nova] [instance: 3566ab6f-1f8a-472d-9efb-47fa2520a215] Refreshing instance network info cache due to event network-changed-4d0af6d0-21b5-49d7-a857-43d30a0a2514. 
{{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1043.807382] env[63028]: DEBUG oslo_concurrency.lockutils [req-fc92960f-b5a0-4924-ade7-c936055c3f86 req-48116e79-3cb4-4083-b41c-9152b52481c3 service nova] Acquiring lock "refresh_cache-3566ab6f-1f8a-472d-9efb-47fa2520a215" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1043.807536] env[63028]: DEBUG oslo_concurrency.lockutils [req-fc92960f-b5a0-4924-ade7-c936055c3f86 req-48116e79-3cb4-4083-b41c-9152b52481c3 service nova] Acquired lock "refresh_cache-3566ab6f-1f8a-472d-9efb-47fa2520a215" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1043.807702] env[63028]: DEBUG nova.network.neutron [req-fc92960f-b5a0-4924-ade7-c936055c3f86 req-48116e79-3cb4-4083-b41c-9152b52481c3 service nova] [instance: 3566ab6f-1f8a-472d-9efb-47fa2520a215] Refreshing network info cache for port 4d0af6d0-21b5-49d7-a857-43d30a0a2514 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1043.814510] env[63028]: DEBUG oslo_vmware.api [None req-5d13b0db-b56d-43b7-9f3a-f0646936a40d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52d9b655-bd01-1327-099e-2beb1833222b, 'name': SearchDatastore_Task, 'duration_secs': 0.00988} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1043.815472] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-15864fd5-3397-489f-ae1c-ef0df1f153d9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.822926] env[63028]: DEBUG oslo_vmware.api [None req-5d13b0db-b56d-43b7-9f3a-f0646936a40d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Waiting for the task: (returnval){ [ 1043.822926] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5203a68f-125e-2603-dc52-e6bbca43387d" [ 1043.822926] env[63028]: _type = "Task" [ 1043.822926] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1043.831103] env[63028]: DEBUG oslo_vmware.api [None req-5d13b0db-b56d-43b7-9f3a-f0646936a40d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5203a68f-125e-2603-dc52-e6bbca43387d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.910391] env[63028]: DEBUG oslo_concurrency.lockutils [req-9759bf2a-4ea3-41f3-ac66-d16e3a1b9894 req-182f8caa-691d-407f-b6ae-5cad5d92c9f1 service nova] Releasing lock "refresh_cache-f804ec95-0b97-4960-844d-b678b97fc401" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1044.049708] env[63028]: DEBUG oslo_concurrency.lockutils [None req-b28eb651-7c74-4410-891e-a25b642e44b0 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1044.170985] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2736167, 'name': CreateVM_Task, 'duration_secs': 0.322051} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.171471] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3566ab6f-1f8a-472d-9efb-47fa2520a215] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1044.172987] env[63028]: DEBUG oslo_concurrency.lockutils [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1044.173073] env[63028]: DEBUG oslo_concurrency.lockutils [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1044.174167] env[63028]: DEBUG oslo_concurrency.lockutils [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1044.174322] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-209c0940-e80e-40a3-9ec6-f95b8d19fb5a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.179012] env[63028]: DEBUG oslo_vmware.api [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Waiting for the task: (returnval){ [ 1044.179012] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5220f1bd-65d5-fd70-9109-83b465cfe332" [ 1044.179012] env[63028]: _type = "Task" [ 1044.179012] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.187829] env[63028]: DEBUG oslo_vmware.api [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5220f1bd-65d5-fd70-9109-83b465cfe332, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.239346] env[63028]: DEBUG oslo_concurrency.lockutils [None req-26d9c995-1761-41dc-941a-dd0d7237cc1c tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Lock "c0693e4c-30b2-4eda-be1e-f6186d78038b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.684s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1044.274340] env[63028]: DEBUG nova.network.neutron [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 1cf111f2-df5e-48a6-905a-bc2d3ea45202] Successfully updated port: f2d3b19c-6db9-4224-812a-45ec031221f0 {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1044.335682] env[63028]: DEBUG oslo_vmware.api [None req-5d13b0db-b56d-43b7-9f3a-f0646936a40d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5203a68f-125e-2603-dc52-e6bbca43387d, 'name': SearchDatastore_Task, 'duration_secs': 0.013446} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.336391] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5d13b0db-b56d-43b7-9f3a-f0646936a40d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1044.336590] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d13b0db-b56d-43b7-9f3a-f0646936a40d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] 60d18f14-536a-4b0f-912b-21f3f5a30d28/60d18f14-536a-4b0f-912b-21f3f5a30d28.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1044.336874] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b4d89e85-0aa0-4d5f-9e6f-58ea82bb0247 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.346815] env[63028]: DEBUG oslo_vmware.api [None req-5d13b0db-b56d-43b7-9f3a-f0646936a40d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Waiting for the task: (returnval){ [ 1044.346815] env[63028]: value = "task-2736168" [ 1044.346815] env[63028]: _type = "Task" [ 1044.346815] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.356799] env[63028]: DEBUG oslo_vmware.api [None req-5d13b0db-b56d-43b7-9f3a-f0646936a40d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736168, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.518502] env[63028]: DEBUG nova.network.neutron [req-fc92960f-b5a0-4924-ade7-c936055c3f86 req-48116e79-3cb4-4083-b41c-9152b52481c3 service nova] [instance: 3566ab6f-1f8a-472d-9efb-47fa2520a215] Updated VIF entry in instance network info cache for port 4d0af6d0-21b5-49d7-a857-43d30a0a2514. {{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1044.518847] env[63028]: DEBUG nova.network.neutron [req-fc92960f-b5a0-4924-ade7-c936055c3f86 req-48116e79-3cb4-4083-b41c-9152b52481c3 service nova] [instance: 3566ab6f-1f8a-472d-9efb-47fa2520a215] Updating instance_info_cache with network_info: [{"id": "4d0af6d0-21b5-49d7-a857-43d30a0a2514", "address": "fa:16:3e:c8:14:94", "network": {"id": "15d7a776-341f-4ba6-b8c6-f0cd9f0688c1", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1631909054-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c83df00f440248ca9e84394ce6365144", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4fb94adb-cc41-4c16-9830-a3205dbd2bf5", "external-id": "nsx-vlan-transportzone-100", "segmentation_id": 100, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4d0af6d0-21", "ovs_interfaceid": "4d0af6d0-21b5-49d7-a857-43d30a0a2514", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1044.536431] env[63028]: DEBUG oslo_concurrency.lockutils [None req-55a1c518-d590-4078-972f-2184f70fe3bf tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Acquiring lock "56e6ade9-893b-4c85-b0b8-e9f7b12cbad6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1044.536696] env[63028]: DEBUG oslo_concurrency.lockutils [None req-55a1c518-d590-4078-972f-2184f70fe3bf tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Lock "56e6ade9-893b-4c85-b0b8-e9f7b12cbad6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1044.536883] env[63028]: DEBUG oslo_concurrency.lockutils [None req-55a1c518-d590-4078-972f-2184f70fe3bf tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Lock 
"56e6ade9-893b-4c85-b0b8-e9f7b12cbad6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1044.537940] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a4b285e8-f524-4cef-8d89-ff6c6535982a tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] Acquiring lock "53b0cf02-f1c8-4219-a58c-f7b5ffbb1ae7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1044.538160] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a4b285e8-f524-4cef-8d89-ff6c6535982a tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] Lock "53b0cf02-f1c8-4219-a58c-f7b5ffbb1ae7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1044.538345] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a4b285e8-f524-4cef-8d89-ff6c6535982a tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] Acquiring lock "53b0cf02-f1c8-4219-a58c-f7b5ffbb1ae7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1044.538819] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a4b285e8-f524-4cef-8d89-ff6c6535982a tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] Lock "53b0cf02-f1c8-4219-a58c-f7b5ffbb1ae7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1044.538819] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a4b285e8-f524-4cef-8d89-ff6c6535982a tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] Lock "53b0cf02-f1c8-4219-a58c-f7b5ffbb1ae7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1044.542432] env[63028]: INFO nova.compute.manager [None req-a4b285e8-f524-4cef-8d89-ff6c6535982a tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] [instance: 53b0cf02-f1c8-4219-a58c-f7b5ffbb1ae7] Terminating instance [ 1044.635751] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe6376c3-19c5-427e-9362-26261d5b82ec {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.643532] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f17f71f-6fc3-47c0-a838-c147a93fdf68 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.674646] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-90bb51ac-a461-44ba-b9ac-1bc4842a97f5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.684742] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12cedcaa-a95a-41c5-81d1-f84440985bd1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.693779] env[63028]: DEBUG oslo_vmware.api [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5220f1bd-65d5-fd70-9109-83b465cfe332, 'name': SearchDatastore_Task, 'duration_secs': 0.034417} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.701133] env[63028]: DEBUG oslo_concurrency.lockutils [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1044.701368] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 3566ab6f-1f8a-472d-9efb-47fa2520a215] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1044.701597] env[63028]: DEBUG oslo_concurrency.lockutils [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1044.701745] env[63028]: DEBUG oslo_concurrency.lockutils [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1044.701925] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1044.702376] env[63028]: DEBUG nova.compute.provider_tree [None req-fffae308-21b3-4b5b-a9cb-3d79d782e7ea tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1044.703887] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c23e8d90-a782-43e9-852b-8c52c57fc2f1 {{(pid=63028) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.713765] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1044.713953] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1044.714644] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3bd9d499-8419-4388-bb5b-32c5f20b51a1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.719642] env[63028]: DEBUG oslo_vmware.api [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Waiting for the task: (returnval){ [ 1044.719642] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52e56b38-5fec-5f28-dff6-a6af061e2cf8" [ 1044.719642] env[63028]: _type = "Task" [ 1044.719642] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.727292] env[63028]: DEBUG oslo_vmware.api [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52e56b38-5fec-5f28-dff6-a6af061e2cf8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.776906] env[63028]: DEBUG oslo_concurrency.lockutils [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Acquiring lock "refresh_cache-1cf111f2-df5e-48a6-905a-bc2d3ea45202" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1044.777099] env[63028]: DEBUG oslo_concurrency.lockutils [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Acquired lock "refresh_cache-1cf111f2-df5e-48a6-905a-bc2d3ea45202" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1044.777361] env[63028]: DEBUG nova.network.neutron [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 1cf111f2-df5e-48a6-905a-bc2d3ea45202] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1044.856806] env[63028]: DEBUG oslo_vmware.api [None req-5d13b0db-b56d-43b7-9f3a-f0646936a40d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736168, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.876952] env[63028]: DEBUG oslo_concurrency.lockutils [None req-1ebfb8de-44b3-4435-851b-8b2a3ad71f08 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Acquiring lock "a50e1167-d8ed-4099-83c3-a5066ab0be1f" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1044.876952] env[63028]: DEBUG oslo_concurrency.lockutils [None req-1ebfb8de-44b3-4435-851b-8b2a3ad71f08 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Lock "a50e1167-d8ed-4099-83c3-a5066ab0be1f" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1045.021539] env[63028]: DEBUG oslo_concurrency.lockutils [req-fc92960f-b5a0-4924-ade7-c936055c3f86 req-48116e79-3cb4-4083-b41c-9152b52481c3 service nova] Releasing lock "refresh_cache-3566ab6f-1f8a-472d-9efb-47fa2520a215" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1045.046935] env[63028]: DEBUG nova.compute.manager [None req-a4b285e8-f524-4cef-8d89-ff6c6535982a tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] [instance: 53b0cf02-f1c8-4219-a58c-f7b5ffbb1ae7] Start destroying the instance on the hypervisor. {{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1045.047190] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-a4b285e8-f524-4cef-8d89-ff6c6535982a tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] [instance: 53b0cf02-f1c8-4219-a58c-f7b5ffbb1ae7] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1045.048464] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d04ed2f-6d8f-4082-bb42-b80c3cc42fea {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.059446] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4b285e8-f524-4cef-8d89-ff6c6535982a tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] [instance: 53b0cf02-f1c8-4219-a58c-f7b5ffbb1ae7] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1045.059967] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-98673e00-8c36-434b-ad77-6f048c499127 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.068854] env[63028]: DEBUG oslo_vmware.api [None req-a4b285e8-f524-4cef-8d89-ff6c6535982a tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] Waiting for the task: (returnval){ [ 1045.068854] env[63028]: value = "task-2736169" [ 1045.068854] env[63028]: _type = "Task" [ 1045.068854] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.078682] env[63028]: DEBUG oslo_vmware.api [None req-a4b285e8-f524-4cef-8d89-ff6c6535982a tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] Task: {'id': task-2736169, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.207633] env[63028]: DEBUG nova.scheduler.client.report [None req-fffae308-21b3-4b5b-a9cb-3d79d782e7ea tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1045.232558] env[63028]: DEBUG oslo_vmware.api [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52e56b38-5fec-5f28-dff6-a6af061e2cf8, 'name': SearchDatastore_Task, 'duration_secs': 0.009008} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.233552] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4c6edfc6-b9b6-47e2-be04-b8ab7a37868f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.240317] env[63028]: DEBUG oslo_vmware.api [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Waiting for the task: (returnval){ [ 1045.240317] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]522ff3b9-f9b3-c103-0f37-b0fc7060e9e3" [ 1045.240317] env[63028]: _type = "Task" [ 1045.240317] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.248850] env[63028]: DEBUG oslo_vmware.api [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]522ff3b9-f9b3-c103-0f37-b0fc7060e9e3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.311378] env[63028]: DEBUG nova.network.neutron [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 1cf111f2-df5e-48a6-905a-bc2d3ea45202] Instance cache missing network info. 
{{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1045.357530] env[63028]: DEBUG oslo_vmware.api [None req-5d13b0db-b56d-43b7-9f3a-f0646936a40d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736168, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.941342} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.359894] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d13b0db-b56d-43b7-9f3a-f0646936a40d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] 60d18f14-536a-4b0f-912b-21f3f5a30d28/60d18f14-536a-4b0f-912b-21f3f5a30d28.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1045.360135] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-5d13b0db-b56d-43b7-9f3a-f0646936a40d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 60d18f14-536a-4b0f-912b-21f3f5a30d28] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1045.360722] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-290c21ae-c6d4-4380-a345-b25c49edcd5a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.366978] env[63028]: DEBUG oslo_vmware.api [None req-5d13b0db-b56d-43b7-9f3a-f0646936a40d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Waiting for the task: (returnval){ [ 1045.366978] env[63028]: value = "task-2736170" [ 1045.366978] env[63028]: _type = "Task" [ 1045.366978] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.377454] env[63028]: DEBUG oslo_vmware.api [None req-5d13b0db-b56d-43b7-9f3a-f0646936a40d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736170, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.379250] env[63028]: INFO nova.compute.manager [None req-1ebfb8de-44b3-4435-851b-8b2a3ad71f08 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: a50e1167-d8ed-4099-83c3-a5066ab0be1f] Detaching volume 0e21440a-90bd-4920-bda0-bdf25396cbd3 [ 1045.434565] env[63028]: INFO nova.virt.block_device [None req-1ebfb8de-44b3-4435-851b-8b2a3ad71f08 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: a50e1167-d8ed-4099-83c3-a5066ab0be1f] Attempting to driver detach volume 0e21440a-90bd-4920-bda0-bdf25396cbd3 from mountpoint /dev/sdb [ 1045.434780] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-1ebfb8de-44b3-4435-851b-8b2a3ad71f08 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: a50e1167-d8ed-4099-83c3-a5066ab0be1f] Volume detach. 
Driver type: vmdk {{(pid=63028) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1045.434957] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-1ebfb8de-44b3-4435-851b-8b2a3ad71f08 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: a50e1167-d8ed-4099-83c3-a5066ab0be1f] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-550804', 'volume_id': '0e21440a-90bd-4920-bda0-bdf25396cbd3', 'name': 'volume-0e21440a-90bd-4920-bda0-bdf25396cbd3', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'a50e1167-d8ed-4099-83c3-a5066ab0be1f', 'attached_at': '', 'detached_at': '', 'volume_id': '0e21440a-90bd-4920-bda0-bdf25396cbd3', 'serial': '0e21440a-90bd-4920-bda0-bdf25396cbd3'} {{(pid=63028) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1045.435828] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0a8c0c1-4b87-40e7-a130-1c31dacfe4f8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.459688] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b49dce5b-6f48-4345-9e2c-9a3986748cf9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.466682] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5e058a1-1ade-47db-8611-5f6dacf2f412 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.486389] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d5b9be7-1467-4d29-843e-c1a7e37aac49 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.489459] env[63028]: DEBUG nova.network.neutron [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 1cf111f2-df5e-48a6-905a-bc2d3ea45202] Updating instance_info_cache with network_info: [{"id": "f2d3b19c-6db9-4224-812a-45ec031221f0", "address": "fa:16:3e:98:c6:85", "network": {"id": "15d7a776-341f-4ba6-b8c6-f0cd9f0688c1", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1631909054-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c83df00f440248ca9e84394ce6365144", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4fb94adb-cc41-4c16-9830-a3205dbd2bf5", "external-id": "nsx-vlan-transportzone-100", "segmentation_id": 100, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf2d3b19c-6d", "ovs_interfaceid": "f2d3b19c-6db9-4224-812a-45ec031221f0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": 
{}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1045.503649] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-1ebfb8de-44b3-4435-851b-8b2a3ad71f08 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] The volume has not been displaced from its original location: [datastore2] volume-0e21440a-90bd-4920-bda0-bdf25396cbd3/volume-0e21440a-90bd-4920-bda0-bdf25396cbd3.vmdk. No consolidation needed. {{(pid=63028) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1045.508811] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-1ebfb8de-44b3-4435-851b-8b2a3ad71f08 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: a50e1167-d8ed-4099-83c3-a5066ab0be1f] Reconfiguring VM instance instance-00000045 to detach disk 2001 {{(pid=63028) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1045.509650] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d24bcdfa-6fcb-4126-bd9a-7c56e7cda991 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.530677] env[63028]: DEBUG oslo_vmware.api [None req-1ebfb8de-44b3-4435-851b-8b2a3ad71f08 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Waiting for the task: (returnval){ [ 1045.530677] env[63028]: value = "task-2736171" [ 1045.530677] env[63028]: _type = "Task" [ 1045.530677] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.538786] env[63028]: DEBUG oslo_vmware.api [None req-1ebfb8de-44b3-4435-851b-8b2a3ad71f08 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': task-2736171, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.578819] env[63028]: DEBUG oslo_vmware.api [None req-a4b285e8-f524-4cef-8d89-ff6c6535982a tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] Task: {'id': task-2736169, 'name': PowerOffVM_Task, 'duration_secs': 0.282671} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.579719] env[63028]: DEBUG oslo_concurrency.lockutils [None req-55a1c518-d590-4078-972f-2184f70fe3bf tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Acquiring lock "refresh_cache-56e6ade9-893b-4c85-b0b8-e9f7b12cbad6" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1045.579886] env[63028]: DEBUG oslo_concurrency.lockutils [None req-55a1c518-d590-4078-972f-2184f70fe3bf tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Acquired lock "refresh_cache-56e6ade9-893b-4c85-b0b8-e9f7b12cbad6" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1045.580073] env[63028]: DEBUG nova.network.neutron [None req-55a1c518-d590-4078-972f-2184f70fe3bf tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 56e6ade9-893b-4c85-b0b8-e9f7b12cbad6] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1045.581275] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4b285e8-f524-4cef-8d89-ff6c6535982a tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] [instance: 53b0cf02-f1c8-4219-a58c-f7b5ffbb1ae7] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1045.581455] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-a4b285e8-f524-4cef-8d89-ff6c6535982a tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] [instance: 53b0cf02-f1c8-4219-a58c-f7b5ffbb1ae7] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1045.581888] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8cb4a4cc-9f1b-4063-b63d-0234699fcc15 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.655234] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-a4b285e8-f524-4cef-8d89-ff6c6535982a tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] [instance: 53b0cf02-f1c8-4219-a58c-f7b5ffbb1ae7] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1045.655674] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-a4b285e8-f524-4cef-8d89-ff6c6535982a tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] [instance: 53b0cf02-f1c8-4219-a58c-f7b5ffbb1ae7] Deleting contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1045.655844] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-a4b285e8-f524-4cef-8d89-ff6c6535982a tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] Deleting the datastore file [datastore2] 53b0cf02-f1c8-4219-a58c-f7b5ffbb1ae7 {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1045.656523] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c2c9bf73-7f07-4948-9b52-63de2acff540 
{{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.663062] env[63028]: DEBUG oslo_vmware.api [None req-a4b285e8-f524-4cef-8d89-ff6c6535982a tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] Waiting for the task: (returnval){ [ 1045.663062] env[63028]: value = "task-2736173" [ 1045.663062] env[63028]: _type = "Task" [ 1045.663062] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.672440] env[63028]: DEBUG oslo_vmware.api [None req-a4b285e8-f524-4cef-8d89-ff6c6535982a tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] Task: {'id': task-2736173, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.713476] env[63028]: DEBUG oslo_concurrency.lockutils [None req-fffae308-21b3-4b5b-a9cb-3d79d782e7ea tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.012s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1045.716613] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5eecdbbb-21b2-495d-8c7c-e12748cfd367 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.832s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1045.717554] env[63028]: INFO nova.compute.claims [None req-5eecdbbb-21b2-495d-8c7c-e12748cfd367 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: e048cadf-9dc1-4eb7-a825-422d0736231c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1045.738785] env[63028]: INFO nova.scheduler.client.report [None req-fffae308-21b3-4b5b-a9cb-3d79d782e7ea tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Deleted allocations for instance 63524cd8-81de-419f-bb07-0326f3cb393f [ 1045.754774] env[63028]: DEBUG oslo_vmware.api [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]522ff3b9-f9b3-c103-0f37-b0fc7060e9e3, 'name': SearchDatastore_Task, 'duration_secs': 0.01031} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.755033] env[63028]: DEBUG oslo_concurrency.lockutils [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1045.755331] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] 3566ab6f-1f8a-472d-9efb-47fa2520a215/3566ab6f-1f8a-472d-9efb-47fa2520a215.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1045.755610] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e99cf3e5-1648-46b0-aef8-330d16a468a0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.762617] env[63028]: DEBUG oslo_vmware.api [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Waiting for the task: (returnval){ [ 1045.762617] env[63028]: value = "task-2736174" [ 1045.762617] env[63028]: _type = "Task" [ 1045.762617] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.829819] env[63028]: DEBUG nova.compute.manager [req-87286b8a-370f-4eff-923a-596c6832ace7 req-5ea44899-1664-4fcf-849d-c6e126205363 service nova] [instance: 1cf111f2-df5e-48a6-905a-bc2d3ea45202] Received event network-vif-plugged-f2d3b19c-6db9-4224-812a-45ec031221f0 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1045.830115] env[63028]: DEBUG oslo_concurrency.lockutils [req-87286b8a-370f-4eff-923a-596c6832ace7 req-5ea44899-1664-4fcf-849d-c6e126205363 service nova] Acquiring lock "1cf111f2-df5e-48a6-905a-bc2d3ea45202-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1045.830275] env[63028]: DEBUG oslo_concurrency.lockutils [req-87286b8a-370f-4eff-923a-596c6832ace7 req-5ea44899-1664-4fcf-849d-c6e126205363 service nova] Lock "1cf111f2-df5e-48a6-905a-bc2d3ea45202-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1045.830456] env[63028]: DEBUG oslo_concurrency.lockutils [req-87286b8a-370f-4eff-923a-596c6832ace7 req-5ea44899-1664-4fcf-849d-c6e126205363 service nova] Lock "1cf111f2-df5e-48a6-905a-bc2d3ea45202-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1045.830619] env[63028]: DEBUG nova.compute.manager [req-87286b8a-370f-4eff-923a-596c6832ace7 req-5ea44899-1664-4fcf-849d-c6e126205363 service nova] [instance: 
1cf111f2-df5e-48a6-905a-bc2d3ea45202] No waiting events found dispatching network-vif-plugged-f2d3b19c-6db9-4224-812a-45ec031221f0 {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1045.830779] env[63028]: WARNING nova.compute.manager [req-87286b8a-370f-4eff-923a-596c6832ace7 req-5ea44899-1664-4fcf-849d-c6e126205363 service nova] [instance: 1cf111f2-df5e-48a6-905a-bc2d3ea45202] Received unexpected event network-vif-plugged-f2d3b19c-6db9-4224-812a-45ec031221f0 for instance with vm_state building and task_state spawning. [ 1045.830934] env[63028]: DEBUG nova.compute.manager [req-87286b8a-370f-4eff-923a-596c6832ace7 req-5ea44899-1664-4fcf-849d-c6e126205363 service nova] [instance: 1cf111f2-df5e-48a6-905a-bc2d3ea45202] Received event network-changed-f2d3b19c-6db9-4224-812a-45ec031221f0 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1045.831291] env[63028]: DEBUG nova.compute.manager [req-87286b8a-370f-4eff-923a-596c6832ace7 req-5ea44899-1664-4fcf-849d-c6e126205363 service nova] [instance: 1cf111f2-df5e-48a6-905a-bc2d3ea45202] Refreshing instance network info cache due to event network-changed-f2d3b19c-6db9-4224-812a-45ec031221f0. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1045.831497] env[63028]: DEBUG oslo_concurrency.lockutils [req-87286b8a-370f-4eff-923a-596c6832ace7 req-5ea44899-1664-4fcf-849d-c6e126205363 service nova] Acquiring lock "refresh_cache-1cf111f2-df5e-48a6-905a-bc2d3ea45202" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1045.876514] env[63028]: DEBUG oslo_vmware.api [None req-5d13b0db-b56d-43b7-9f3a-f0646936a40d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736170, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073779} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.876792] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-5d13b0db-b56d-43b7-9f3a-f0646936a40d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 60d18f14-536a-4b0f-912b-21f3f5a30d28] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1045.877692] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ca8da13-ee79-48db-b79b-4d4f6e249b35 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.901987] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-5d13b0db-b56d-43b7-9f3a-f0646936a40d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 60d18f14-536a-4b0f-912b-21f3f5a30d28] Reconfiguring VM instance instance-00000063 to attach disk [datastore1] 60d18f14-536a-4b0f-912b-21f3f5a30d28/60d18f14-536a-4b0f-912b-21f3f5a30d28.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1045.902817] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a43b9488-14bc-476a-a221-99a2011762c0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.922511] env[63028]: DEBUG oslo_vmware.api [None req-5d13b0db-b56d-43b7-9f3a-f0646936a40d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Waiting for the task: (returnval){ [ 1045.922511] env[63028]: value = "task-2736175" [ 1045.922511] env[63028]: _type = "Task" [ 1045.922511] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.930611] env[63028]: DEBUG oslo_vmware.api [None req-5d13b0db-b56d-43b7-9f3a-f0646936a40d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736175, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.991825] env[63028]: DEBUG oslo_concurrency.lockutils [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Releasing lock "refresh_cache-1cf111f2-df5e-48a6-905a-bc2d3ea45202" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1045.992181] env[63028]: DEBUG nova.compute.manager [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 1cf111f2-df5e-48a6-905a-bc2d3ea45202] Instance network_info: |[{"id": "f2d3b19c-6db9-4224-812a-45ec031221f0", "address": "fa:16:3e:98:c6:85", "network": {"id": "15d7a776-341f-4ba6-b8c6-f0cd9f0688c1", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1631909054-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c83df00f440248ca9e84394ce6365144", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4fb94adb-cc41-4c16-9830-a3205dbd2bf5", "external-id": "nsx-vlan-transportzone-100", "segmentation_id": 100, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf2d3b19c-6d", "ovs_interfaceid": "f2d3b19c-6db9-4224-812a-45ec031221f0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1045.992570] env[63028]: DEBUG oslo_concurrency.lockutils [req-87286b8a-370f-4eff-923a-596c6832ace7 req-5ea44899-1664-4fcf-849d-c6e126205363 service nova] Acquired lock "refresh_cache-1cf111f2-df5e-48a6-905a-bc2d3ea45202" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1045.992784] env[63028]: DEBUG nova.network.neutron [req-87286b8a-370f-4eff-923a-596c6832ace7 req-5ea44899-1664-4fcf-849d-c6e126205363 service nova] [instance: 1cf111f2-df5e-48a6-905a-bc2d3ea45202] Refreshing network info cache for port f2d3b19c-6db9-4224-812a-45ec031221f0 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1045.994201] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 1cf111f2-df5e-48a6-905a-bc2d3ea45202] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:98:c6:85', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4fb94adb-cc41-4c16-9830-a3205dbd2bf5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f2d3b19c-6db9-4224-812a-45ec031221f0', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1046.003128] env[63028]: DEBUG oslo.service.loopingcall [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 
tempest-MultipleCreateTestJSON-393833464-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1046.006556] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1cf111f2-df5e-48a6-905a-bc2d3ea45202] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1046.007257] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0c185582-a5f6-45af-8df6-c693af66b26a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.030034] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1046.030034] env[63028]: value = "task-2736176" [ 1046.030034] env[63028]: _type = "Task" [ 1046.030034] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.040657] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2736176, 'name': CreateVM_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.043990] env[63028]: DEBUG oslo_vmware.api [None req-1ebfb8de-44b3-4435-851b-8b2a3ad71f08 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': task-2736171, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.173564] env[63028]: DEBUG oslo_vmware.api [None req-a4b285e8-f524-4cef-8d89-ff6c6535982a tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] Task: {'id': task-2736173, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.362956} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.173925] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-a4b285e8-f524-4cef-8d89-ff6c6535982a tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1046.174037] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-a4b285e8-f524-4cef-8d89-ff6c6535982a tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] [instance: 53b0cf02-f1c8-4219-a58c-f7b5ffbb1ae7] Deleted contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1046.174245] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-a4b285e8-f524-4cef-8d89-ff6c6535982a tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] [instance: 53b0cf02-f1c8-4219-a58c-f7b5ffbb1ae7] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1046.174423] env[63028]: INFO nova.compute.manager [None req-a4b285e8-f524-4cef-8d89-ff6c6535982a tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] [instance: 53b0cf02-f1c8-4219-a58c-f7b5ffbb1ae7] Took 1.13 seconds to destroy the instance on the hypervisor. 
[ 1046.174759] env[63028]: DEBUG oslo.service.loopingcall [None req-a4b285e8-f524-4cef-8d89-ff6c6535982a tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1046.174958] env[63028]: DEBUG nova.compute.manager [-] [instance: 53b0cf02-f1c8-4219-a58c-f7b5ffbb1ae7] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1046.175092] env[63028]: DEBUG nova.network.neutron [-] [instance: 53b0cf02-f1c8-4219-a58c-f7b5ffbb1ae7] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1046.263022] env[63028]: DEBUG oslo_concurrency.lockutils [None req-fffae308-21b3-4b5b-a9cb-3d79d782e7ea tempest-ServersNegativeTestJSON-852925760 tempest-ServersNegativeTestJSON-852925760-project-member] Lock "63524cd8-81de-419f-bb07-0326f3cb393f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.129s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1046.282867] env[63028]: DEBUG oslo_vmware.api [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Task: {'id': task-2736174, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.435121] env[63028]: DEBUG oslo_vmware.api [None req-5d13b0db-b56d-43b7-9f3a-f0646936a40d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736175, 'name': ReconfigVM_Task, 'duration_secs': 0.364997} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.435473] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-5d13b0db-b56d-43b7-9f3a-f0646936a40d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 60d18f14-536a-4b0f-912b-21f3f5a30d28] Reconfigured VM instance instance-00000063 to attach disk [datastore1] 60d18f14-536a-4b0f-912b-21f3f5a30d28/60d18f14-536a-4b0f-912b-21f3f5a30d28.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1046.436262] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bbf72e87-d1de-4573-9a03-51c7e8a7978a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.443340] env[63028]: DEBUG oslo_vmware.api [None req-5d13b0db-b56d-43b7-9f3a-f0646936a40d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Waiting for the task: (returnval){ [ 1046.443340] env[63028]: value = "task-2736177" [ 1046.443340] env[63028]: _type = "Task" [ 1046.443340] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.451288] env[63028]: DEBUG oslo_vmware.api [None req-5d13b0db-b56d-43b7-9f3a-f0646936a40d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736177, 'name': Rename_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.546878] env[63028]: DEBUG oslo_vmware.api [None req-1ebfb8de-44b3-4435-851b-8b2a3ad71f08 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': task-2736171, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.550367] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2736176, 'name': CreateVM_Task} progress is 25%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.561717] env[63028]: DEBUG nova.network.neutron [req-87286b8a-370f-4eff-923a-596c6832ace7 req-5ea44899-1664-4fcf-849d-c6e126205363 service nova] [instance: 1cf111f2-df5e-48a6-905a-bc2d3ea45202] Updated VIF entry in instance network info cache for port f2d3b19c-6db9-4224-812a-45ec031221f0. {{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1046.561982] env[63028]: DEBUG nova.network.neutron [req-87286b8a-370f-4eff-923a-596c6832ace7 req-5ea44899-1664-4fcf-849d-c6e126205363 service nova] [instance: 1cf111f2-df5e-48a6-905a-bc2d3ea45202] Updating instance_info_cache with network_info: [{"id": "f2d3b19c-6db9-4224-812a-45ec031221f0", "address": "fa:16:3e:98:c6:85", "network": {"id": "15d7a776-341f-4ba6-b8c6-f0cd9f0688c1", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1631909054-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c83df00f440248ca9e84394ce6365144", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4fb94adb-cc41-4c16-9830-a3205dbd2bf5", "external-id": "nsx-vlan-transportzone-100", "segmentation_id": 100, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf2d3b19c-6d", "ovs_interfaceid": "f2d3b19c-6db9-4224-812a-45ec031221f0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1046.778216] env[63028]: DEBUG oslo_vmware.api [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Task: {'id': task-2736174, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.713411} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.784192] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] 3566ab6f-1f8a-472d-9efb-47fa2520a215/3566ab6f-1f8a-472d-9efb-47fa2520a215.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1046.784830] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 3566ab6f-1f8a-472d-9efb-47fa2520a215] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1046.785523] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-249f38e3-1963-4d4d-a767-3dce634bcac6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.796905] env[63028]: DEBUG oslo_vmware.api [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Waiting for the task: (returnval){ [ 1046.796905] env[63028]: value = "task-2736178" [ 1046.796905] env[63028]: _type = "Task" [ 1046.796905] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.805308] env[63028]: DEBUG oslo_vmware.api [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Task: {'id': task-2736178, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.919358] env[63028]: DEBUG nova.network.neutron [None req-55a1c518-d590-4078-972f-2184f70fe3bf tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 56e6ade9-893b-4c85-b0b8-e9f7b12cbad6] Updating instance_info_cache with network_info: [{"id": "1f5c01d1-9623-425e-8309-336dd1d961fa", "address": "fa:16:3e:7a:87:25", "network": {"id": "e8109779-7eb7-4751-b6e5-c1e3b007cb9a", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1892394824-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "05118b378b5e4d838962db2378b381bc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5446413d-c3b0-4cd2-a962-62240db178ac", "external-id": "nsx-vlan-transportzone-528", "segmentation_id": 528, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1f5c01d1-96", "ovs_interfaceid": "1f5c01d1-9623-425e-8309-336dd1d961fa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1046.954242] env[63028]: DEBUG oslo_vmware.api [None req-5d13b0db-b56d-43b7-9f3a-f0646936a40d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736177, 'name': Rename_Task, 'duration_secs': 0.153006} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.954615] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d13b0db-b56d-43b7-9f3a-f0646936a40d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 60d18f14-536a-4b0f-912b-21f3f5a30d28] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1046.954926] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-417a8ae9-c386-432c-89e8-fe8868372c5f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.965024] env[63028]: DEBUG oslo_vmware.api [None req-5d13b0db-b56d-43b7-9f3a-f0646936a40d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Waiting for the task: (returnval){ [ 1046.965024] env[63028]: value = "task-2736179" [ 1046.965024] env[63028]: _type = "Task" [ 1046.965024] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.974103] env[63028]: DEBUG oslo_vmware.api [None req-5d13b0db-b56d-43b7-9f3a-f0646936a40d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736179, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.045763] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2736176, 'name': CreateVM_Task} progress is 99%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.052248] env[63028]: DEBUG oslo_vmware.api [None req-1ebfb8de-44b3-4435-851b-8b2a3ad71f08 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': task-2736171, 'name': ReconfigVM_Task, 'duration_secs': 1.497397} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.052573] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-1ebfb8de-44b3-4435-851b-8b2a3ad71f08 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: a50e1167-d8ed-4099-83c3-a5066ab0be1f] Reconfigured VM instance instance-00000045 to detach disk 2001 {{(pid=63028) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1047.058041] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-86b29494-8278-4a05-9a58-2d01770b207a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.073912] env[63028]: DEBUG oslo_concurrency.lockutils [req-87286b8a-370f-4eff-923a-596c6832ace7 req-5ea44899-1664-4fcf-849d-c6e126205363 service nova] Releasing lock "refresh_cache-1cf111f2-df5e-48a6-905a-bc2d3ea45202" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1047.080348] env[63028]: DEBUG oslo_vmware.api [None req-1ebfb8de-44b3-4435-851b-8b2a3ad71f08 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Waiting for the task: (returnval){ [ 1047.080348] env[63028]: value = "task-2736180" [ 1047.080348] env[63028]: _type = "Task" [ 1047.080348] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.093915] env[63028]: DEBUG oslo_vmware.api [None req-1ebfb8de-44b3-4435-851b-8b2a3ad71f08 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': task-2736180, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.137347] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6af18c69-46a8-43ff-bfd7-29c0c64bf40a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.145319] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e4845a1-8fd3-458d-b5a8-c178c8876d62 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.178918] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d1f86fd-c0a1-447d-8766-868a4290bfd3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.190021] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a665a650-95f5-4290-af27-844dc8d408ff {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.204202] env[63028]: DEBUG nova.compute.provider_tree [None req-5eecdbbb-21b2-495d-8c7c-e12748cfd367 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1047.304783] env[63028]: DEBUG oslo_vmware.api [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Task: {'id': task-2736178, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069102} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.305160] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 3566ab6f-1f8a-472d-9efb-47fa2520a215] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1047.305957] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98076c55-980b-4753-bc10-70a8a65eb4fe {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.328468] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 3566ab6f-1f8a-472d-9efb-47fa2520a215] Reconfiguring VM instance instance-00000064 to attach disk [datastore1] 3566ab6f-1f8a-472d-9efb-47fa2520a215/3566ab6f-1f8a-472d-9efb-47fa2520a215.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1047.328699] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9af82ac8-8f35-4ec0-9a45-c94723f069f7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.348554] env[63028]: DEBUG oslo_vmware.api [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Waiting for the task: (returnval){ [ 1047.348554] env[63028]: value = "task-2736181" [ 1047.348554] env[63028]: _type = "Task" [ 1047.348554] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.357255] env[63028]: DEBUG oslo_vmware.api [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Task: {'id': task-2736181, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.423960] env[63028]: DEBUG oslo_concurrency.lockutils [None req-55a1c518-d590-4078-972f-2184f70fe3bf tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Releasing lock "refresh_cache-56e6ade9-893b-4c85-b0b8-e9f7b12cbad6" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1047.476340] env[63028]: DEBUG oslo_vmware.api [None req-5d13b0db-b56d-43b7-9f3a-f0646936a40d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736179, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.542119] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2736176, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.592469] env[63028]: DEBUG oslo_vmware.api [None req-1ebfb8de-44b3-4435-851b-8b2a3ad71f08 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': task-2736180, 'name': ReconfigVM_Task, 'duration_secs': 0.23999} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.592921] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-1ebfb8de-44b3-4435-851b-8b2a3ad71f08 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: a50e1167-d8ed-4099-83c3-a5066ab0be1f] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-550804', 'volume_id': '0e21440a-90bd-4920-bda0-bdf25396cbd3', 'name': 'volume-0e21440a-90bd-4920-bda0-bdf25396cbd3', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'a50e1167-d8ed-4099-83c3-a5066ab0be1f', 'attached_at': '', 'detached_at': '', 'volume_id': '0e21440a-90bd-4920-bda0-bdf25396cbd3', 'serial': '0e21440a-90bd-4920-bda0-bdf25396cbd3'} {{(pid=63028) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1047.712780] env[63028]: DEBUG nova.scheduler.client.report [None req-5eecdbbb-21b2-495d-8c7c-e12748cfd367 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1047.717029] env[63028]: DEBUG nova.network.neutron [-] [instance: 53b0cf02-f1c8-4219-a58c-f7b5ffbb1ae7] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1047.860702] env[63028]: DEBUG nova.compute.manager [req-172fa8d1-c98f-413f-81fe-491ed9a9cdcc req-cc86c7b8-42f4-40e2-9f44-f9232c03aec9 service nova] [instance: 53b0cf02-f1c8-4219-a58c-f7b5ffbb1ae7] Received event network-vif-deleted-5dc30fb0-c128-49d6-a5d1-cd0f53cc9958 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1047.864979] env[63028]: DEBUG oslo_vmware.api [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Task: {'id': task-2736181, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.959015] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6955870-8150-4baf-801e-cac06f542f09 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.982824] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d872ca4-6bc8-440f-8e0d-d87610f699b7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.990023] env[63028]: DEBUG oslo_vmware.api [None req-5d13b0db-b56d-43b7-9f3a-f0646936a40d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736179, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.994185] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-55a1c518-d590-4078-972f-2184f70fe3bf tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 56e6ade9-893b-4c85-b0b8-e9f7b12cbad6] Updating instance '56e6ade9-893b-4c85-b0b8-e9f7b12cbad6' progress to 83 {{(pid=63028) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1048.043364] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2736176, 'name': CreateVM_Task} progress is 99%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.144252] env[63028]: DEBUG nova.objects.instance [None req-1ebfb8de-44b3-4435-851b-8b2a3ad71f08 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Lazy-loading 'flavor' on Instance uuid a50e1167-d8ed-4099-83c3-a5066ab0be1f {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1048.219315] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5eecdbbb-21b2-495d-8c7c-e12748cfd367 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.503s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1048.220011] env[63028]: DEBUG nova.compute.manager [None req-5eecdbbb-21b2-495d-8c7c-e12748cfd367 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: e048cadf-9dc1-4eb7-a825-422d0736231c] Start building networks asynchronously for instance. 
{{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1048.223251] env[63028]: DEBUG oslo_concurrency.lockutils [None req-87d04e4f-fe7d-4082-b35e-2aecbc7aed47 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.112s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1048.223512] env[63028]: DEBUG nova.objects.instance [None req-87d04e4f-fe7d-4082-b35e-2aecbc7aed47 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Lazy-loading 'resources' on Instance uuid 46dc76bc-854f-46ad-9db5-21cf6f40fb21 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1048.225238] env[63028]: INFO nova.compute.manager [-] [instance: 53b0cf02-f1c8-4219-a58c-f7b5ffbb1ae7] Took 2.05 seconds to deallocate network for instance. [ 1048.360132] env[63028]: DEBUG oslo_vmware.api [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Task: {'id': task-2736181, 'name': ReconfigVM_Task, 'duration_secs': 0.902739} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.360425] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 3566ab6f-1f8a-472d-9efb-47fa2520a215] Reconfigured VM instance instance-00000064 to attach disk [datastore1] 3566ab6f-1f8a-472d-9efb-47fa2520a215/3566ab6f-1f8a-472d-9efb-47fa2520a215.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1048.361144] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-dd100fa5-d9d3-4c3e-8640-413f4d372183 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.367040] env[63028]: DEBUG oslo_vmware.api [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Waiting for the task: (returnval){ [ 1048.367040] env[63028]: value = "task-2736182" [ 1048.367040] env[63028]: _type = "Task" [ 1048.367040] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.376265] env[63028]: DEBUG oslo_vmware.api [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Task: {'id': task-2736182, 'name': Rename_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.488546] env[63028]: DEBUG oslo_vmware.api [None req-5d13b0db-b56d-43b7-9f3a-f0646936a40d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736179, 'name': PowerOnVM_Task, 'duration_secs': 1.396615} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.489085] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d13b0db-b56d-43b7-9f3a-f0646936a40d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 60d18f14-536a-4b0f-912b-21f3f5a30d28] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1048.489085] env[63028]: INFO nova.compute.manager [None req-5d13b0db-b56d-43b7-9f3a-f0646936a40d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 60d18f14-536a-4b0f-912b-21f3f5a30d28] Took 9.31 seconds to spawn the instance on the hypervisor. [ 1048.489287] env[63028]: DEBUG nova.compute.manager [None req-5d13b0db-b56d-43b7-9f3a-f0646936a40d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 60d18f14-536a-4b0f-912b-21f3f5a30d28] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1048.490537] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-416c1a7f-461d-4fa6-bfe0-d2e8c25273a7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.500170] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-55a1c518-d590-4078-972f-2184f70fe3bf tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 56e6ade9-893b-4c85-b0b8-e9f7b12cbad6] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1048.503104] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0613fc35-6ce9-443d-9519-8971a10777c6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.509774] env[63028]: DEBUG oslo_vmware.api [None req-55a1c518-d590-4078-972f-2184f70fe3bf tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Waiting for the task: (returnval){ [ 1048.509774] env[63028]: value = "task-2736183" [ 1048.509774] env[63028]: _type = "Task" [ 1048.509774] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.518629] env[63028]: DEBUG oslo_vmware.api [None req-55a1c518-d590-4078-972f-2184f70fe3bf tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2736183, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.542963] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2736176, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.610065] env[63028]: DEBUG oslo_concurrency.lockutils [None req-799e32b6-85e5-444b-9a50-b3905eb7775a tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Acquiring lock "8bb61bfa-d44e-4e06-867a-445d9b3db660" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1048.611022] env[63028]: DEBUG oslo_concurrency.lockutils [None req-799e32b6-85e5-444b-9a50-b3905eb7775a tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Lock "8bb61bfa-d44e-4e06-867a-445d9b3db660" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1048.728105] env[63028]: DEBUG nova.compute.utils [None req-5eecdbbb-21b2-495d-8c7c-e12748cfd367 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1048.731829] env[63028]: DEBUG nova.compute.manager [None req-5eecdbbb-21b2-495d-8c7c-e12748cfd367 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: e048cadf-9dc1-4eb7-a825-422d0736231c] Allocating IP information in the background. {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1048.732274] env[63028]: DEBUG nova.network.neutron [None req-5eecdbbb-21b2-495d-8c7c-e12748cfd367 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: e048cadf-9dc1-4eb7-a825-422d0736231c] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1048.734989] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a4b285e8-f524-4cef-8d89-ff6c6535982a tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1048.774127] env[63028]: DEBUG nova.policy [None req-5eecdbbb-21b2-495d-8c7c-e12748cfd367 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1b48f3f2a85945379bdb33bf153bde9c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '25a6457f62d149629c09589feb1a550c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 1048.880367] env[63028]: DEBUG oslo_vmware.api [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Task: {'id': task-2736182, 'name': Rename_Task, 'duration_secs': 0.202521} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.884026] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 3566ab6f-1f8a-472d-9efb-47fa2520a215] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1048.884026] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3fe022b2-e1ac-4cf7-97a6-db06fc45058b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.893828] env[63028]: DEBUG oslo_vmware.api [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Waiting for the task: (returnval){ [ 1048.893828] env[63028]: value = "task-2736184" [ 1048.893828] env[63028]: _type = "Task" [ 1048.893828] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.903771] env[63028]: DEBUG oslo_vmware.api [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Task: {'id': task-2736184, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.020663] env[63028]: INFO nova.compute.manager [None req-5d13b0db-b56d-43b7-9f3a-f0646936a40d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 60d18f14-536a-4b0f-912b-21f3f5a30d28] Took 25.27 seconds to build instance. [ 1049.026904] env[63028]: DEBUG oslo_vmware.api [None req-55a1c518-d590-4078-972f-2184f70fe3bf tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2736183, 'name': PowerOnVM_Task, 'duration_secs': 0.421641} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.027493] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-55a1c518-d590-4078-972f-2184f70fe3bf tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 56e6ade9-893b-4c85-b0b8-e9f7b12cbad6] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1049.027840] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-55a1c518-d590-4078-972f-2184f70fe3bf tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 56e6ade9-893b-4c85-b0b8-e9f7b12cbad6] Updating instance '56e6ade9-893b-4c85-b0b8-e9f7b12cbad6' progress to 100 {{(pid=63028) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1049.045276] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2736176, 'name': CreateVM_Task, 'duration_secs': 2.743092} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.047690] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1cf111f2-df5e-48a6-905a-bc2d3ea45202] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1049.048844] env[63028]: DEBUG oslo_concurrency.lockutils [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1049.048948] env[63028]: DEBUG oslo_concurrency.lockutils [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1049.049264] env[63028]: DEBUG oslo_concurrency.lockutils [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1049.049523] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e5238f06-3f12-4d7c-abcb-1da336f7c54a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.054732] env[63028]: DEBUG oslo_vmware.api [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Waiting for the task: (returnval){ [ 1049.054732] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52505ee6-2536-7e49-011c-1d5757cb5b8d" [ 1049.054732] env[63028]: _type = "Task" [ 1049.054732] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.065052] env[63028]: DEBUG oslo_vmware.api [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52505ee6-2536-7e49-011c-1d5757cb5b8d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.093808] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1208fe51-4119-4863-b530-f01fe2dedec7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.102776] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00b1ca90-9020-4ce6-8545-5f9c3596b5a2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.131823] env[63028]: INFO nova.compute.manager [None req-799e32b6-85e5-444b-9a50-b3905eb7775a tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] Detaching volume fff41433-1dbe-4075-9b8b-6bae1342802a [ 1049.134640] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfce3ee9-cce2-4d86-b6d2-b81293c63001 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.142937] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-752a4e6f-8b01-490f-b2c2-a7c076c87ca9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.156695] env[63028]: DEBUG oslo_concurrency.lockutils [None req-1ebfb8de-44b3-4435-851b-8b2a3ad71f08 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Lock "a50e1167-d8ed-4099-83c3-a5066ab0be1f" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 4.280s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1049.157949] env[63028]: DEBUG nova.compute.provider_tree [None req-87d04e4f-fe7d-4082-b35e-2aecbc7aed47 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1049.159783] env[63028]: DEBUG nova.network.neutron [None req-5eecdbbb-21b2-495d-8c7c-e12748cfd367 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: e048cadf-9dc1-4eb7-a825-422d0736231c] Successfully created port: 60891063-6c30-480a-8e2b-f3960496f2fd {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1049.168854] env[63028]: INFO nova.virt.block_device [None req-799e32b6-85e5-444b-9a50-b3905eb7775a tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] Attempting to driver detach volume fff41433-1dbe-4075-9b8b-6bae1342802a from mountpoint /dev/sdb [ 1049.169091] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-799e32b6-85e5-444b-9a50-b3905eb7775a tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] Volume detach. 
Driver type: vmdk {{(pid=63028) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1049.169344] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-799e32b6-85e5-444b-9a50-b3905eb7775a tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-550831', 'volume_id': 'fff41433-1dbe-4075-9b8b-6bae1342802a', 'name': 'volume-fff41433-1dbe-4075-9b8b-6bae1342802a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '8bb61bfa-d44e-4e06-867a-445d9b3db660', 'attached_at': '', 'detached_at': '', 'volume_id': 'fff41433-1dbe-4075-9b8b-6bae1342802a', 'serial': 'fff41433-1dbe-4075-9b8b-6bae1342802a'} {{(pid=63028) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1049.170713] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8569c9a-36b2-4f90-a538-f06fd3beffca {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.191674] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf4c9f4e-9aaa-462f-87c2-c94986e497b7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.198282] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d9ca4bd-1dc9-4fe9-b40b-3835a841dbb0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.223425] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a77bce9-00b6-4587-aab7-55ff1def0f19 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.240325] env[63028]: DEBUG nova.compute.manager [None req-5eecdbbb-21b2-495d-8c7c-e12748cfd367 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: e048cadf-9dc1-4eb7-a825-422d0736231c] Start building block device mappings for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1049.243580] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-799e32b6-85e5-444b-9a50-b3905eb7775a tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] The volume has not been displaced from its original location: [datastore2] volume-fff41433-1dbe-4075-9b8b-6bae1342802a/volume-fff41433-1dbe-4075-9b8b-6bae1342802a.vmdk. No consolidation needed. 
{{(pid=63028) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1049.249155] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-799e32b6-85e5-444b-9a50-b3905eb7775a tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] Reconfiguring VM instance instance-00000050 to detach disk 2001 {{(pid=63028) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1049.249802] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-151d1217-e343-45fc-9ed7-85cbb8d56cda {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.270257] env[63028]: DEBUG oslo_vmware.api [None req-799e32b6-85e5-444b-9a50-b3905eb7775a tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Waiting for the task: (returnval){ [ 1049.270257] env[63028]: value = "task-2736185" [ 1049.270257] env[63028]: _type = "Task" [ 1049.270257] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.281921] env[63028]: DEBUG oslo_vmware.api [None req-799e32b6-85e5-444b-9a50-b3905eb7775a tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2736185, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.396094] env[63028]: DEBUG oslo_concurrency.lockutils [None req-92e3eef4-1898-4aab-9a30-727634119e63 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Acquiring lock "a50e1167-d8ed-4099-83c3-a5066ab0be1f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1049.396094] env[63028]: DEBUG oslo_concurrency.lockutils [None req-92e3eef4-1898-4aab-9a30-727634119e63 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Lock "a50e1167-d8ed-4099-83c3-a5066ab0be1f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1049.396199] env[63028]: DEBUG oslo_concurrency.lockutils [None req-92e3eef4-1898-4aab-9a30-727634119e63 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Acquiring lock "a50e1167-d8ed-4099-83c3-a5066ab0be1f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1049.397755] env[63028]: DEBUG oslo_concurrency.lockutils [None req-92e3eef4-1898-4aab-9a30-727634119e63 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Lock "a50e1167-d8ed-4099-83c3-a5066ab0be1f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1049.397755] env[63028]: DEBUG 
oslo_concurrency.lockutils [None req-92e3eef4-1898-4aab-9a30-727634119e63 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Lock "a50e1167-d8ed-4099-83c3-a5066ab0be1f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1049.402289] env[63028]: INFO nova.compute.manager [None req-92e3eef4-1898-4aab-9a30-727634119e63 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: a50e1167-d8ed-4099-83c3-a5066ab0be1f] Terminating instance [ 1049.408999] env[63028]: DEBUG oslo_vmware.api [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Task: {'id': task-2736184, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.523548] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5d13b0db-b56d-43b7-9f3a-f0646936a40d tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Lock "60d18f14-536a-4b0f-912b-21f3f5a30d28" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.789s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1049.564919] env[63028]: DEBUG oslo_vmware.api [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52505ee6-2536-7e49-011c-1d5757cb5b8d, 'name': SearchDatastore_Task, 'duration_secs': 0.014622} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.565242] env[63028]: DEBUG oslo_concurrency.lockutils [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1049.565472] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 1cf111f2-df5e-48a6-905a-bc2d3ea45202] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1049.565728] env[63028]: DEBUG oslo_concurrency.lockutils [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1049.565876] env[63028]: DEBUG oslo_concurrency.lockutils [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1049.566068] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1049.566349] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7f2a909f-fc3d-422f-bce3-64b4b2d8db0a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.581362] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1049.581596] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1049.582337] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6dd76322-0458-400b-b327-af9c9136c5a4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.587965] env[63028]: DEBUG oslo_vmware.api [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Waiting for the task: (returnval){ [ 1049.587965] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]528cb825-92b7-99b9-874c-7f1f29ada9e4" [ 1049.587965] env[63028]: _type = "Task" [ 1049.587965] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.595474] env[63028]: DEBUG oslo_vmware.api [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]528cb825-92b7-99b9-874c-7f1f29ada9e4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.662895] env[63028]: DEBUG nova.scheduler.client.report [None req-87d04e4f-fe7d-4082-b35e-2aecbc7aed47 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1049.780396] env[63028]: DEBUG oslo_vmware.api [None req-799e32b6-85e5-444b-9a50-b3905eb7775a tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2736185, 'name': ReconfigVM_Task, 'duration_secs': 0.256768} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.780638] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-799e32b6-85e5-444b-9a50-b3905eb7775a tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] Reconfigured VM instance instance-00000050 to detach disk 2001 {{(pid=63028) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1049.785984] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ba68d374-a566-4d1b-9ee7-0c8e8392fea7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.803326] env[63028]: DEBUG oslo_vmware.api [None req-799e32b6-85e5-444b-9a50-b3905eb7775a tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Waiting for the task: (returnval){ [ 1049.803326] env[63028]: value = "task-2736186" [ 1049.803326] env[63028]: _type = "Task" [ 1049.803326] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.811771] env[63028]: DEBUG oslo_vmware.api [None req-799e32b6-85e5-444b-9a50-b3905eb7775a tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2736186, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.906435] env[63028]: DEBUG nova.compute.manager [None req-92e3eef4-1898-4aab-9a30-727634119e63 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: a50e1167-d8ed-4099-83c3-a5066ab0be1f] Start destroying the instance on the hypervisor. {{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1049.907130] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-92e3eef4-1898-4aab-9a30-727634119e63 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: a50e1167-d8ed-4099-83c3-a5066ab0be1f] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1049.911029] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d73011a-e222-44ae-8bf8-110839f656fa {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.914197] env[63028]: DEBUG oslo_vmware.api [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Task: {'id': task-2736184, 'name': PowerOnVM_Task, 'duration_secs': 0.821164} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.914528] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 3566ab6f-1f8a-472d-9efb-47fa2520a215] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1049.914827] env[63028]: INFO nova.compute.manager [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 3566ab6f-1f8a-472d-9efb-47fa2520a215] Took 8.71 seconds to spawn the instance on the hypervisor. [ 1049.915024] env[63028]: DEBUG nova.compute.manager [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 3566ab6f-1f8a-472d-9efb-47fa2520a215] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1049.916245] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1448c64a-a568-4730-baa2-9950fef5220d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.923143] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-92e3eef4-1898-4aab-9a30-727634119e63 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: a50e1167-d8ed-4099-83c3-a5066ab0be1f] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1049.923939] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a251b1ce-92c3-40b0-a20d-1af6ff5e6984 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.934561] env[63028]: DEBUG oslo_vmware.api [None req-92e3eef4-1898-4aab-9a30-727634119e63 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Waiting for the task: (returnval){ [ 1049.934561] env[63028]: value = "task-2736187" [ 1049.934561] env[63028]: _type = "Task" [ 1049.934561] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.944665] env[63028]: DEBUG oslo_vmware.api [None req-92e3eef4-1898-4aab-9a30-727634119e63 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': task-2736187, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.098245] env[63028]: DEBUG oslo_vmware.api [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]528cb825-92b7-99b9-874c-7f1f29ada9e4, 'name': SearchDatastore_Task, 'duration_secs': 0.03029} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.099044] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f6f002d1-6d1c-46c6-8763-acb5d8f93a0e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.104499] env[63028]: DEBUG oslo_vmware.api [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Waiting for the task: (returnval){ [ 1050.104499] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52a0c7f7-aa7a-7461-a100-15ccec10fdd7" [ 1050.104499] env[63028]: _type = "Task" [ 1050.104499] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.112440] env[63028]: DEBUG oslo_vmware.api [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52a0c7f7-aa7a-7461-a100-15ccec10fdd7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.172410] env[63028]: DEBUG oslo_concurrency.lockutils [None req-87d04e4f-fe7d-4082-b35e-2aecbc7aed47 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.945s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1050.173095] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2498bff7-1e43-4867-b0b7-b0701db51cf1 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.575s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1050.173585] env[63028]: DEBUG nova.objects.instance [None req-2498bff7-1e43-4867-b0b7-b0701db51cf1 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Lazy-loading 'resources' on Instance uuid b16d85d7-13f3-4be0-8495-2fd2c1476f01 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1050.199038] env[63028]: INFO nova.scheduler.client.report [None req-87d04e4f-fe7d-4082-b35e-2aecbc7aed47 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Deleted allocations for instance 46dc76bc-854f-46ad-9db5-21cf6f40fb21 [ 1050.256490] env[63028]: DEBUG nova.compute.manager [None req-5eecdbbb-21b2-495d-8c7c-e12748cfd367 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: e048cadf-9dc1-4eb7-a825-422d0736231c] Start spawning the instance on the hypervisor. 
{{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1050.285196] env[63028]: DEBUG nova.virt.hardware [None req-5eecdbbb-21b2-495d-8c7c-e12748cfd367 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1050.285447] env[63028]: DEBUG nova.virt.hardware [None req-5eecdbbb-21b2-495d-8c7c-e12748cfd367 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1050.285628] env[63028]: DEBUG nova.virt.hardware [None req-5eecdbbb-21b2-495d-8c7c-e12748cfd367 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1050.286102] env[63028]: DEBUG nova.virt.hardware [None req-5eecdbbb-21b2-495d-8c7c-e12748cfd367 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1050.286102] env[63028]: DEBUG nova.virt.hardware [None req-5eecdbbb-21b2-495d-8c7c-e12748cfd367 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1050.286213] env[63028]: DEBUG nova.virt.hardware [None req-5eecdbbb-21b2-495d-8c7c-e12748cfd367 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1050.286329] env[63028]: DEBUG nova.virt.hardware [None req-5eecdbbb-21b2-495d-8c7c-e12748cfd367 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1050.286500] env[63028]: DEBUG nova.virt.hardware [None req-5eecdbbb-21b2-495d-8c7c-e12748cfd367 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1050.286673] 
env[63028]: DEBUG nova.virt.hardware [None req-5eecdbbb-21b2-495d-8c7c-e12748cfd367 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1050.286882] env[63028]: DEBUG nova.virt.hardware [None req-5eecdbbb-21b2-495d-8c7c-e12748cfd367 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1050.287016] env[63028]: DEBUG nova.virt.hardware [None req-5eecdbbb-21b2-495d-8c7c-e12748cfd367 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1050.288148] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-374cdc60-3b17-448a-ba4a-626b92b2be98 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.292710] env[63028]: DEBUG nova.compute.manager [req-376b817f-ff7c-41f3-8828-6d2f53a1ac60 req-207d7323-ead2-425e-b928-a85ab50b1377 service nova] [instance: 60d18f14-536a-4b0f-912b-21f3f5a30d28] Received event network-changed-2e1ed2b6-3ab4-47dc-9247-d7c30d3bc291 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1050.292898] env[63028]: DEBUG nova.compute.manager [req-376b817f-ff7c-41f3-8828-6d2f53a1ac60 req-207d7323-ead2-425e-b928-a85ab50b1377 service nova] [instance: 60d18f14-536a-4b0f-912b-21f3f5a30d28] Refreshing instance network info cache due to event network-changed-2e1ed2b6-3ab4-47dc-9247-d7c30d3bc291. 
{{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1050.293123] env[63028]: DEBUG oslo_concurrency.lockutils [req-376b817f-ff7c-41f3-8828-6d2f53a1ac60 req-207d7323-ead2-425e-b928-a85ab50b1377 service nova] Acquiring lock "refresh_cache-60d18f14-536a-4b0f-912b-21f3f5a30d28" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1050.293270] env[63028]: DEBUG oslo_concurrency.lockutils [req-376b817f-ff7c-41f3-8828-6d2f53a1ac60 req-207d7323-ead2-425e-b928-a85ab50b1377 service nova] Acquired lock "refresh_cache-60d18f14-536a-4b0f-912b-21f3f5a30d28" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1050.293434] env[63028]: DEBUG nova.network.neutron [req-376b817f-ff7c-41f3-8828-6d2f53a1ac60 req-207d7323-ead2-425e-b928-a85ab50b1377 service nova] [instance: 60d18f14-536a-4b0f-912b-21f3f5a30d28] Refreshing network info cache for port 2e1ed2b6-3ab4-47dc-9247-d7c30d3bc291 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1050.299931] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e6932a8-b661-4be9-af6e-23ea2d8e7e06 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.313217] env[63028]: DEBUG oslo_vmware.api [None req-799e32b6-85e5-444b-9a50-b3905eb7775a tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2736186, 'name': ReconfigVM_Task, 'duration_secs': 0.207244} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.320338] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-799e32b6-85e5-444b-9a50-b3905eb7775a tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-550831', 'volume_id': 'fff41433-1dbe-4075-9b8b-6bae1342802a', 'name': 'volume-fff41433-1dbe-4075-9b8b-6bae1342802a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '8bb61bfa-d44e-4e06-867a-445d9b3db660', 'attached_at': '', 'detached_at': '', 'volume_id': 'fff41433-1dbe-4075-9b8b-6bae1342802a', 'serial': 'fff41433-1dbe-4075-9b8b-6bae1342802a'} {{(pid=63028) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1050.441103] env[63028]: INFO nova.compute.manager [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 3566ab6f-1f8a-472d-9efb-47fa2520a215] Took 20.58 seconds to build instance. [ 1050.449324] env[63028]: DEBUG oslo_vmware.api [None req-92e3eef4-1898-4aab-9a30-727634119e63 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': task-2736187, 'name': PowerOffVM_Task, 'duration_secs': 0.212326} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.449757] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-92e3eef4-1898-4aab-9a30-727634119e63 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: a50e1167-d8ed-4099-83c3-a5066ab0be1f] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1050.449861] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-92e3eef4-1898-4aab-9a30-727634119e63 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: a50e1167-d8ed-4099-83c3-a5066ab0be1f] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1050.450044] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cd880751-4c73-491a-abf8-3cf116ff089e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.512040] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-92e3eef4-1898-4aab-9a30-727634119e63 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: a50e1167-d8ed-4099-83c3-a5066ab0be1f] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1050.512040] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-92e3eef4-1898-4aab-9a30-727634119e63 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: a50e1167-d8ed-4099-83c3-a5066ab0be1f] Deleting contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1050.512172] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-92e3eef4-1898-4aab-9a30-727634119e63 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Deleting the datastore file [datastore1] a50e1167-d8ed-4099-83c3-a5066ab0be1f {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1050.512847] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1d6388d3-f46c-452c-9139-00bf97fcd9ed {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.520106] env[63028]: DEBUG oslo_vmware.api [None req-92e3eef4-1898-4aab-9a30-727634119e63 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Waiting for the task: (returnval){ [ 1050.520106] env[63028]: value = "task-2736189" [ 1050.520106] env[63028]: _type = "Task" [ 1050.520106] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.532728] env[63028]: DEBUG oslo_vmware.api [None req-92e3eef4-1898-4aab-9a30-727634119e63 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': task-2736189, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.615270] env[63028]: DEBUG oslo_vmware.api [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52a0c7f7-aa7a-7461-a100-15ccec10fdd7, 'name': SearchDatastore_Task, 'duration_secs': 0.01074} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.615551] env[63028]: DEBUG oslo_concurrency.lockutils [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1050.615830] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] 1cf111f2-df5e-48a6-905a-bc2d3ea45202/1cf111f2-df5e-48a6-905a-bc2d3ea45202.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1050.616108] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a582e591-54bb-4f3a-9ca6-ff5f3403a06b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.622329] env[63028]: DEBUG oslo_vmware.api [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Waiting for the task: (returnval){ [ 1050.622329] env[63028]: value = "task-2736190" [ 1050.622329] env[63028]: _type = "Task" [ 1050.622329] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.629948] env[63028]: DEBUG oslo_vmware.api [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Task: {'id': task-2736190, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.707118] env[63028]: DEBUG oslo_concurrency.lockutils [None req-87d04e4f-fe7d-4082-b35e-2aecbc7aed47 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Lock "46dc76bc-854f-46ad-9db5-21cf6f40fb21" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.911s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1050.889477] env[63028]: DEBUG nova.objects.instance [None req-799e32b6-85e5-444b-9a50-b3905eb7775a tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Lazy-loading 'flavor' on Instance uuid 8bb61bfa-d44e-4e06-867a-445d9b3db660 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1050.941957] env[63028]: DEBUG nova.network.neutron [None req-5eecdbbb-21b2-495d-8c7c-e12748cfd367 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: e048cadf-9dc1-4eb7-a825-422d0736231c] Successfully updated port: 60891063-6c30-480a-8e2b-f3960496f2fd {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1050.943431] env[63028]: DEBUG oslo_concurrency.lockutils [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Lock "3566ab6f-1f8a-472d-9efb-47fa2520a215" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.128s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1051.036920] env[63028]: DEBUG oslo_vmware.api [None req-92e3eef4-1898-4aab-9a30-727634119e63 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': task-2736189, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.251902} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1051.037229] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-92e3eef4-1898-4aab-9a30-727634119e63 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1051.037420] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-92e3eef4-1898-4aab-9a30-727634119e63 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: a50e1167-d8ed-4099-83c3-a5066ab0be1f] Deleted contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1051.037599] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-92e3eef4-1898-4aab-9a30-727634119e63 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: a50e1167-d8ed-4099-83c3-a5066ab0be1f] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1051.037797] env[63028]: INFO nova.compute.manager [None req-92e3eef4-1898-4aab-9a30-727634119e63 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: a50e1167-d8ed-4099-83c3-a5066ab0be1f] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1051.038086] env[63028]: DEBUG oslo.service.loopingcall [None req-92e3eef4-1898-4aab-9a30-727634119e63 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1051.038293] env[63028]: DEBUG nova.compute.manager [-] [instance: a50e1167-d8ed-4099-83c3-a5066ab0be1f] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1051.038389] env[63028]: DEBUG nova.network.neutron [-] [instance: a50e1167-d8ed-4099-83c3-a5066ab0be1f] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1051.069585] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8cd7b3f4-3be4-4b37-9fea-533f26d5aab9 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Acquiring lock "56e6ade9-893b-4c85-b0b8-e9f7b12cbad6" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1051.069814] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8cd7b3f4-3be4-4b37-9fea-533f26d5aab9 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Lock "56e6ade9-893b-4c85-b0b8-e9f7b12cbad6" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1051.070015] env[63028]: DEBUG nova.compute.manager [None req-8cd7b3f4-3be4-4b37-9fea-533f26d5aab9 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 56e6ade9-893b-4c85-b0b8-e9f7b12cbad6] Going to confirm migration 5 {{(pid=63028) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1051.106378] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb42aa6b-ee05-43d1-8199-b460c402a578 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.119622] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-016d7547-bbbc-44dc-8d79-4adb6a16ac12 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.172528] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07b3d91b-2a6c-4f32-96b7-348226c22476 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.175575] env[63028]: DEBUG oslo_vmware.api [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Task: {'id': task-2736190, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.185489] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89b53d1c-f79a-4bbc-91d2-0eaf7f9d51e8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.203953] env[63028]: DEBUG nova.compute.provider_tree [None req-2498bff7-1e43-4867-b0b7-b0701db51cf1 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1051.380657] env[63028]: DEBUG nova.network.neutron [req-376b817f-ff7c-41f3-8828-6d2f53a1ac60 req-207d7323-ead2-425e-b928-a85ab50b1377 service nova] [instance: 60d18f14-536a-4b0f-912b-21f3f5a30d28] Updated VIF entry in instance network info cache for port 2e1ed2b6-3ab4-47dc-9247-d7c30d3bc291. {{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1051.381147] env[63028]: DEBUG nova.network.neutron [req-376b817f-ff7c-41f3-8828-6d2f53a1ac60 req-207d7323-ead2-425e-b928-a85ab50b1377 service nova] [instance: 60d18f14-536a-4b0f-912b-21f3f5a30d28] Updating instance_info_cache with network_info: [{"id": "2e1ed2b6-3ab4-47dc-9247-d7c30d3bc291", "address": "fa:16:3e:d1:35:37", "network": {"id": "ec7daf90-9857-4c67-8588-1d8404499409", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-574413795-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.236", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e85128c5c889438bbb1df571b7756c6a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e1a5c1-4ae7-409b-8de7-d401684ef60d", "external-id": "nsx-vlan-transportzone-740", "segmentation_id": 740, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2e1ed2b6-3a", "ovs_interfaceid": "2e1ed2b6-3ab4-47dc-9247-d7c30d3bc291", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1051.444975] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5eecdbbb-21b2-495d-8c7c-e12748cfd367 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Acquiring lock "refresh_cache-e048cadf-9dc1-4eb7-a825-422d0736231c" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1051.445251] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5eecdbbb-21b2-495d-8c7c-e12748cfd367 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Acquired lock "refresh_cache-e048cadf-9dc1-4eb7-a825-422d0736231c" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1051.445448] env[63028]: DEBUG 
nova.network.neutron [None req-5eecdbbb-21b2-495d-8c7c-e12748cfd367 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: e048cadf-9dc1-4eb7-a825-422d0736231c] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1051.637206] env[63028]: DEBUG oslo_vmware.api [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Task: {'id': task-2736190, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.606188} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1051.638317] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8cd7b3f4-3be4-4b37-9fea-533f26d5aab9 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Acquiring lock "refresh_cache-56e6ade9-893b-4c85-b0b8-e9f7b12cbad6" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1051.638482] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8cd7b3f4-3be4-4b37-9fea-533f26d5aab9 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Acquired lock "refresh_cache-56e6ade9-893b-4c85-b0b8-e9f7b12cbad6" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1051.638656] env[63028]: DEBUG nova.network.neutron [None req-8cd7b3f4-3be4-4b37-9fea-533f26d5aab9 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 56e6ade9-893b-4c85-b0b8-e9f7b12cbad6] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1051.638839] env[63028]: DEBUG nova.objects.instance [None req-8cd7b3f4-3be4-4b37-9fea-533f26d5aab9 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Lazy-loading 'info_cache' on Instance uuid 56e6ade9-893b-4c85-b0b8-e9f7b12cbad6 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1051.640246] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] 1cf111f2-df5e-48a6-905a-bc2d3ea45202/1cf111f2-df5e-48a6-905a-bc2d3ea45202.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1051.640475] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 1cf111f2-df5e-48a6-905a-bc2d3ea45202] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1051.640950] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3e43a624-45f8-4b36-9163-7e5d88b35b04 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.646903] env[63028]: DEBUG oslo_vmware.api [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 
tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Waiting for the task: (returnval){ [ 1051.646903] env[63028]: value = "task-2736191" [ 1051.646903] env[63028]: _type = "Task" [ 1051.646903] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1051.654771] env[63028]: DEBUG oslo_vmware.api [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Task: {'id': task-2736191, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.709027] env[63028]: DEBUG nova.scheduler.client.report [None req-2498bff7-1e43-4867-b0b7-b0701db51cf1 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1051.884863] env[63028]: DEBUG oslo_concurrency.lockutils [req-376b817f-ff7c-41f3-8828-6d2f53a1ac60 req-207d7323-ead2-425e-b928-a85ab50b1377 service nova] Releasing lock "refresh_cache-60d18f14-536a-4b0f-912b-21f3f5a30d28" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1051.902203] env[63028]: DEBUG oslo_concurrency.lockutils [None req-799e32b6-85e5-444b-9a50-b3905eb7775a tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Lock "8bb61bfa-d44e-4e06-867a-445d9b3db660" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.292s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1051.986483] env[63028]: DEBUG nova.network.neutron [None req-5eecdbbb-21b2-495d-8c7c-e12748cfd367 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: e048cadf-9dc1-4eb7-a825-422d0736231c] Instance cache missing network info. {{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1052.157409] env[63028]: DEBUG oslo_vmware.api [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Task: {'id': task-2736191, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06235} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.157751] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 1cf111f2-df5e-48a6-905a-bc2d3ea45202] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1052.158543] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc4d54d5-d5fa-409e-a7bb-42d388620748 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.161580] env[63028]: DEBUG nova.network.neutron [None req-5eecdbbb-21b2-495d-8c7c-e12748cfd367 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: e048cadf-9dc1-4eb7-a825-422d0736231c] Updating instance_info_cache with network_info: [{"id": "60891063-6c30-480a-8e2b-f3960496f2fd", "address": "fa:16:3e:84:9a:c5", "network": {"id": "c2f1496c-e3fd-43db-a032-12cdacdb4e46", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1287620143-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "25a6457f62d149629c09589feb1a550c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap60891063-6c", "ovs_interfaceid": "60891063-6c30-480a-8e2b-f3960496f2fd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1052.183795] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 1cf111f2-df5e-48a6-905a-bc2d3ea45202] Reconfiguring VM instance instance-00000065 to attach disk [datastore1] 1cf111f2-df5e-48a6-905a-bc2d3ea45202/1cf111f2-df5e-48a6-905a-bc2d3ea45202.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1052.185767] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2ab6642c-781d-47f7-adb6-d59c5aa8c3f5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.210351] env[63028]: DEBUG oslo_vmware.api [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Waiting for the task: (returnval){ [ 1052.210351] env[63028]: value = "task-2736192" [ 1052.210351] env[63028]: _type = "Task" [ 1052.210351] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.213854] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2498bff7-1e43-4867-b0b7-b0701db51cf1 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.043s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1052.215944] env[63028]: DEBUG oslo_concurrency.lockutils [None req-cb35495d-a0bd-4e98-8bbe-8d2dc2396165 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.506s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1052.216229] env[63028]: DEBUG nova.objects.instance [None req-cb35495d-a0bd-4e98-8bbe-8d2dc2396165 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Lazy-loading 'resources' on Instance uuid c492dea4-9779-4460-a559-5b82fb0643f0 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1052.222256] env[63028]: DEBUG oslo_vmware.api [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Task: {'id': task-2736192, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.229903] env[63028]: INFO nova.scheduler.client.report [None req-2498bff7-1e43-4867-b0b7-b0701db51cf1 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Deleted allocations for instance b16d85d7-13f3-4be0-8495-2fd2c1476f01 [ 1052.274103] env[63028]: DEBUG nova.network.neutron [-] [instance: a50e1167-d8ed-4099-83c3-a5066ab0be1f] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1052.318644] env[63028]: DEBUG nova.compute.manager [req-5837d58e-e231-4c64-9f72-f2aa9d192285 req-22f4efd3-9ca4-403a-afaa-154e77bef53a service nova] [instance: e048cadf-9dc1-4eb7-a825-422d0736231c] Received event network-vif-plugged-60891063-6c30-480a-8e2b-f3960496f2fd {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1052.318879] env[63028]: DEBUG oslo_concurrency.lockutils [req-5837d58e-e231-4c64-9f72-f2aa9d192285 req-22f4efd3-9ca4-403a-afaa-154e77bef53a service nova] Acquiring lock "e048cadf-9dc1-4eb7-a825-422d0736231c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1052.319246] env[63028]: DEBUG oslo_concurrency.lockutils [req-5837d58e-e231-4c64-9f72-f2aa9d192285 req-22f4efd3-9ca4-403a-afaa-154e77bef53a service nova] Lock "e048cadf-9dc1-4eb7-a825-422d0736231c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1052.319372] env[63028]: DEBUG oslo_concurrency.lockutils [req-5837d58e-e231-4c64-9f72-f2aa9d192285 req-22f4efd3-9ca4-403a-afaa-154e77bef53a service nova] Lock 
"e048cadf-9dc1-4eb7-a825-422d0736231c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1052.319546] env[63028]: DEBUG nova.compute.manager [req-5837d58e-e231-4c64-9f72-f2aa9d192285 req-22f4efd3-9ca4-403a-afaa-154e77bef53a service nova] [instance: e048cadf-9dc1-4eb7-a825-422d0736231c] No waiting events found dispatching network-vif-plugged-60891063-6c30-480a-8e2b-f3960496f2fd {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1052.319713] env[63028]: WARNING nova.compute.manager [req-5837d58e-e231-4c64-9f72-f2aa9d192285 req-22f4efd3-9ca4-403a-afaa-154e77bef53a service nova] [instance: e048cadf-9dc1-4eb7-a825-422d0736231c] Received unexpected event network-vif-plugged-60891063-6c30-480a-8e2b-f3960496f2fd for instance with vm_state building and task_state spawning. [ 1052.319875] env[63028]: DEBUG nova.compute.manager [req-5837d58e-e231-4c64-9f72-f2aa9d192285 req-22f4efd3-9ca4-403a-afaa-154e77bef53a service nova] [instance: e048cadf-9dc1-4eb7-a825-422d0736231c] Received event network-changed-60891063-6c30-480a-8e2b-f3960496f2fd {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1052.320042] env[63028]: DEBUG nova.compute.manager [req-5837d58e-e231-4c64-9f72-f2aa9d192285 req-22f4efd3-9ca4-403a-afaa-154e77bef53a service nova] [instance: e048cadf-9dc1-4eb7-a825-422d0736231c] Refreshing instance network info cache due to event network-changed-60891063-6c30-480a-8e2b-f3960496f2fd. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1052.320214] env[63028]: DEBUG oslo_concurrency.lockutils [req-5837d58e-e231-4c64-9f72-f2aa9d192285 req-22f4efd3-9ca4-403a-afaa-154e77bef53a service nova] Acquiring lock "refresh_cache-e048cadf-9dc1-4eb7-a825-422d0736231c" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1052.452160] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f2059770-ba5f-433e-a3f5-b0c33884aea0 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Acquiring lock "4ec96b68-2fdb-4150-8d26-53fdf79c8e26" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1052.452808] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f2059770-ba5f-433e-a3f5-b0c33884aea0 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Lock "4ec96b68-2fdb-4150-8d26-53fdf79c8e26" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1052.665198] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5eecdbbb-21b2-495d-8c7c-e12748cfd367 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Releasing lock "refresh_cache-e048cadf-9dc1-4eb7-a825-422d0736231c" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1052.665548] env[63028]: DEBUG nova.compute.manager [None req-5eecdbbb-21b2-495d-8c7c-e12748cfd367 tempest-AttachInterfacesTestJSON-1120072222 
tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: e048cadf-9dc1-4eb7-a825-422d0736231c] Instance network_info: |[{"id": "60891063-6c30-480a-8e2b-f3960496f2fd", "address": "fa:16:3e:84:9a:c5", "network": {"id": "c2f1496c-e3fd-43db-a032-12cdacdb4e46", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1287620143-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "25a6457f62d149629c09589feb1a550c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap60891063-6c", "ovs_interfaceid": "60891063-6c30-480a-8e2b-f3960496f2fd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1052.665881] env[63028]: DEBUG oslo_concurrency.lockutils [req-5837d58e-e231-4c64-9f72-f2aa9d192285 req-22f4efd3-9ca4-403a-afaa-154e77bef53a service nova] Acquired lock "refresh_cache-e048cadf-9dc1-4eb7-a825-422d0736231c" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1052.666071] env[63028]: DEBUG nova.network.neutron [req-5837d58e-e231-4c64-9f72-f2aa9d192285 req-22f4efd3-9ca4-403a-afaa-154e77bef53a service nova] [instance: e048cadf-9dc1-4eb7-a825-422d0736231c] Refreshing network info cache for port 60891063-6c30-480a-8e2b-f3960496f2fd {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1052.667413] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-5eecdbbb-21b2-495d-8c7c-e12748cfd367 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: e048cadf-9dc1-4eb7-a825-422d0736231c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:84:9a:c5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cbf3349e-d05e-4d44-a011-c4b6e41af988', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '60891063-6c30-480a-8e2b-f3960496f2fd', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1052.674785] env[63028]: DEBUG oslo.service.loopingcall [None req-5eecdbbb-21b2-495d-8c7c-e12748cfd367 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1052.679778] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e048cadf-9dc1-4eb7-a825-422d0736231c] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1052.680274] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a0975a54-94ca-4f86-bd6f-9c0a3d9a42ad {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.700017] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1052.700017] env[63028]: value = "task-2736193" [ 1052.700017] env[63028]: _type = "Task" [ 1052.700017] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.707743] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2736193, 'name': CreateVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.721729] env[63028]: DEBUG oslo_vmware.api [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Task: {'id': task-2736192, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.741219] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2498bff7-1e43-4867-b0b7-b0701db51cf1 tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Lock "b16d85d7-13f3-4be0-8495-2fd2c1476f01" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.177s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1052.777438] env[63028]: INFO nova.compute.manager [-] [instance: a50e1167-d8ed-4099-83c3-a5066ab0be1f] Took 1.74 seconds to deallocate network for instance. [ 1052.956371] env[63028]: DEBUG nova.compute.manager [None req-f2059770-ba5f-433e-a3f5-b0c33884aea0 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 4ec96b68-2fdb-4150-8d26-53fdf79c8e26] Starting instance... 
{{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1052.983309] env[63028]: DEBUG oslo_concurrency.lockutils [None req-94b92cf4-72cd-48b1-86e7-cfb6a1cebfbe tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Acquiring lock "8bb61bfa-d44e-4e06-867a-445d9b3db660" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1052.983523] env[63028]: DEBUG oslo_concurrency.lockutils [None req-94b92cf4-72cd-48b1-86e7-cfb6a1cebfbe tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Lock "8bb61bfa-d44e-4e06-867a-445d9b3db660" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1052.983740] env[63028]: DEBUG oslo_concurrency.lockutils [None req-94b92cf4-72cd-48b1-86e7-cfb6a1cebfbe tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Acquiring lock "8bb61bfa-d44e-4e06-867a-445d9b3db660-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1052.983930] env[63028]: DEBUG oslo_concurrency.lockutils [None req-94b92cf4-72cd-48b1-86e7-cfb6a1cebfbe tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Lock "8bb61bfa-d44e-4e06-867a-445d9b3db660-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1052.984133] env[63028]: DEBUG oslo_concurrency.lockutils [None req-94b92cf4-72cd-48b1-86e7-cfb6a1cebfbe tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Lock "8bb61bfa-d44e-4e06-867a-445d9b3db660-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1052.989382] env[63028]: INFO nova.compute.manager [None req-94b92cf4-72cd-48b1-86e7-cfb6a1cebfbe tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] Terminating instance [ 1053.100917] env[63028]: DEBUG nova.network.neutron [None req-8cd7b3f4-3be4-4b37-9fea-533f26d5aab9 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 56e6ade9-893b-4c85-b0b8-e9f7b12cbad6] Updating instance_info_cache with network_info: [{"id": "1f5c01d1-9623-425e-8309-336dd1d961fa", "address": "fa:16:3e:7a:87:25", "network": {"id": "e8109779-7eb7-4751-b6e5-c1e3b007cb9a", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1892394824-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": 
{"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "05118b378b5e4d838962db2378b381bc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5446413d-c3b0-4cd2-a962-62240db178ac", "external-id": "nsx-vlan-transportzone-528", "segmentation_id": 528, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1f5c01d1-96", "ovs_interfaceid": "1f5c01d1-9623-425e-8309-336dd1d961fa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1053.147522] env[63028]: DEBUG nova.network.neutron [req-5837d58e-e231-4c64-9f72-f2aa9d192285 req-22f4efd3-9ca4-403a-afaa-154e77bef53a service nova] [instance: e048cadf-9dc1-4eb7-a825-422d0736231c] Updated VIF entry in instance network info cache for port 60891063-6c30-480a-8e2b-f3960496f2fd. {{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1053.147901] env[63028]: DEBUG nova.network.neutron [req-5837d58e-e231-4c64-9f72-f2aa9d192285 req-22f4efd3-9ca4-403a-afaa-154e77bef53a service nova] [instance: e048cadf-9dc1-4eb7-a825-422d0736231c] Updating instance_info_cache with network_info: [{"id": "60891063-6c30-480a-8e2b-f3960496f2fd", "address": "fa:16:3e:84:9a:c5", "network": {"id": "c2f1496c-e3fd-43db-a032-12cdacdb4e46", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1287620143-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "25a6457f62d149629c09589feb1a550c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap60891063-6c", "ovs_interfaceid": "60891063-6c30-480a-8e2b-f3960496f2fd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1053.210622] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2736193, 'name': CreateVM_Task, 'duration_secs': 0.391792} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.210821] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e048cadf-9dc1-4eb7-a825-422d0736231c] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1053.211449] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5eecdbbb-21b2-495d-8c7c-e12748cfd367 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1053.211619] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5eecdbbb-21b2-495d-8c7c-e12748cfd367 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1053.211952] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5eecdbbb-21b2-495d-8c7c-e12748cfd367 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1053.212246] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-261f042c-ebab-4460-a449-e4ec52fc7322 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.219666] env[63028]: DEBUG oslo_vmware.api [None req-5eecdbbb-21b2-495d-8c7c-e12748cfd367 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Waiting for the task: (returnval){ [ 1053.219666] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52da8c26-5a37-c94d-642c-3bdd696cdd3a" [ 1053.219666] env[63028]: _type = "Task" [ 1053.219666] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.222938] env[63028]: DEBUG oslo_vmware.api [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Task: {'id': task-2736192, 'name': ReconfigVM_Task, 'duration_secs': 0.640767} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.226629] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 1cf111f2-df5e-48a6-905a-bc2d3ea45202] Reconfigured VM instance instance-00000065 to attach disk [datastore1] 1cf111f2-df5e-48a6-905a-bc2d3ea45202/1cf111f2-df5e-48a6-905a-bc2d3ea45202.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1053.227705] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e2dc086-09fc-488f-ad34-da5b0223ae7b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.230185] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-695ff9c0-ec1c-4508-9527-df2ac71b3a51 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.239718] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e39ba28d-945a-43f3-aac5-d4beb6b86c69 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.242712] env[63028]: DEBUG oslo_vmware.api [None req-5eecdbbb-21b2-495d-8c7c-e12748cfd367 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52da8c26-5a37-c94d-642c-3bdd696cdd3a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.242986] env[63028]: DEBUG oslo_vmware.api [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Waiting for the task: (returnval){ [ 1053.242986] env[63028]: value = "task-2736194" [ 1053.242986] env[63028]: _type = "Task" [ 1053.242986] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.271984] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73a25fa6-323e-4933-aa38-ce4aa0972e91 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.278219] env[63028]: DEBUG oslo_vmware.api [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Task: {'id': task-2736194, 'name': Rename_Task} progress is 14%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.283318] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56ec5a17-ddf5-4782-8645-fa420cc977b4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.289883] env[63028]: DEBUG oslo_concurrency.lockutils [None req-92e3eef4-1898-4aab-9a30-727634119e63 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1053.299688] env[63028]: DEBUG nova.compute.provider_tree [None req-cb35495d-a0bd-4e98-8bbe-8d2dc2396165 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1053.476514] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f2059770-ba5f-433e-a3f5-b0c33884aea0 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1053.499884] env[63028]: DEBUG nova.compute.manager [None req-94b92cf4-72cd-48b1-86e7-cfb6a1cebfbe tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] Start destroying the instance on the hypervisor. 
{{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1053.500203] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-94b92cf4-72cd-48b1-86e7-cfb6a1cebfbe tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1053.501110] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31e546f7-505e-4947-a2c1-2b5f3fabd6c1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.509652] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-94b92cf4-72cd-48b1-86e7-cfb6a1cebfbe tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1053.510516] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-90aaefa2-e73c-495a-8c3d-e69facfd637a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.517666] env[63028]: DEBUG oslo_vmware.api [None req-94b92cf4-72cd-48b1-86e7-cfb6a1cebfbe tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Waiting for the task: (returnval){ [ 1053.517666] env[63028]: value = "task-2736195" [ 1053.517666] env[63028]: _type = "Task" [ 1053.517666] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.525880] env[63028]: DEBUG oslo_vmware.api [None req-94b92cf4-72cd-48b1-86e7-cfb6a1cebfbe tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2736195, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.604577] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8cd7b3f4-3be4-4b37-9fea-533f26d5aab9 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Releasing lock "refresh_cache-56e6ade9-893b-4c85-b0b8-e9f7b12cbad6" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1053.604965] env[63028]: DEBUG nova.objects.instance [None req-8cd7b3f4-3be4-4b37-9fea-533f26d5aab9 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Lazy-loading 'migration_context' on Instance uuid 56e6ade9-893b-4c85-b0b8-e9f7b12cbad6 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1053.650855] env[63028]: DEBUG oslo_concurrency.lockutils [req-5837d58e-e231-4c64-9f72-f2aa9d192285 req-22f4efd3-9ca4-403a-afaa-154e77bef53a service nova] Releasing lock "refresh_cache-e048cadf-9dc1-4eb7-a825-422d0736231c" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1053.651940] env[63028]: DEBUG nova.compute.manager [req-5837d58e-e231-4c64-9f72-f2aa9d192285 req-22f4efd3-9ca4-403a-afaa-154e77bef53a service nova] [instance: a50e1167-d8ed-4099-83c3-a5066ab0be1f] Received event network-vif-deleted-abc41dea-8b6f-4cf7-b02f-21996a0aaf8d {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1053.733040] env[63028]: DEBUG oslo_vmware.api [None req-5eecdbbb-21b2-495d-8c7c-e12748cfd367 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52da8c26-5a37-c94d-642c-3bdd696cdd3a, 'name': SearchDatastore_Task, 'duration_secs': 0.028726} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.733361] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5eecdbbb-21b2-495d-8c7c-e12748cfd367 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1053.733558] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-5eecdbbb-21b2-495d-8c7c-e12748cfd367 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: e048cadf-9dc1-4eb7-a825-422d0736231c] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1053.733808] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5eecdbbb-21b2-495d-8c7c-e12748cfd367 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1053.733974] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5eecdbbb-21b2-495d-8c7c-e12748cfd367 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1053.734178] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-5eecdbbb-21b2-495d-8c7c-e12748cfd367 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1053.734464] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-818b9bd7-9136-408e-b6e8-115a86578d37 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.744029] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-5eecdbbb-21b2-495d-8c7c-e12748cfd367 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1053.744215] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-5eecdbbb-21b2-495d-8c7c-e12748cfd367 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1053.747964] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a4f431cc-a378-4209-a7c1-968c83801c59 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.755327] env[63028]: DEBUG oslo_vmware.api [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Task: {'id': task-2736194, 'name': Rename_Task, 'duration_secs': 0.429531} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.756164] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 1cf111f2-df5e-48a6-905a-bc2d3ea45202] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1053.756477] env[63028]: DEBUG oslo_vmware.api [None req-5eecdbbb-21b2-495d-8c7c-e12748cfd367 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Waiting for the task: (returnval){ [ 1053.756477] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52fbc24e-3109-bd8f-2e68-481c2d77cf24" [ 1053.756477] env[63028]: _type = "Task" [ 1053.756477] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.756689] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-941e9cb8-60b5-472a-89a2-4f1abf67f178 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.766529] env[63028]: DEBUG oslo_vmware.api [None req-5eecdbbb-21b2-495d-8c7c-e12748cfd367 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52fbc24e-3109-bd8f-2e68-481c2d77cf24, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.768041] env[63028]: DEBUG oslo_vmware.api [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Waiting for the task: (returnval){ [ 1053.768041] env[63028]: value = "task-2736196" [ 1053.768041] env[63028]: _type = "Task" [ 1053.768041] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.775810] env[63028]: DEBUG oslo_vmware.api [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Task: {'id': task-2736196, 'name': PowerOnVM_Task} progress is 0%. 
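The recurring "Waiting for the task ... to complete", "progress is N%" and "completed successfully" entries in this stretch come from oslo.vmware polling vCenter tasks (PowerOffVM_Task, PowerOnVM_Task, SearchDatastore_Task). A minimal stand-alone sketch of that polling pattern follows; it is illustrative only, not the oslo.vmware implementation, and TaskInfo plus the injected poll callable are invented for the example.

import time
from dataclasses import dataclass

@dataclass
class TaskInfo:
    state: str          # "running" | "success" | "error"
    progress: int = 0
    result: object = None
    error: str = ""

def wait_for_task(poll, task_id, interval=0.5):
    """Poll poll(task_id) until the task reaches a terminal state,
    mirroring the 'progress is N%' / 'completed successfully' lines."""
    while True:
        info = poll(task_id)
        if info.state == "running":
            print(f"Task {task_id}: progress is {info.progress}%")
        elif info.state == "success":
            print(f"Task {task_id}: completed successfully")
            return info.result
        else:
            raise RuntimeError(f"Task {task_id} failed: {info.error}")
        time.sleep(interval)

# toy poll function that reports 0%, 66%, then success
_states = iter([TaskInfo("running", 0), TaskInfo("running", 66),
                TaskInfo("success", 100, result="done")])
print(wait_for_task(lambda _id: next(_states), "task-2736195", interval=0))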
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.804288] env[63028]: DEBUG nova.scheduler.client.report [None req-cb35495d-a0bd-4e98-8bbe-8d2dc2396165 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1054.026708] env[63028]: DEBUG oslo_vmware.api [None req-94b92cf4-72cd-48b1-86e7-cfb6a1cebfbe tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2736195, 'name': PowerOffVM_Task, 'duration_secs': 0.333004} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1054.026968] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-94b92cf4-72cd-48b1-86e7-cfb6a1cebfbe tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1054.027162] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-94b92cf4-72cd-48b1-86e7-cfb6a1cebfbe tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1054.027431] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-96ff71ee-8b33-4989-9a73-ba02b6273b2e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.099465] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-94b92cf4-72cd-48b1-86e7-cfb6a1cebfbe tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1054.099465] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-94b92cf4-72cd-48b1-86e7-cfb6a1cebfbe tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] Deleting contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1054.099668] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-94b92cf4-72cd-48b1-86e7-cfb6a1cebfbe tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Deleting the datastore file [datastore2] 8bb61bfa-d44e-4e06-867a-445d9b3db660 {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1054.099892] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with 
opID=oslo.vmware-ffd9717c-cfb7-49f0-9799-fdc05717611c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.107413] env[63028]: DEBUG nova.objects.base [None req-8cd7b3f4-3be4-4b37-9fea-533f26d5aab9 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Object Instance<56e6ade9-893b-4c85-b0b8-e9f7b12cbad6> lazy-loaded attributes: info_cache,migration_context {{(pid=63028) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1054.107821] env[63028]: DEBUG oslo_vmware.api [None req-94b92cf4-72cd-48b1-86e7-cfb6a1cebfbe tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Waiting for the task: (returnval){ [ 1054.107821] env[63028]: value = "task-2736198" [ 1054.107821] env[63028]: _type = "Task" [ 1054.107821] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1054.108538] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa02cee6-eea5-4540-b18a-d131752f620c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.118509] env[63028]: DEBUG oslo_vmware.api [None req-94b92cf4-72cd-48b1-86e7-cfb6a1cebfbe tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2736198, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.133323] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0ee758a4-84d1-4b9a-ae3d-a8828d436cd7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.138192] env[63028]: DEBUG oslo_vmware.api [None req-8cd7b3f4-3be4-4b37-9fea-533f26d5aab9 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Waiting for the task: (returnval){ [ 1054.138192] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52245f3b-d4f3-43ad-a7f5-3b90b2902aab" [ 1054.138192] env[63028]: _type = "Task" [ 1054.138192] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1054.145645] env[63028]: DEBUG oslo_vmware.api [None req-8cd7b3f4-3be4-4b37-9fea-533f26d5aab9 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52245f3b-d4f3-43ad-a7f5-3b90b2902aab, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.184439] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7590fdc3-7918-4146-9016-8089567d1b4a tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Acquiring lock "56d6982d-9f76-4952-8c8b-f64b3c8d02fe" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1054.184744] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7590fdc3-7918-4146-9016-8089567d1b4a tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Lock "56d6982d-9f76-4952-8c8b-f64b3c8d02fe" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1054.184964] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7590fdc3-7918-4146-9016-8089567d1b4a tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Acquiring lock "56d6982d-9f76-4952-8c8b-f64b3c8d02fe-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1054.185168] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7590fdc3-7918-4146-9016-8089567d1b4a tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Lock "56d6982d-9f76-4952-8c8b-f64b3c8d02fe-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1054.185342] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7590fdc3-7918-4146-9016-8089567d1b4a tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Lock "56d6982d-9f76-4952-8c8b-f64b3c8d02fe-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1054.187607] env[63028]: INFO nova.compute.manager [None req-7590fdc3-7918-4146-9016-8089567d1b4a tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] [instance: 56d6982d-9f76-4952-8c8b-f64b3c8d02fe] Terminating instance [ 1054.268518] env[63028]: DEBUG oslo_vmware.api [None req-5eecdbbb-21b2-495d-8c7c-e12748cfd367 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52fbc24e-3109-bd8f-2e68-481c2d77cf24, 'name': SearchDatastore_Task, 'duration_secs': 0.020368} completed successfully. 
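The acquire/release pairs above ("Acquiring lock ... by ...", "acquired ... waited 0.000s", '"released" ... held 0.000s') are Nova serializing per-instance and per-resource work through oslo_concurrency.lockutils named locks. Below is a stdlib-only sketch of the same waited/held bookkeeping; it only illustrates the pattern and is not the lockutils implementation.

import threading
import time
from collections import defaultdict
from contextlib import contextmanager

_locks = defaultdict(threading.Lock)   # one lock object per name

@contextmanager
def named_lock(name, caller):
    """Serialize work on a shared name and report waited/held times,
    in the spirit of the lockutils messages in the trace."""
    print(f'Acquiring lock "{name}" by "{caller}"')
    wait_start = time.monotonic()
    with _locks[name]:
        print(f'Lock "{name}" acquired by "{caller}" :: '
              f'waited {time.monotonic() - wait_start:.3f}s')
        held_start = time.monotonic()
        try:
            yield
        finally:
            print(f'Lock "{name}" "released" by "{caller}" :: '
                  f'held {time.monotonic() - held_start:.3f}s')

with named_lock("compute_resources", "ResourceTracker.update_usage"):
    pass  # critical section, e.g. updating tracked usage for an instance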
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1054.272200] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bd6dc6de-2f17-47b5-a4f3-b209f73fec9b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.279393] env[63028]: DEBUG oslo_vmware.api [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Task: {'id': task-2736196, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.280627] env[63028]: DEBUG oslo_vmware.api [None req-5eecdbbb-21b2-495d-8c7c-e12748cfd367 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Waiting for the task: (returnval){ [ 1054.280627] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5213a0dd-a4e5-f5b5-54ff-e56be1d2d621" [ 1054.280627] env[63028]: _type = "Task" [ 1054.280627] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1054.288261] env[63028]: DEBUG oslo_vmware.api [None req-5eecdbbb-21b2-495d-8c7c-e12748cfd367 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5213a0dd-a4e5-f5b5-54ff-e56be1d2d621, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.309221] env[63028]: DEBUG oslo_concurrency.lockutils [None req-cb35495d-a0bd-4e98-8bbe-8d2dc2396165 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.093s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1054.311697] env[63028]: DEBUG oslo_concurrency.lockutils [None req-42208f00-4ed8-4f3a-a14c-9aad1270b8df tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.108s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1054.311848] env[63028]: DEBUG nova.objects.instance [None req-42208f00-4ed8-4f3a-a14c-9aad1270b8df tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] Lazy-loading 'resources' on Instance uuid da23282a-bbda-47bf-9d9c-337ee9996779 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1054.335564] env[63028]: INFO nova.scheduler.client.report [None req-cb35495d-a0bd-4e98-8bbe-8d2dc2396165 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Deleted allocations for instance c492dea4-9779-4460-a559-5b82fb0643f0 [ 1054.622585] env[63028]: DEBUG oslo_vmware.api [None req-94b92cf4-72cd-48b1-86e7-cfb6a1cebfbe tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2736198, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.27689} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1054.622846] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-94b92cf4-72cd-48b1-86e7-cfb6a1cebfbe tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1054.623049] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-94b92cf4-72cd-48b1-86e7-cfb6a1cebfbe tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] Deleted contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1054.623234] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-94b92cf4-72cd-48b1-86e7-cfb6a1cebfbe tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1054.623411] env[63028]: INFO nova.compute.manager [None req-94b92cf4-72cd-48b1-86e7-cfb6a1cebfbe tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1054.623651] env[63028]: DEBUG oslo.service.loopingcall [None req-94b92cf4-72cd-48b1-86e7-cfb6a1cebfbe tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1054.623855] env[63028]: DEBUG nova.compute.manager [-] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1054.623938] env[63028]: DEBUG nova.network.neutron [-] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1054.652389] env[63028]: DEBUG oslo_vmware.api [None req-8cd7b3f4-3be4-4b37-9fea-533f26d5aab9 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52245f3b-d4f3-43ad-a7f5-3b90b2902aab, 'name': SearchDatastore_Task, 'duration_secs': 0.007741} completed successfully. 
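At this point instance 8bb61bfa-d44e-4e06-867a-445d9b3db660 has gone through the full teardown the trace shows: PowerOffVM_Task, UnregisterVM, DeleteDatastoreFile_Task on its datastore directory, then network deallocation. A hedged outline of that ordering follows; the individual steps are injected callables standing in for the real nova.virt.vmwareapi and neutron calls, not the actual APIs.

def destroy_instance(instance_uuid, power_off, unregister,
                     delete_datastore_dir, deallocate_network):
    """Run the teardown steps in the order the trace shows."""
    power_off(instance_uuid)             # VirtualMachine.PowerOffVM_Task
    unregister(instance_uuid)            # VirtualMachine.UnregisterVM
    delete_datastore_dir(instance_uuid)  # FileManager.DeleteDatastoreFile_Task
    deallocate_network(instance_uuid)    # neutron deallocate_for_instance()

# toy usage with print stubs standing in for the real calls
destroy_instance(
    "8bb61bfa-d44e-4e06-867a-445d9b3db660",
    power_off=lambda u: print("powered off", u),
    unregister=lambda u: print("unregistered", u),
    delete_datastore_dir=lambda u: print("deleted [datastore2]", u),
    deallocate_network=lambda u: print("deallocated network for", u),
)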
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1054.652389] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8cd7b3f4-3be4-4b37-9fea-533f26d5aab9 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1054.691882] env[63028]: DEBUG nova.compute.manager [None req-7590fdc3-7918-4146-9016-8089567d1b4a tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] [instance: 56d6982d-9f76-4952-8c8b-f64b3c8d02fe] Start destroying the instance on the hypervisor. {{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1054.692130] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-7590fdc3-7918-4146-9016-8089567d1b4a tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] [instance: 56d6982d-9f76-4952-8c8b-f64b3c8d02fe] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1054.693054] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4af83e4-9dab-4ad1-b02e-775674206f26 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.700377] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-7590fdc3-7918-4146-9016-8089567d1b4a tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] [instance: 56d6982d-9f76-4952-8c8b-f64b3c8d02fe] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1054.700696] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-850918f7-d078-4e1b-afeb-7333d61d59d2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.706382] env[63028]: DEBUG oslo_vmware.api [None req-7590fdc3-7918-4146-9016-8089567d1b4a tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Waiting for the task: (returnval){ [ 1054.706382] env[63028]: value = "task-2736199" [ 1054.706382] env[63028]: _type = "Task" [ 1054.706382] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1054.713995] env[63028]: DEBUG oslo_vmware.api [None req-7590fdc3-7918-4146-9016-8089567d1b4a tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Task: {'id': task-2736199, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.781912] env[63028]: DEBUG oslo_vmware.api [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Task: {'id': task-2736196, 'name': PowerOnVM_Task, 'duration_secs': 0.716147} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1054.781912] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 1cf111f2-df5e-48a6-905a-bc2d3ea45202] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1054.781912] env[63028]: INFO nova.compute.manager [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 1cf111f2-df5e-48a6-905a-bc2d3ea45202] Took 11.07 seconds to spawn the instance on the hypervisor. [ 1054.781912] env[63028]: DEBUG nova.compute.manager [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 1cf111f2-df5e-48a6-905a-bc2d3ea45202] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1054.781912] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1500b31b-d052-4579-8e62-3d418ed9e51c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.798181] env[63028]: DEBUG oslo_vmware.api [None req-5eecdbbb-21b2-495d-8c7c-e12748cfd367 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5213a0dd-a4e5-f5b5-54ff-e56be1d2d621, 'name': SearchDatastore_Task, 'duration_secs': 0.008831} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1054.798181] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5eecdbbb-21b2-495d-8c7c-e12748cfd367 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1054.798181] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-5eecdbbb-21b2-495d-8c7c-e12748cfd367 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] e048cadf-9dc1-4eb7-a825-422d0736231c/e048cadf-9dc1-4eb7-a825-422d0736231c.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1054.798181] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c1401d9b-0d8d-436b-83ef-08ed80df0c9f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.804625] env[63028]: DEBUG oslo_vmware.api [None req-5eecdbbb-21b2-495d-8c7c-e12748cfd367 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Waiting for the task: (returnval){ [ 1054.804625] env[63028]: value = "task-2736200" [ 1054.804625] env[63028]: _type = "Task" [ 1054.804625] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1054.812882] env[63028]: DEBUG oslo_vmware.api [None req-5eecdbbb-21b2-495d-8c7c-e12748cfd367 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2736200, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.847270] env[63028]: DEBUG oslo_concurrency.lockutils [None req-cb35495d-a0bd-4e98-8bbe-8d2dc2396165 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Lock "c492dea4-9779-4460-a559-5b82fb0643f0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.199s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1055.204094] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edb0b8e6-a38e-42a2-a33b-ee6575932040 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.219487] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e06a2c88-b12a-4966-afc3-a6016df820d5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.223639] env[63028]: DEBUG oslo_vmware.api [None req-7590fdc3-7918-4146-9016-8089567d1b4a tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Task: {'id': task-2736199, 'name': PowerOffVM_Task, 'duration_secs': 0.274205} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1055.226835] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-7590fdc3-7918-4146-9016-8089567d1b4a tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] [instance: 56d6982d-9f76-4952-8c8b-f64b3c8d02fe] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1055.226835] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-7590fdc3-7918-4146-9016-8089567d1b4a tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] [instance: 56d6982d-9f76-4952-8c8b-f64b3c8d02fe] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1055.226835] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f0e58262-0a9b-4341-86bc-f308b17d23d9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.252759] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c6287f1-dbaa-4cd5-a099-68c753213f23 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.260855] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16e58520-4f9a-4e0e-ad6b-bb21c5a4250a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.277477] env[63028]: DEBUG nova.compute.provider_tree [None req-42208f00-4ed8-4f3a-a14c-9aad1270b8df tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1055.304393] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-7590fdc3-7918-4146-9016-8089567d1b4a tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] [instance: 56d6982d-9f76-4952-8c8b-f64b3c8d02fe] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1055.304393] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-7590fdc3-7918-4146-9016-8089567d1b4a tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] [instance: 56d6982d-9f76-4952-8c8b-f64b3c8d02fe] Deleting contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1055.304393] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-7590fdc3-7918-4146-9016-8089567d1b4a tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Deleting the datastore file [datastore2] 56d6982d-9f76-4952-8c8b-f64b3c8d02fe {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1055.307569] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-68caef51-20a6-48ea-b83a-90640b30af60 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.312076] env[63028]: INFO nova.compute.manager [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 
tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 1cf111f2-df5e-48a6-905a-bc2d3ea45202] Took 25.44 seconds to build instance. [ 1055.315754] env[63028]: DEBUG oslo_vmware.api [None req-7590fdc3-7918-4146-9016-8089567d1b4a tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Waiting for the task: (returnval){ [ 1055.315754] env[63028]: value = "task-2736202" [ 1055.315754] env[63028]: _type = "Task" [ 1055.315754] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1055.318928] env[63028]: DEBUG oslo_vmware.api [None req-5eecdbbb-21b2-495d-8c7c-e12748cfd367 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2736200, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.327103] env[63028]: DEBUG oslo_vmware.api [None req-7590fdc3-7918-4146-9016-8089567d1b4a tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Task: {'id': task-2736202, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.455275] env[63028]: DEBUG nova.compute.manager [req-9bfdbbab-308a-4270-9552-95f916ac5891 req-62b47347-d806-4ed3-86cc-7d506832c45a service nova] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] Received event network-vif-deleted-b0d7c3ce-e883-4ccc-80ac-87d06acc7bb4 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1055.458038] env[63028]: INFO nova.compute.manager [req-9bfdbbab-308a-4270-9552-95f916ac5891 req-62b47347-d806-4ed3-86cc-7d506832c45a service nova] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] Neutron deleted interface b0d7c3ce-e883-4ccc-80ac-87d06acc7bb4; detaching it from the instance and deleting it from the info cache [ 1055.458038] env[63028]: DEBUG nova.network.neutron [req-9bfdbbab-308a-4270-9552-95f916ac5891 req-62b47347-d806-4ed3-86cc-7d506832c45a service nova] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1055.781274] env[63028]: DEBUG nova.scheduler.client.report [None req-42208f00-4ed8-4f3a-a14c-9aad1270b8df tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1055.819788] env[63028]: DEBUG oslo_concurrency.lockutils [None req-bcc7c8fa-3778-4015-b55b-0ead2bc6e9f2 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Lock "1cf111f2-df5e-48a6-905a-bc2d3ea45202" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.962s {{(pid=63028) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1055.821042] env[63028]: DEBUG oslo_vmware.api [None req-5eecdbbb-21b2-495d-8c7c-e12748cfd367 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2736200, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.52394} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1055.823670] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-5eecdbbb-21b2-495d-8c7c-e12748cfd367 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] e048cadf-9dc1-4eb7-a825-422d0736231c/e048cadf-9dc1-4eb7-a825-422d0736231c.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1055.823897] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-5eecdbbb-21b2-495d-8c7c-e12748cfd367 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: e048cadf-9dc1-4eb7-a825-422d0736231c] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1055.824476] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6128ed34-f7f0-4485-b6d7-d950ab553c23 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.831613] env[63028]: DEBUG oslo_vmware.api [None req-7590fdc3-7918-4146-9016-8089567d1b4a tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Task: {'id': task-2736202, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.833065] env[63028]: DEBUG oslo_vmware.api [None req-5eecdbbb-21b2-495d-8c7c-e12748cfd367 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Waiting for the task: (returnval){ [ 1055.833065] env[63028]: value = "task-2736203" [ 1055.833065] env[63028]: _type = "Task" [ 1055.833065] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1055.840791] env[63028]: DEBUG oslo_vmware.api [None req-5eecdbbb-21b2-495d-8c7c-e12748cfd367 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2736203, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.936159] env[63028]: DEBUG nova.network.neutron [-] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1055.959878] env[63028]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4951f930-ebd4-4b93-8fff-ca7b0816b602 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.972025] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-196bfbb3-2dce-4269-8517-01136aa8e832 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.017418] env[63028]: DEBUG nova.compute.manager [req-9bfdbbab-308a-4270-9552-95f916ac5891 req-62b47347-d806-4ed3-86cc-7d506832c45a service nova] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] Detach interface failed, port_id=b0d7c3ce-e883-4ccc-80ac-87d06acc7bb4, reason: Instance 8bb61bfa-d44e-4e06-867a-445d9b3db660 could not be found. {{(pid=63028) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1056.287382] env[63028]: DEBUG oslo_concurrency.lockutils [None req-42208f00-4ed8-4f3a-a14c-9aad1270b8df tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.976s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1056.289690] env[63028]: DEBUG oslo_concurrency.lockutils [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 15.845s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1056.289876] env[63028]: DEBUG oslo_concurrency.lockutils [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1056.290049] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63028) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1056.290730] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f70752c5-7328-401a-9ff3-9b6723799af8 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.507s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1056.291828] env[63028]: INFO nova.compute.claims [None req-f70752c5-7328-401a-9ff3-9b6723799af8 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 2c2fb165-8906-4d42-a839-89ea6c8814ab] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1056.295266] env[63028]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d77525a-8d99-4036-a3df-15b9eb782de9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.301249] env[63028]: DEBUG oslo_concurrency.lockutils [None req-df73c557-d2b1-4439-881a-38c263ae7aa1 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Acquiring lock "3566ab6f-1f8a-472d-9efb-47fa2520a215" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1056.301450] env[63028]: DEBUG oslo_concurrency.lockutils [None req-df73c557-d2b1-4439-881a-38c263ae7aa1 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Lock "3566ab6f-1f8a-472d-9efb-47fa2520a215" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1056.301642] env[63028]: DEBUG oslo_concurrency.lockutils [None req-df73c557-d2b1-4439-881a-38c263ae7aa1 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Acquiring lock "3566ab6f-1f8a-472d-9efb-47fa2520a215-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1056.301826] env[63028]: DEBUG oslo_concurrency.lockutils [None req-df73c557-d2b1-4439-881a-38c263ae7aa1 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Lock "3566ab6f-1f8a-472d-9efb-47fa2520a215-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1056.302032] env[63028]: DEBUG oslo_concurrency.lockutils [None req-df73c557-d2b1-4439-881a-38c263ae7aa1 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Lock "3566ab6f-1f8a-472d-9efb-47fa2520a215-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1056.304721] env[63028]: INFO nova.compute.manager [None req-df73c557-d2b1-4439-881a-38c263ae7aa1 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 3566ab6f-1f8a-472d-9efb-47fa2520a215] Terminating instance [ 1056.310952] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-740a66cf-74a2-4914-874f-c491f6cf467b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.318165] env[63028]: INFO nova.scheduler.client.report [None req-42208f00-4ed8-4f3a-a14c-9aad1270b8df tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] Deleted allocations for instance da23282a-bbda-47bf-9d9c-337ee9996779 [ 1056.334773] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce91901d-1d77-45d0-99a6-8df0a4c660dc {{(pid=63028) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.344169] env[63028]: DEBUG oslo_vmware.api [None req-7590fdc3-7918-4146-9016-8089567d1b4a tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Task: {'id': task-2736202, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.56867} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1056.345135] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-7590fdc3-7918-4146-9016-8089567d1b4a tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1056.345359] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-7590fdc3-7918-4146-9016-8089567d1b4a tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] [instance: 56d6982d-9f76-4952-8c8b-f64b3c8d02fe] Deleted contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1056.345593] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-7590fdc3-7918-4146-9016-8089567d1b4a tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] [instance: 56d6982d-9f76-4952-8c8b-f64b3c8d02fe] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1056.345817] env[63028]: INFO nova.compute.manager [None req-7590fdc3-7918-4146-9016-8089567d1b4a tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] [instance: 56d6982d-9f76-4952-8c8b-f64b3c8d02fe] Took 1.65 seconds to destroy the instance on the hypervisor. [ 1056.345966] env[63028]: DEBUG oslo.service.loopingcall [None req-7590fdc3-7918-4146-9016-8089567d1b4a tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1056.346700] env[63028]: DEBUG nova.compute.manager [-] [instance: 56d6982d-9f76-4952-8c8b-f64b3c8d02fe] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1056.346859] env[63028]: DEBUG nova.network.neutron [-] [instance: 56d6982d-9f76-4952-8c8b-f64b3c8d02fe] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1056.354273] env[63028]: DEBUG oslo_vmware.api [None req-5eecdbbb-21b2-495d-8c7c-e12748cfd367 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2736203, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069834} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1056.355731] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2abb0fbd-f99c-4611-8521-6b6d2afb9bd9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.359736] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-5eecdbbb-21b2-495d-8c7c-e12748cfd367 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: e048cadf-9dc1-4eb7-a825-422d0736231c] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1056.360362] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2eaf70ed-4f92-4351-8543-23208f14f989 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.402417] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f95dac67-55ca-4cac-9cc2-49f5bfc97fa0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Acquiring lock "1cf111f2-df5e-48a6-905a-bc2d3ea45202" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1056.402417] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f95dac67-55ca-4cac-9cc2-49f5bfc97fa0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Lock "1cf111f2-df5e-48a6-905a-bc2d3ea45202" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1056.402594] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f95dac67-55ca-4cac-9cc2-49f5bfc97fa0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Acquiring lock "1cf111f2-df5e-48a6-905a-bc2d3ea45202-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1056.407666] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f95dac67-55ca-4cac-9cc2-49f5bfc97fa0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Lock "1cf111f2-df5e-48a6-905a-bc2d3ea45202-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1056.407666] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f95dac67-55ca-4cac-9cc2-49f5bfc97fa0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Lock "1cf111f2-df5e-48a6-905a-bc2d3ea45202-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1056.412942] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-5eecdbbb-21b2-495d-8c7c-e12748cfd367 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: 
e048cadf-9dc1-4eb7-a825-422d0736231c] Reconfiguring VM instance instance-00000066 to attach disk [datastore2] e048cadf-9dc1-4eb7-a825-422d0736231c/e048cadf-9dc1-4eb7-a825-422d0736231c.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1056.416036] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=178132MB free_disk=110GB free_vcpus=48 pci_devices=None {{(pid=63028) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1056.416036] env[63028]: DEBUG oslo_concurrency.lockutils [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1056.416036] env[63028]: INFO nova.compute.manager [None req-f95dac67-55ca-4cac-9cc2-49f5bfc97fa0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 1cf111f2-df5e-48a6-905a-bc2d3ea45202] Terminating instance [ 1056.416341] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7ab06136-e345-40c8-a9af-12e5e2bcb6d4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.439809] env[63028]: DEBUG oslo_concurrency.lockutils [None req-77578b6f-e071-485a-b276-8efcef9701e9 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Acquiring lock "1f8415cc-f544-4c89-9863-43d5ae9144e8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1056.439946] env[63028]: DEBUG oslo_concurrency.lockutils [None req-77578b6f-e071-485a-b276-8efcef9701e9 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Lock "1f8415cc-f544-4c89-9863-43d5ae9144e8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1056.441087] env[63028]: INFO nova.compute.manager [-] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] Took 1.82 seconds to deallocate network for instance. [ 1056.448882] env[63028]: DEBUG oslo_vmware.api [None req-5eecdbbb-21b2-495d-8c7c-e12748cfd367 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Waiting for the task: (returnval){ [ 1056.448882] env[63028]: value = "task-2736204" [ 1056.448882] env[63028]: _type = "Task" [ 1056.448882] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1056.462062] env[63028]: DEBUG oslo_vmware.api [None req-5eecdbbb-21b2-495d-8c7c-e12748cfd367 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2736204, 'name': ReconfigVM_Task} progress is 14%. 
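The spawn path for instance e048cadf-9dc1-4eb7-a825-422d0736231c is traced through these entries: the cached image VMDK is copied from devstack-image-cache_base into the instance directory (CopyVirtualDisk_Task), the root disk is extended (ExtendVirtualDisk_Task), and the VM is reconfigured to attach it (ReconfigVM_Task, still polling at this point). A sketch of that sequence under the same assumptions as the earlier examples, with hypothetical injected helpers rather than real Nova code:

def build_root_disk(instance_uuid, image_id, root_gb,
                    copy_disk, extend_disk, attach_disk,
                    datastore="datastore2"):
    """Copy the cached image, grow it to the flavor's root size,
    then attach it to the VM -- the order seen in the trace."""
    cache_vmdk = (f"[{datastore}] devstack-image-cache_base/"
                  f"{image_id}/{image_id}.vmdk")
    instance_vmdk = f"[{datastore}] {instance_uuid}/{instance_uuid}.vmdk"
    copy_disk(cache_vmdk, instance_vmdk)       # CopyVirtualDisk_Task
    extend_disk(instance_vmdk, root_gb)        # ExtendVirtualDisk_Task
    attach_disk(instance_uuid, instance_vmdk)  # ReconfigVM_Task (attach disk)
    return instance_vmdk

# toy usage with print stubs
build_root_disk(
    "e048cadf-9dc1-4eb7-a825-422d0736231c",
    "f2ba2026-3f4b-431c-97c1-c4ba582a9907",
    root_gb=1,
    copy_disk=lambda src, dst: print("copy", src, "->", dst),
    extend_disk=lambda path, gb: print("extend", path, "to", gb, "GB"),
    attach_disk=lambda vm, path: print("attach", path, "to", vm),
)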
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.824024] env[63028]: DEBUG nova.compute.manager [None req-df73c557-d2b1-4439-881a-38c263ae7aa1 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 3566ab6f-1f8a-472d-9efb-47fa2520a215] Start destroying the instance on the hypervisor. {{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1056.824024] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-df73c557-d2b1-4439-881a-38c263ae7aa1 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 3566ab6f-1f8a-472d-9efb-47fa2520a215] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1056.824024] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a5fca74-62f2-4fe6-9d48-c50c64c2071e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.831758] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-df73c557-d2b1-4439-881a-38c263ae7aa1 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 3566ab6f-1f8a-472d-9efb-47fa2520a215] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1056.832245] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-546376ad-476e-49da-9d36-a768c9e7964b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.837145] env[63028]: DEBUG oslo_concurrency.lockutils [None req-42208f00-4ed8-4f3a-a14c-9aad1270b8df tempest-ServerAddressesTestJSON-2051703973 tempest-ServerAddressesTestJSON-2051703973-project-member] Lock "da23282a-bbda-47bf-9d9c-337ee9996779" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 20.512s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1056.840880] env[63028]: DEBUG oslo_vmware.api [None req-df73c557-d2b1-4439-881a-38c263ae7aa1 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Waiting for the task: (returnval){ [ 1056.840880] env[63028]: value = "task-2736205" [ 1056.840880] env[63028]: _type = "Task" [ 1056.840880] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1056.850657] env[63028]: DEBUG oslo_vmware.api [None req-df73c557-d2b1-4439-881a-38c263ae7aa1 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Task: {'id': task-2736205, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.943023] env[63028]: DEBUG nova.compute.manager [None req-f95dac67-55ca-4cac-9cc2-49f5bfc97fa0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 1cf111f2-df5e-48a6-905a-bc2d3ea45202] Start destroying the instance on the hypervisor. 
{{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1056.943023] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-f95dac67-55ca-4cac-9cc2-49f5bfc97fa0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 1cf111f2-df5e-48a6-905a-bc2d3ea45202] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1056.943023] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35d0cec7-25c7-4c6f-8044-063593f442df {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.945629] env[63028]: DEBUG nova.compute.manager [None req-77578b6f-e071-485a-b276-8efcef9701e9 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: 1f8415cc-f544-4c89-9863-43d5ae9144e8] Starting instance... {{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1056.953824] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-f95dac67-55ca-4cac-9cc2-49f5bfc97fa0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 1cf111f2-df5e-48a6-905a-bc2d3ea45202] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1056.957352] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8d2ec0fd-e686-4401-be8d-a0cdc979a06b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.960502] env[63028]: DEBUG oslo_concurrency.lockutils [None req-94b92cf4-72cd-48b1-86e7-cfb6a1cebfbe tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1056.967116] env[63028]: DEBUG oslo_vmware.api [None req-5eecdbbb-21b2-495d-8c7c-e12748cfd367 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2736204, 'name': ReconfigVM_Task, 'duration_secs': 0.381393} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1056.968421] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-5eecdbbb-21b2-495d-8c7c-e12748cfd367 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: e048cadf-9dc1-4eb7-a825-422d0736231c] Reconfigured VM instance instance-00000066 to attach disk [datastore2] e048cadf-9dc1-4eb7-a825-422d0736231c/e048cadf-9dc1-4eb7-a825-422d0736231c.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1056.969241] env[63028]: DEBUG oslo_vmware.api [None req-f95dac67-55ca-4cac-9cc2-49f5bfc97fa0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Waiting for the task: (returnval){ [ 1056.969241] env[63028]: value = "task-2736206" [ 1056.969241] env[63028]: _type = "Task" [ 1056.969241] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1056.969660] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b5f8185a-bb94-4d92-943e-a521c735407a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.979878] env[63028]: DEBUG oslo_vmware.api [None req-f95dac67-55ca-4cac-9cc2-49f5bfc97fa0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Task: {'id': task-2736206, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.981624] env[63028]: DEBUG oslo_vmware.api [None req-5eecdbbb-21b2-495d-8c7c-e12748cfd367 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Waiting for the task: (returnval){ [ 1056.981624] env[63028]: value = "task-2736207" [ 1056.981624] env[63028]: _type = "Task" [ 1056.981624] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1056.993235] env[63028]: DEBUG oslo_vmware.api [None req-5eecdbbb-21b2-495d-8c7c-e12748cfd367 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2736207, 'name': Rename_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.234974] env[63028]: DEBUG nova.network.neutron [-] [instance: 56d6982d-9f76-4952-8c8b-f64b3c8d02fe] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1057.354673] env[63028]: DEBUG oslo_vmware.api [None req-df73c557-d2b1-4439-881a-38c263ae7aa1 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Task: {'id': task-2736205, 'name': PowerOffVM_Task, 'duration_secs': 0.222693} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1057.354979] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-df73c557-d2b1-4439-881a-38c263ae7aa1 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 3566ab6f-1f8a-472d-9efb-47fa2520a215] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1057.355171] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-df73c557-d2b1-4439-881a-38c263ae7aa1 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 3566ab6f-1f8a-472d-9efb-47fa2520a215] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1057.355449] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ba954266-6c62-4de5-ace8-9328afc2312a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.418666] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-df73c557-d2b1-4439-881a-38c263ae7aa1 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 3566ab6f-1f8a-472d-9efb-47fa2520a215] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1057.418891] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-df73c557-d2b1-4439-881a-38c263ae7aa1 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 3566ab6f-1f8a-472d-9efb-47fa2520a215] Deleting contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1057.419078] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-df73c557-d2b1-4439-881a-38c263ae7aa1 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Deleting the datastore file [datastore1] 3566ab6f-1f8a-472d-9efb-47fa2520a215 {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1057.419364] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3f5e6278-3daa-4969-9d64-5de179773a38 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.429740] env[63028]: DEBUG oslo_vmware.api [None req-df73c557-d2b1-4439-881a-38c263ae7aa1 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Waiting for the task: (returnval){ [ 1057.429740] env[63028]: value = "task-2736209" [ 1057.429740] env[63028]: _type = "Task" [ 1057.429740] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1057.437317] env[63028]: DEBUG oslo_vmware.api [None req-df73c557-d2b1-4439-881a-38c263ae7aa1 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Task: {'id': task-2736209, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.479714] env[63028]: DEBUG oslo_concurrency.lockutils [None req-77578b6f-e071-485a-b276-8efcef9701e9 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1057.485904] env[63028]: DEBUG oslo_vmware.api [None req-f95dac67-55ca-4cac-9cc2-49f5bfc97fa0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Task: {'id': task-2736206, 'name': PowerOffVM_Task, 'duration_secs': 0.182101} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1057.495129] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-f95dac67-55ca-4cac-9cc2-49f5bfc97fa0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 1cf111f2-df5e-48a6-905a-bc2d3ea45202] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1057.495286] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-f95dac67-55ca-4cac-9cc2-49f5bfc97fa0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 1cf111f2-df5e-48a6-905a-bc2d3ea45202] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1057.498151] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-db14e8df-5db6-4888-bdb4-4cb76279e1b4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.502881] env[63028]: DEBUG oslo_vmware.api [None req-5eecdbbb-21b2-495d-8c7c-e12748cfd367 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2736207, 'name': Rename_Task, 'duration_secs': 0.178498} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1057.502881] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-5eecdbbb-21b2-495d-8c7c-e12748cfd367 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: e048cadf-9dc1-4eb7-a825-422d0736231c] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1057.502998] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6e5da49e-85ce-4b15-a1db-a8e0f294c487 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.510868] env[63028]: DEBUG oslo_vmware.api [None req-5eecdbbb-21b2-495d-8c7c-e12748cfd367 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Waiting for the task: (returnval){ [ 1057.510868] env[63028]: value = "task-2736211" [ 1057.510868] env[63028]: _type = "Task" [ 1057.510868] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1057.520596] env[63028]: DEBUG oslo_vmware.api [None req-5eecdbbb-21b2-495d-8c7c-e12748cfd367 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2736211, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.545552] env[63028]: DEBUG nova.compute.manager [req-f900520c-a7a7-4dc8-955b-ee08f3776b87 req-73b96fbd-1fcd-46b9-8fc3-c1fab8ee08a6 service nova] [instance: 56d6982d-9f76-4952-8c8b-f64b3c8d02fe] Received event network-vif-deleted-de1f8509-63e1-41ae-ad48-03c4e8b74871 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1057.596537] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-f95dac67-55ca-4cac-9cc2-49f5bfc97fa0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 1cf111f2-df5e-48a6-905a-bc2d3ea45202] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1057.596777] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-f95dac67-55ca-4cac-9cc2-49f5bfc97fa0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 1cf111f2-df5e-48a6-905a-bc2d3ea45202] Deleting contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1057.597279] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-f95dac67-55ca-4cac-9cc2-49f5bfc97fa0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Deleting the datastore file [datastore1] 1cf111f2-df5e-48a6-905a-bc2d3ea45202 {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1057.597279] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-54e5f4e6-ce12-4183-8b44-60d3c98c071f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.606118] env[63028]: DEBUG oslo_vmware.api [None req-f95dac67-55ca-4cac-9cc2-49f5bfc97fa0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Waiting for the task: (returnval){ [ 1057.606118] env[63028]: value = "task-2736212" [ 1057.606118] env[63028]: _type = "Task" [ 1057.606118] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1057.616911] env[63028]: DEBUG oslo_vmware.api [None req-f95dac67-55ca-4cac-9cc2-49f5bfc97fa0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Task: {'id': task-2736212, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.676983] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23f03622-fab5-4593-8655-9910cd5c886d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.686267] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19f17c1a-c7ce-48dd-88c8-83496f05029f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.719720] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae255f8c-71a8-4c55-8c12-fd942f23d7d9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.728454] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdeb60e0-c4c5-44be-9922-f0503503c15d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.746859] env[63028]: INFO nova.compute.manager [-] [instance: 56d6982d-9f76-4952-8c8b-f64b3c8d02fe] Took 1.40 seconds to deallocate network for instance. [ 1057.747424] env[63028]: DEBUG nova.compute.provider_tree [None req-f70752c5-7328-401a-9ff3-9b6723799af8 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1057.944895] env[63028]: DEBUG oslo_vmware.api [None req-df73c557-d2b1-4439-881a-38c263ae7aa1 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Task: {'id': task-2736209, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.163431} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1057.945826] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-df73c557-d2b1-4439-881a-38c263ae7aa1 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1057.945826] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-df73c557-d2b1-4439-881a-38c263ae7aa1 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 3566ab6f-1f8a-472d-9efb-47fa2520a215] Deleted contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1057.945826] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-df73c557-d2b1-4439-881a-38c263ae7aa1 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 3566ab6f-1f8a-472d-9efb-47fa2520a215] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1057.945967] env[63028]: INFO nova.compute.manager [None req-df73c557-d2b1-4439-881a-38c263ae7aa1 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 3566ab6f-1f8a-472d-9efb-47fa2520a215] Took 1.12 seconds to destroy the instance on the hypervisor. 
[ 1057.946398] env[63028]: DEBUG oslo.service.loopingcall [None req-df73c557-d2b1-4439-881a-38c263ae7aa1 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1057.946731] env[63028]: DEBUG nova.compute.manager [-] [instance: 3566ab6f-1f8a-472d-9efb-47fa2520a215] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1057.946817] env[63028]: DEBUG nova.network.neutron [-] [instance: 3566ab6f-1f8a-472d-9efb-47fa2520a215] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1058.020233] env[63028]: DEBUG oslo_vmware.api [None req-5eecdbbb-21b2-495d-8c7c-e12748cfd367 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2736211, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.118198] env[63028]: DEBUG oslo_vmware.api [None req-f95dac67-55ca-4cac-9cc2-49f5bfc97fa0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Task: {'id': task-2736212, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.399426} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1058.118475] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-f95dac67-55ca-4cac-9cc2-49f5bfc97fa0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1058.118666] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-f95dac67-55ca-4cac-9cc2-49f5bfc97fa0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 1cf111f2-df5e-48a6-905a-bc2d3ea45202] Deleted contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1058.118839] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-f95dac67-55ca-4cac-9cc2-49f5bfc97fa0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 1cf111f2-df5e-48a6-905a-bc2d3ea45202] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1058.119018] env[63028]: INFO nova.compute.manager [None req-f95dac67-55ca-4cac-9cc2-49f5bfc97fa0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 1cf111f2-df5e-48a6-905a-bc2d3ea45202] Took 1.18 seconds to destroy the instance on the hypervisor. [ 1058.119271] env[63028]: DEBUG oslo.service.loopingcall [None req-f95dac67-55ca-4cac-9cc2-49f5bfc97fa0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1058.119466] env[63028]: DEBUG nova.compute.manager [-] [instance: 1cf111f2-df5e-48a6-905a-bc2d3ea45202] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1058.119562] env[63028]: DEBUG nova.network.neutron [-] [instance: 1cf111f2-df5e-48a6-905a-bc2d3ea45202] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1058.252859] env[63028]: DEBUG nova.scheduler.client.report [None req-f70752c5-7328-401a-9ff3-9b6723799af8 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1058.260795] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7590fdc3-7918-4146-9016-8089567d1b4a tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1058.520812] env[63028]: DEBUG oslo_vmware.api [None req-5eecdbbb-21b2-495d-8c7c-e12748cfd367 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2736211, 'name': PowerOnVM_Task, 'duration_secs': 0.840861} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1058.523809] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-5eecdbbb-21b2-495d-8c7c-e12748cfd367 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: e048cadf-9dc1-4eb7-a825-422d0736231c] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1058.524281] env[63028]: INFO nova.compute.manager [None req-5eecdbbb-21b2-495d-8c7c-e12748cfd367 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: e048cadf-9dc1-4eb7-a825-422d0736231c] Took 8.27 seconds to spawn the instance on the hypervisor. 
[ 1058.524531] env[63028]: DEBUG nova.compute.manager [None req-5eecdbbb-21b2-495d-8c7c-e12748cfd367 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: e048cadf-9dc1-4eb7-a825-422d0736231c] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1058.525439] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9229427-87ab-476b-94f9-e2dc223d8862 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.762199] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f70752c5-7328-401a-9ff3-9b6723799af8 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.472s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1058.762755] env[63028]: DEBUG nova.compute.manager [None req-f70752c5-7328-401a-9ff3-9b6723799af8 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 2c2fb165-8906-4d42-a839-89ea6c8814ab] Start building networks asynchronously for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1058.766025] env[63028]: DEBUG oslo_concurrency.lockutils [None req-b28eb651-7c74-4410-891e-a25b642e44b0 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 14.716s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1058.799753] env[63028]: DEBUG nova.network.neutron [-] [instance: 3566ab6f-1f8a-472d-9efb-47fa2520a215] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1058.892847] env[63028]: DEBUG nova.network.neutron [-] [instance: 1cf111f2-df5e-48a6-905a-bc2d3ea45202] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1059.043517] env[63028]: INFO nova.compute.manager [None req-5eecdbbb-21b2-495d-8c7c-e12748cfd367 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: e048cadf-9dc1-4eb7-a825-422d0736231c] Took 23.19 seconds to build instance. [ 1059.270293] env[63028]: INFO nova.compute.claims [None req-b28eb651-7c74-4410-891e-a25b642e44b0 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1059.275073] env[63028]: DEBUG nova.compute.utils [None req-f70752c5-7328-401a-9ff3-9b6723799af8 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1059.276458] env[63028]: DEBUG nova.compute.manager [None req-f70752c5-7328-401a-9ff3-9b6723799af8 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 2c2fb165-8906-4d42-a839-89ea6c8814ab] Allocating IP information in the background. 
{{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1059.276619] env[63028]: DEBUG nova.network.neutron [None req-f70752c5-7328-401a-9ff3-9b6723799af8 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 2c2fb165-8906-4d42-a839-89ea6c8814ab] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1059.303269] env[63028]: INFO nova.compute.manager [-] [instance: 3566ab6f-1f8a-472d-9efb-47fa2520a215] Took 1.36 seconds to deallocate network for instance. [ 1059.347550] env[63028]: DEBUG nova.policy [None req-f70752c5-7328-401a-9ff3-9b6723799af8 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3c022ca18b0a41ce9d790fa25f6ebf8c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ea26842446ec4691a6456a6659188704', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 1059.395909] env[63028]: INFO nova.compute.manager [-] [instance: 1cf111f2-df5e-48a6-905a-bc2d3ea45202] Took 1.28 seconds to deallocate network for instance. [ 1059.544647] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5eecdbbb-21b2-495d-8c7c-e12748cfd367 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Lock "e048cadf-9dc1-4eb7-a825-422d0736231c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.701s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1059.576392] env[63028]: DEBUG nova.compute.manager [req-b1b6b59b-e1f7-42c8-8a3c-950b713e3b3f req-0966a1a6-4f18-469d-b2b2-4044a6cab7b2 service nova] [instance: 3566ab6f-1f8a-472d-9efb-47fa2520a215] Received event network-vif-deleted-4d0af6d0-21b5-49d7-a857-43d30a0a2514 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1059.576602] env[63028]: DEBUG nova.compute.manager [req-b1b6b59b-e1f7-42c8-8a3c-950b713e3b3f req-0966a1a6-4f18-469d-b2b2-4044a6cab7b2 service nova] [instance: 1cf111f2-df5e-48a6-905a-bc2d3ea45202] Received event network-vif-deleted-f2d3b19c-6db9-4224-812a-45ec031221f0 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1059.779430] env[63028]: INFO nova.compute.resource_tracker [None req-b28eb651-7c74-4410-891e-a25b642e44b0 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Updating resource usage from migration 29632012-2d70-44ac-b011-da63d2c5ae9c [ 1059.783455] env[63028]: DEBUG nova.compute.manager [None req-f70752c5-7328-401a-9ff3-9b6723799af8 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 2c2fb165-8906-4d42-a839-89ea6c8814ab] Start building block device mappings for instance. 
{{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1059.820143] env[63028]: DEBUG oslo_concurrency.lockutils [None req-df73c557-d2b1-4439-881a-38c263ae7aa1 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1059.863612] env[63028]: DEBUG nova.network.neutron [None req-f70752c5-7328-401a-9ff3-9b6723799af8 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 2c2fb165-8906-4d42-a839-89ea6c8814ab] Successfully created port: 00aeadf5-50e8-433f-a1da-81ac2a8f259f {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1059.901531] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f95dac67-55ca-4cac-9cc2-49f5bfc97fa0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1060.101477] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09e098e1-9b8f-4b0b-8c5d-9a5296f1b625 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.109695] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d30e3e34-c0b5-4850-a1c3-28ee5b15e598 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.142390] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce9730e4-f5e3-428e-ba98-d5ac0fa736ce {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.150326] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-741a91c8-2d73-44b8-9f68-0e2945fef960 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.165971] env[63028]: DEBUG nova.compute.provider_tree [None req-b28eb651-7c74-4410-891e-a25b642e44b0 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1060.672150] env[63028]: DEBUG nova.scheduler.client.report [None req-b28eb651-7c74-4410-891e-a25b642e44b0 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1060.795965] env[63028]: DEBUG 
nova.compute.manager [None req-f70752c5-7328-401a-9ff3-9b6723799af8 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 2c2fb165-8906-4d42-a839-89ea6c8814ab] Start spawning the instance on the hypervisor. {{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1060.834636] env[63028]: DEBUG nova.virt.hardware [None req-f70752c5-7328-401a-9ff3-9b6723799af8 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1060.834934] env[63028]: DEBUG nova.virt.hardware [None req-f70752c5-7328-401a-9ff3-9b6723799af8 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1060.835114] env[63028]: DEBUG nova.virt.hardware [None req-f70752c5-7328-401a-9ff3-9b6723799af8 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1060.835305] env[63028]: DEBUG nova.virt.hardware [None req-f70752c5-7328-401a-9ff3-9b6723799af8 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1060.835457] env[63028]: DEBUG nova.virt.hardware [None req-f70752c5-7328-401a-9ff3-9b6723799af8 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1060.835607] env[63028]: DEBUG nova.virt.hardware [None req-f70752c5-7328-401a-9ff3-9b6723799af8 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1060.835861] env[63028]: DEBUG nova.virt.hardware [None req-f70752c5-7328-401a-9ff3-9b6723799af8 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1060.836053] env[63028]: DEBUG nova.virt.hardware [None req-f70752c5-7328-401a-9ff3-9b6723799af8 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1060.836154] env[63028]: DEBUG nova.virt.hardware [None req-f70752c5-7328-401a-9ff3-9b6723799af8 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1060.836323] env[63028]: DEBUG nova.virt.hardware [None req-f70752c5-7328-401a-9ff3-9b6723799af8 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1060.836500] env[63028]: DEBUG nova.virt.hardware [None req-f70752c5-7328-401a-9ff3-9b6723799af8 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1060.837478] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d92e7d6-b8c2-4b4a-95a1-80171f40f621 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.847964] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4686221b-76a4-4a66-8872-5bb023297eeb {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.180213] env[63028]: DEBUG oslo_concurrency.lockutils [None req-b28eb651-7c74-4410-891e-a25b642e44b0 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.414s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1061.180476] env[63028]: INFO nova.compute.manager [None req-b28eb651-7c74-4410-891e-a25b642e44b0 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Migrating [ 1061.194120] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a4b285e8-f524-4cef-8d89-ff6c6535982a tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.459s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1061.194972] env[63028]: DEBUG nova.objects.instance [None req-a4b285e8-f524-4cef-8d89-ff6c6535982a tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] Lazy-loading 'resources' on Instance uuid 53b0cf02-f1c8-4219-a58c-f7b5ffbb1ae7 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1061.603145] env[63028]: DEBUG nova.compute.manager [req-69cc87d4-1b4f-4501-8e49-a06144e8fad6 req-2043e7e9-7819-4169-b157-57fe4695e197 service nova] [instance: e048cadf-9dc1-4eb7-a825-422d0736231c] Received event network-changed-60891063-6c30-480a-8e2b-f3960496f2fd {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1061.603279] env[63028]: DEBUG nova.compute.manager [req-69cc87d4-1b4f-4501-8e49-a06144e8fad6 
req-2043e7e9-7819-4169-b157-57fe4695e197 service nova] [instance: e048cadf-9dc1-4eb7-a825-422d0736231c] Refreshing instance network info cache due to event network-changed-60891063-6c30-480a-8e2b-f3960496f2fd. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1061.603505] env[63028]: DEBUG oslo_concurrency.lockutils [req-69cc87d4-1b4f-4501-8e49-a06144e8fad6 req-2043e7e9-7819-4169-b157-57fe4695e197 service nova] Acquiring lock "refresh_cache-e048cadf-9dc1-4eb7-a825-422d0736231c" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1061.603653] env[63028]: DEBUG oslo_concurrency.lockutils [req-69cc87d4-1b4f-4501-8e49-a06144e8fad6 req-2043e7e9-7819-4169-b157-57fe4695e197 service nova] Acquired lock "refresh_cache-e048cadf-9dc1-4eb7-a825-422d0736231c" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1061.603815] env[63028]: DEBUG nova.network.neutron [req-69cc87d4-1b4f-4501-8e49-a06144e8fad6 req-2043e7e9-7819-4169-b157-57fe4695e197 service nova] [instance: e048cadf-9dc1-4eb7-a825-422d0736231c] Refreshing network info cache for port 60891063-6c30-480a-8e2b-f3960496f2fd {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1061.708715] env[63028]: DEBUG oslo_concurrency.lockutils [None req-b28eb651-7c74-4410-891e-a25b642e44b0 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Acquiring lock "refresh_cache-d41a1eae-bb89-4222-9466-d86af891c654" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1061.708997] env[63028]: DEBUG oslo_concurrency.lockutils [None req-b28eb651-7c74-4410-891e-a25b642e44b0 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Acquired lock "refresh_cache-d41a1eae-bb89-4222-9466-d86af891c654" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1061.709200] env[63028]: DEBUG nova.network.neutron [None req-b28eb651-7c74-4410-891e-a25b642e44b0 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1061.857693] env[63028]: DEBUG oslo_concurrency.lockutils [None req-18cab1b2-a93d-4707-a623-a0b75ca018c2 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Acquiring lock "899496ae-8463-42e0-a287-b141d956fa0a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1061.857933] env[63028]: DEBUG oslo_concurrency.lockutils [None req-18cab1b2-a93d-4707-a623-a0b75ca018c2 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Lock "899496ae-8463-42e0-a287-b141d956fa0a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1062.071603] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-3b2b420f-87b5-4662-845c-671c2b1b2f00 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.079863] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cea78151-eb24-476d-ae79-ddf34607f872 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.115973] env[63028]: DEBUG nova.network.neutron [None req-f70752c5-7328-401a-9ff3-9b6723799af8 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 2c2fb165-8906-4d42-a839-89ea6c8814ab] Successfully updated port: 00aeadf5-50e8-433f-a1da-81ac2a8f259f {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1062.118196] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7b6d40e-2fc5-4b5e-9a20-5781e4a5748f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.127092] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4fa62d4-0136-4359-b392-494f0e229418 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.142992] env[63028]: DEBUG nova.compute.provider_tree [None req-a4b285e8-f524-4cef-8d89-ff6c6535982a tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1062.360399] env[63028]: DEBUG nova.compute.manager [None req-18cab1b2-a93d-4707-a623-a0b75ca018c2 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: 899496ae-8463-42e0-a287-b141d956fa0a] Starting instance... {{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1062.498043] env[63028]: DEBUG nova.network.neutron [req-69cc87d4-1b4f-4501-8e49-a06144e8fad6 req-2043e7e9-7819-4169-b157-57fe4695e197 service nova] [instance: e048cadf-9dc1-4eb7-a825-422d0736231c] Updated VIF entry in instance network info cache for port 60891063-6c30-480a-8e2b-f3960496f2fd. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1062.498442] env[63028]: DEBUG nova.network.neutron [req-69cc87d4-1b4f-4501-8e49-a06144e8fad6 req-2043e7e9-7819-4169-b157-57fe4695e197 service nova] [instance: e048cadf-9dc1-4eb7-a825-422d0736231c] Updating instance_info_cache with network_info: [{"id": "60891063-6c30-480a-8e2b-f3960496f2fd", "address": "fa:16:3e:84:9a:c5", "network": {"id": "c2f1496c-e3fd-43db-a032-12cdacdb4e46", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1287620143-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.172", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "25a6457f62d149629c09589feb1a550c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap60891063-6c", "ovs_interfaceid": "60891063-6c30-480a-8e2b-f3960496f2fd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1062.579023] env[63028]: DEBUG nova.network.neutron [None req-b28eb651-7c74-4410-891e-a25b642e44b0 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Updating instance_info_cache with network_info: [{"id": "c5f1d585-d624-4525-a5b2-132b18bf9378", "address": "fa:16:3e:93:da:98", "network": {"id": "49670a42-3caa-4492-8b32-f16f3fe77f4b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1158317332-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.229", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b4dcaef840f940bda057d0371cdc5adb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4df917f7-847a-4c0e-b0e3-69a52e4a1554", "external-id": "cl2-zone-457", "segmentation_id": 457, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc5f1d585-d6", "ovs_interfaceid": "c5f1d585-d624-4525-a5b2-132b18bf9378", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1062.622788] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f70752c5-7328-401a-9ff3-9b6723799af8 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Acquiring lock 
"refresh_cache-2c2fb165-8906-4d42-a839-89ea6c8814ab" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1062.622788] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f70752c5-7328-401a-9ff3-9b6723799af8 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Acquired lock "refresh_cache-2c2fb165-8906-4d42-a839-89ea6c8814ab" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1062.622788] env[63028]: DEBUG nova.network.neutron [None req-f70752c5-7328-401a-9ff3-9b6723799af8 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 2c2fb165-8906-4d42-a839-89ea6c8814ab] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1062.646454] env[63028]: DEBUG nova.scheduler.client.report [None req-a4b285e8-f524-4cef-8d89-ff6c6535982a tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1062.786611] env[63028]: DEBUG oslo_concurrency.lockutils [None req-807df1ad-a56e-4542-a89e-860ad319f30c tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] Acquiring lock "2add1602-122e-41d7-af83-b71d8dab9288" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1062.786854] env[63028]: DEBUG oslo_concurrency.lockutils [None req-807df1ad-a56e-4542-a89e-860ad319f30c tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] Lock "2add1602-122e-41d7-af83-b71d8dab9288" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1062.882666] env[63028]: DEBUG oslo_concurrency.lockutils [None req-18cab1b2-a93d-4707-a623-a0b75ca018c2 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1063.001418] env[63028]: DEBUG oslo_concurrency.lockutils [req-69cc87d4-1b4f-4501-8e49-a06144e8fad6 req-2043e7e9-7819-4169-b157-57fe4695e197 service nova] Releasing lock "refresh_cache-e048cadf-9dc1-4eb7-a825-422d0736231c" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1063.087450] env[63028]: DEBUG oslo_concurrency.lockutils [None req-b28eb651-7c74-4410-891e-a25b642e44b0 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Releasing 
lock "refresh_cache-d41a1eae-bb89-4222-9466-d86af891c654" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1063.151018] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a4b285e8-f524-4cef-8d89-ff6c6535982a tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.957s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1063.153296] env[63028]: DEBUG oslo_concurrency.lockutils [None req-92e3eef4-1898-4aab-9a30-727634119e63 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.865s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1063.153522] env[63028]: DEBUG nova.objects.instance [None req-92e3eef4-1898-4aab-9a30-727634119e63 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Lazy-loading 'resources' on Instance uuid a50e1167-d8ed-4099-83c3-a5066ab0be1f {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1063.155914] env[63028]: DEBUG nova.network.neutron [None req-f70752c5-7328-401a-9ff3-9b6723799af8 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 2c2fb165-8906-4d42-a839-89ea6c8814ab] Instance cache missing network info. {{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1063.176464] env[63028]: INFO nova.scheduler.client.report [None req-a4b285e8-f524-4cef-8d89-ff6c6535982a tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] Deleted allocations for instance 53b0cf02-f1c8-4219-a58c-f7b5ffbb1ae7 [ 1063.288881] env[63028]: DEBUG nova.compute.manager [None req-807df1ad-a56e-4542-a89e-860ad319f30c tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] [instance: 2add1602-122e-41d7-af83-b71d8dab9288] Starting instance... 
{{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1063.333866] env[63028]: DEBUG nova.network.neutron [None req-f70752c5-7328-401a-9ff3-9b6723799af8 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 2c2fb165-8906-4d42-a839-89ea6c8814ab] Updating instance_info_cache with network_info: [{"id": "00aeadf5-50e8-433f-a1da-81ac2a8f259f", "address": "fa:16:3e:91:ac:fb", "network": {"id": "1fdce9cd-bf47-402a-8e16-4c01a25ad481", "bridge": "br-int", "label": "tempest-ServersTestJSON-1363423183-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ea26842446ec4691a6456a6659188704", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "706c9762-1cf8-4770-897d-377d0d927773", "external-id": "nsx-vlan-transportzone-402", "segmentation_id": 402, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap00aeadf5-50", "ovs_interfaceid": "00aeadf5-50e8-433f-a1da-81ac2a8f259f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1063.693820] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a4b285e8-f524-4cef-8d89-ff6c6535982a tempest-ServersV294TestFqdnHostnames-76037487 tempest-ServersV294TestFqdnHostnames-76037487-project-member] Lock "53b0cf02-f1c8-4219-a58c-f7b5ffbb1ae7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.155s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1063.699200] env[63028]: DEBUG nova.compute.manager [req-30a2c8ac-823a-484f-9743-1e2eb44f8c84 req-d9d68db2-25df-4024-8742-36081871143b service nova] [instance: 2c2fb165-8906-4d42-a839-89ea6c8814ab] Received event network-vif-plugged-00aeadf5-50e8-433f-a1da-81ac2a8f259f {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1063.699200] env[63028]: DEBUG oslo_concurrency.lockutils [req-30a2c8ac-823a-484f-9743-1e2eb44f8c84 req-d9d68db2-25df-4024-8742-36081871143b service nova] Acquiring lock "2c2fb165-8906-4d42-a839-89ea6c8814ab-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1063.699200] env[63028]: DEBUG oslo_concurrency.lockutils [req-30a2c8ac-823a-484f-9743-1e2eb44f8c84 req-d9d68db2-25df-4024-8742-36081871143b service nova] Lock "2c2fb165-8906-4d42-a839-89ea6c8814ab-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1063.699200] env[63028]: DEBUG oslo_concurrency.lockutils [req-30a2c8ac-823a-484f-9743-1e2eb44f8c84 req-d9d68db2-25df-4024-8742-36081871143b service nova] Lock "2c2fb165-8906-4d42-a839-89ea6c8814ab-events" "released" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1063.699200] env[63028]: DEBUG nova.compute.manager [req-30a2c8ac-823a-484f-9743-1e2eb44f8c84 req-d9d68db2-25df-4024-8742-36081871143b service nova] [instance: 2c2fb165-8906-4d42-a839-89ea6c8814ab] No waiting events found dispatching network-vif-plugged-00aeadf5-50e8-433f-a1da-81ac2a8f259f {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1063.699200] env[63028]: WARNING nova.compute.manager [req-30a2c8ac-823a-484f-9743-1e2eb44f8c84 req-d9d68db2-25df-4024-8742-36081871143b service nova] [instance: 2c2fb165-8906-4d42-a839-89ea6c8814ab] Received unexpected event network-vif-plugged-00aeadf5-50e8-433f-a1da-81ac2a8f259f for instance with vm_state building and task_state spawning. [ 1063.699200] env[63028]: DEBUG nova.compute.manager [req-30a2c8ac-823a-484f-9743-1e2eb44f8c84 req-d9d68db2-25df-4024-8742-36081871143b service nova] [instance: 2c2fb165-8906-4d42-a839-89ea6c8814ab] Received event network-changed-00aeadf5-50e8-433f-a1da-81ac2a8f259f {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1063.699200] env[63028]: DEBUG nova.compute.manager [req-30a2c8ac-823a-484f-9743-1e2eb44f8c84 req-d9d68db2-25df-4024-8742-36081871143b service nova] [instance: 2c2fb165-8906-4d42-a839-89ea6c8814ab] Refreshing instance network info cache due to event network-changed-00aeadf5-50e8-433f-a1da-81ac2a8f259f. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1063.699200] env[63028]: DEBUG oslo_concurrency.lockutils [req-30a2c8ac-823a-484f-9743-1e2eb44f8c84 req-d9d68db2-25df-4024-8742-36081871143b service nova] Acquiring lock "refresh_cache-2c2fb165-8906-4d42-a839-89ea6c8814ab" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1063.805987] env[63028]: DEBUG oslo_concurrency.lockutils [None req-807df1ad-a56e-4542-a89e-860ad319f30c tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1063.836456] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f70752c5-7328-401a-9ff3-9b6723799af8 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Releasing lock "refresh_cache-2c2fb165-8906-4d42-a839-89ea6c8814ab" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1063.836833] env[63028]: DEBUG nova.compute.manager [None req-f70752c5-7328-401a-9ff3-9b6723799af8 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 2c2fb165-8906-4d42-a839-89ea6c8814ab] Instance network_info: |[{"id": "00aeadf5-50e8-433f-a1da-81ac2a8f259f", "address": "fa:16:3e:91:ac:fb", "network": {"id": "1fdce9cd-bf47-402a-8e16-4c01a25ad481", "bridge": "br-int", "label": "tempest-ServersTestJSON-1363423183-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": 
"192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ea26842446ec4691a6456a6659188704", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "706c9762-1cf8-4770-897d-377d0d927773", "external-id": "nsx-vlan-transportzone-402", "segmentation_id": 402, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap00aeadf5-50", "ovs_interfaceid": "00aeadf5-50e8-433f-a1da-81ac2a8f259f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1063.837081] env[63028]: DEBUG oslo_concurrency.lockutils [req-30a2c8ac-823a-484f-9743-1e2eb44f8c84 req-d9d68db2-25df-4024-8742-36081871143b service nova] Acquired lock "refresh_cache-2c2fb165-8906-4d42-a839-89ea6c8814ab" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1063.837266] env[63028]: DEBUG nova.network.neutron [req-30a2c8ac-823a-484f-9743-1e2eb44f8c84 req-d9d68db2-25df-4024-8742-36081871143b service nova] [instance: 2c2fb165-8906-4d42-a839-89ea6c8814ab] Refreshing network info cache for port 00aeadf5-50e8-433f-a1da-81ac2a8f259f {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1063.838913] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-f70752c5-7328-401a-9ff3-9b6723799af8 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 2c2fb165-8906-4d42-a839-89ea6c8814ab] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:91:ac:fb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '706c9762-1cf8-4770-897d-377d0d927773', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '00aeadf5-50e8-433f-a1da-81ac2a8f259f', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1063.846524] env[63028]: DEBUG oslo.service.loopingcall [None req-f70752c5-7328-401a-9ff3-9b6723799af8 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1063.851755] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2c2fb165-8906-4d42-a839-89ea6c8814ab] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1063.852430] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-04e975db-83e7-4b67-a73f-9435825bd753 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.875760] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1063.875760] env[63028]: value = "task-2736213" [ 1063.875760] env[63028]: _type = "Task" [ 1063.875760] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1063.884631] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2736213, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.010136] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e603ae9-6e80-4342-9adf-9d88e54d9c5f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.017530] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f92dd696-3a16-4f4c-adb7-19b1796b9b24 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.053670] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07d4fc53-ccd3-426d-9c2e-4ccd7503fdb2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.061510] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97d9b70b-0d8c-4d50-8c15-a76bd4f83333 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.076264] env[63028]: DEBUG nova.compute.provider_tree [None req-92e3eef4-1898-4aab-9a30-727634119e63 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1064.140325] env[63028]: DEBUG nova.network.neutron [req-30a2c8ac-823a-484f-9743-1e2eb44f8c84 req-d9d68db2-25df-4024-8742-36081871143b service nova] [instance: 2c2fb165-8906-4d42-a839-89ea6c8814ab] Updated VIF entry in instance network info cache for port 00aeadf5-50e8-433f-a1da-81ac2a8f259f. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1064.140804] env[63028]: DEBUG nova.network.neutron [req-30a2c8ac-823a-484f-9743-1e2eb44f8c84 req-d9d68db2-25df-4024-8742-36081871143b service nova] [instance: 2c2fb165-8906-4d42-a839-89ea6c8814ab] Updating instance_info_cache with network_info: [{"id": "00aeadf5-50e8-433f-a1da-81ac2a8f259f", "address": "fa:16:3e:91:ac:fb", "network": {"id": "1fdce9cd-bf47-402a-8e16-4c01a25ad481", "bridge": "br-int", "label": "tempest-ServersTestJSON-1363423183-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ea26842446ec4691a6456a6659188704", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "706c9762-1cf8-4770-897d-377d0d927773", "external-id": "nsx-vlan-transportzone-402", "segmentation_id": 402, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap00aeadf5-50", "ovs_interfaceid": "00aeadf5-50e8-433f-a1da-81ac2a8f259f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1064.386043] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2736213, 'name': CreateVM_Task, 'duration_secs': 0.296281} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1064.386221] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2c2fb165-8906-4d42-a839-89ea6c8814ab] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1064.387123] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f70752c5-7328-401a-9ff3-9b6723799af8 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1064.387299] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f70752c5-7328-401a-9ff3-9b6723799af8 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1064.387813] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f70752c5-7328-401a-9ff3-9b6723799af8 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1064.388086] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d59df225-cd67-4a65-bef3-9c5b75f5dbc6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.393449] 
env[63028]: DEBUG oslo_vmware.api [None req-f70752c5-7328-401a-9ff3-9b6723799af8 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Waiting for the task: (returnval){ [ 1064.393449] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52a65014-038e-a71e-3eaf-df3a14aabe49" [ 1064.393449] env[63028]: _type = "Task" [ 1064.393449] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.400781] env[63028]: DEBUG oslo_vmware.api [None req-f70752c5-7328-401a-9ff3-9b6723799af8 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52a65014-038e-a71e-3eaf-df3a14aabe49, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.579549] env[63028]: DEBUG nova.scheduler.client.report [None req-92e3eef4-1898-4aab-9a30-727634119e63 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1064.607633] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e3dae7b-f015-4c7e-b349-6709b377352e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.629280] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-b28eb651-7c74-4410-891e-a25b642e44b0 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Updating instance 'd41a1eae-bb89-4222-9466-d86af891c654' progress to 0 {{(pid=63028) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1064.644226] env[63028]: DEBUG oslo_concurrency.lockutils [req-30a2c8ac-823a-484f-9743-1e2eb44f8c84 req-d9d68db2-25df-4024-8742-36081871143b service nova] Releasing lock "refresh_cache-2c2fb165-8906-4d42-a839-89ea6c8814ab" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1064.905227] env[63028]: DEBUG oslo_vmware.api [None req-f70752c5-7328-401a-9ff3-9b6723799af8 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52a65014-038e-a71e-3eaf-df3a14aabe49, 'name': SearchDatastore_Task, 'duration_secs': 0.011967} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1064.905562] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f70752c5-7328-401a-9ff3-9b6723799af8 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1064.905803] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-f70752c5-7328-401a-9ff3-9b6723799af8 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 2c2fb165-8906-4d42-a839-89ea6c8814ab] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1064.906045] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f70752c5-7328-401a-9ff3-9b6723799af8 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1064.906202] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f70752c5-7328-401a-9ff3-9b6723799af8 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1064.906387] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-f70752c5-7328-401a-9ff3-9b6723799af8 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1064.906733] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3fe6533f-3ac4-4748-b121-382b3e81a57d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.916307] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-f70752c5-7328-401a-9ff3-9b6723799af8 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1064.916486] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-f70752c5-7328-401a-9ff3-9b6723799af8 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1064.917228] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3b829f52-b860-46b1-87b0-78d6154f538b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.923038] env[63028]: DEBUG oslo_vmware.api [None req-f70752c5-7328-401a-9ff3-9b6723799af8 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Waiting for the task: (returnval){ [ 1064.923038] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]526d1068-e4ce-ea49-c7d0-ca01bddcfd50" [ 1064.923038] env[63028]: _type = "Task" [ 1064.923038] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.930455] env[63028]: DEBUG oslo_vmware.api [None req-f70752c5-7328-401a-9ff3-9b6723799af8 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]526d1068-e4ce-ea49-c7d0-ca01bddcfd50, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.084816] env[63028]: DEBUG oslo_concurrency.lockutils [None req-92e3eef4-1898-4aab-9a30-727634119e63 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.931s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1065.087278] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f2059770-ba5f-433e-a3f5-b0c33884aea0 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.611s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1065.088753] env[63028]: INFO nova.compute.claims [None req-f2059770-ba5f-433e-a3f5-b0c33884aea0 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 4ec96b68-2fdb-4150-8d26-53fdf79c8e26] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1065.112785] env[63028]: INFO nova.scheduler.client.report [None req-92e3eef4-1898-4aab-9a30-727634119e63 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Deleted allocations for instance a50e1167-d8ed-4099-83c3-a5066ab0be1f [ 1065.135936] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-b28eb651-7c74-4410-891e-a25b642e44b0 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1065.137186] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-22dd84e9-ba41-41c5-a5a8-b36b1bc5258d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.145090] env[63028]: DEBUG oslo_vmware.api [None req-b28eb651-7c74-4410-891e-a25b642e44b0 tempest-ServerActionsTestOtherB-816222261 
tempest-ServerActionsTestOtherB-816222261-project-member] Waiting for the task: (returnval){ [ 1065.145090] env[63028]: value = "task-2736214" [ 1065.145090] env[63028]: _type = "Task" [ 1065.145090] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1065.154130] env[63028]: DEBUG oslo_vmware.api [None req-b28eb651-7c74-4410-891e-a25b642e44b0 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2736214, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.433703] env[63028]: DEBUG oslo_vmware.api [None req-f70752c5-7328-401a-9ff3-9b6723799af8 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]526d1068-e4ce-ea49-c7d0-ca01bddcfd50, 'name': SearchDatastore_Task, 'duration_secs': 0.011223} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1065.434596] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e379c826-d5a1-4897-81fe-5fba6a92eaa0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.439634] env[63028]: DEBUG oslo_vmware.api [None req-f70752c5-7328-401a-9ff3-9b6723799af8 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Waiting for the task: (returnval){ [ 1065.439634] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52bb870e-73aa-acf1-a647-1285be057c3a" [ 1065.439634] env[63028]: _type = "Task" [ 1065.439634] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1065.446994] env[63028]: DEBUG oslo_vmware.api [None req-f70752c5-7328-401a-9ff3-9b6723799af8 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52bb870e-73aa-acf1-a647-1285be057c3a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.620749] env[63028]: DEBUG oslo_concurrency.lockutils [None req-92e3eef4-1898-4aab-9a30-727634119e63 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Lock "a50e1167-d8ed-4099-83c3-a5066ab0be1f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 16.225s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1065.655048] env[63028]: DEBUG oslo_vmware.api [None req-b28eb651-7c74-4410-891e-a25b642e44b0 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2736214, 'name': PowerOffVM_Task, 'duration_secs': 0.206735} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1065.655203] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-b28eb651-7c74-4410-891e-a25b642e44b0 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1065.655334] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-b28eb651-7c74-4410-891e-a25b642e44b0 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Updating instance 'd41a1eae-bb89-4222-9466-d86af891c654' progress to 17 {{(pid=63028) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1065.952810] env[63028]: DEBUG oslo_vmware.api [None req-f70752c5-7328-401a-9ff3-9b6723799af8 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52bb870e-73aa-acf1-a647-1285be057c3a, 'name': SearchDatastore_Task, 'duration_secs': 0.009125} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1065.953092] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f70752c5-7328-401a-9ff3-9b6723799af8 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1065.953239] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-f70752c5-7328-401a-9ff3-9b6723799af8 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] 2c2fb165-8906-4d42-a839-89ea6c8814ab/2c2fb165-8906-4d42-a839-89ea6c8814ab.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1065.953534] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9a42c8bb-2752-4920-8ffc-b2578a5b33b6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.963918] env[63028]: DEBUG oslo_vmware.api [None req-f70752c5-7328-401a-9ff3-9b6723799af8 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Waiting for the task: (returnval){ [ 1065.963918] env[63028]: value = "task-2736215" [ 1065.963918] env[63028]: _type = "Task" [ 1065.963918] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1065.974188] env[63028]: DEBUG oslo_vmware.api [None req-f70752c5-7328-401a-9ff3-9b6723799af8 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2736215, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.163793] env[63028]: DEBUG nova.virt.hardware [None req-b28eb651-7c74-4410-891e-a25b642e44b0 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1066.164160] env[63028]: DEBUG nova.virt.hardware [None req-b28eb651-7c74-4410-891e-a25b642e44b0 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1066.164406] env[63028]: DEBUG nova.virt.hardware [None req-b28eb651-7c74-4410-891e-a25b642e44b0 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1066.164688] env[63028]: DEBUG nova.virt.hardware [None req-b28eb651-7c74-4410-891e-a25b642e44b0 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1066.164858] env[63028]: DEBUG nova.virt.hardware [None req-b28eb651-7c74-4410-891e-a25b642e44b0 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1066.165080] env[63028]: DEBUG nova.virt.hardware [None req-b28eb651-7c74-4410-891e-a25b642e44b0 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1066.165376] env[63028]: DEBUG nova.virt.hardware [None req-b28eb651-7c74-4410-891e-a25b642e44b0 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1066.165602] env[63028]: DEBUG nova.virt.hardware [None req-b28eb651-7c74-4410-891e-a25b642e44b0 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1066.165835] env[63028]: DEBUG nova.virt.hardware [None req-b28eb651-7c74-4410-891e-a25b642e44b0 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] 
Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1066.166084] env[63028]: DEBUG nova.virt.hardware [None req-b28eb651-7c74-4410-891e-a25b642e44b0 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1066.166339] env[63028]: DEBUG nova.virt.hardware [None req-b28eb651-7c74-4410-891e-a25b642e44b0 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1066.175767] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8b29386f-8e08-4260-acf4-bda1e7dc69ff {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.202432] env[63028]: DEBUG oslo_vmware.api [None req-b28eb651-7c74-4410-891e-a25b642e44b0 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Waiting for the task: (returnval){ [ 1066.202432] env[63028]: value = "task-2736216" [ 1066.202432] env[63028]: _type = "Task" [ 1066.202432] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1066.217663] env[63028]: DEBUG oslo_vmware.api [None req-b28eb651-7c74-4410-891e-a25b642e44b0 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2736216, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.463926] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd1ba272-36f8-4fbc-91af-012f27a23f73 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.475413] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb6114ec-eb68-4cf2-a109-bc2d0432b049 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.483675] env[63028]: DEBUG oslo_vmware.api [None req-f70752c5-7328-401a-9ff3-9b6723799af8 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2736215, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.511643] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0e184e4-b1d8-4ac7-be02-0906aeac837d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.519186] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b2f5ae1-a7b4-4470-917c-6c5781a83f19 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.532866] env[63028]: DEBUG nova.compute.provider_tree [None req-f2059770-ba5f-433e-a3f5-b0c33884aea0 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1066.712676] env[63028]: DEBUG oslo_vmware.api [None req-b28eb651-7c74-4410-891e-a25b642e44b0 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2736216, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.976977] env[63028]: DEBUG oslo_vmware.api [None req-f70752c5-7328-401a-9ff3-9b6723799af8 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2736215, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.539977} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1066.978011] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-f70752c5-7328-401a-9ff3-9b6723799af8 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] 2c2fb165-8906-4d42-a839-89ea6c8814ab/2c2fb165-8906-4d42-a839-89ea6c8814ab.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1066.978011] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-f70752c5-7328-401a-9ff3-9b6723799af8 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 2c2fb165-8906-4d42-a839-89ea6c8814ab] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1066.978011] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-43825b95-5000-45fb-ba77-ed3dd0122af2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.983866] env[63028]: DEBUG oslo_vmware.api [None req-f70752c5-7328-401a-9ff3-9b6723799af8 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Waiting for the task: (returnval){ [ 1066.983866] env[63028]: value = "task-2736217" [ 1066.983866] env[63028]: _type = "Task" [ 1066.983866] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1066.992203] env[63028]: DEBUG oslo_vmware.api [None req-f70752c5-7328-401a-9ff3-9b6723799af8 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2736217, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.036451] env[63028]: DEBUG nova.scheduler.client.report [None req-f2059770-ba5f-433e-a3f5-b0c33884aea0 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1067.212942] env[63028]: DEBUG oslo_vmware.api [None req-b28eb651-7c74-4410-891e-a25b642e44b0 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2736216, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.497307] env[63028]: DEBUG oslo_vmware.api [None req-f70752c5-7328-401a-9ff3-9b6723799af8 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2736217, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062831} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1067.497578] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-f70752c5-7328-401a-9ff3-9b6723799af8 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 2c2fb165-8906-4d42-a839-89ea6c8814ab] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1067.498599] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bea5e9d4-7781-47ea-8061-f44c126987f4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.521164] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-f70752c5-7328-401a-9ff3-9b6723799af8 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 2c2fb165-8906-4d42-a839-89ea6c8814ab] Reconfiguring VM instance instance-00000067 to attach disk [datastore1] 2c2fb165-8906-4d42-a839-89ea6c8814ab/2c2fb165-8906-4d42-a839-89ea6c8814ab.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1067.521729] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b0dc51cf-1a1b-4b9c-a6f3-27dbbba0661f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.541085] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f2059770-ba5f-433e-a3f5-b0c33884aea0 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.454s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1067.541594] env[63028]: DEBUG nova.compute.manager [None req-f2059770-ba5f-433e-a3f5-b0c33884aea0 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 4ec96b68-2fdb-4150-8d26-53fdf79c8e26] Start building networks asynchronously for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1067.545039] env[63028]: DEBUG oslo_vmware.api [None req-f70752c5-7328-401a-9ff3-9b6723799af8 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Waiting for the task: (returnval){ [ 1067.545039] env[63028]: value = "task-2736218" [ 1067.545039] env[63028]: _type = "Task" [ 1067.545039] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1067.545544] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8cd7b3f4-3be4-4b37-9fea-533f26d5aab9 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 12.897s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1067.555851] env[63028]: DEBUG oslo_vmware.api [None req-f70752c5-7328-401a-9ff3-9b6723799af8 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2736218, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.713216] env[63028]: DEBUG oslo_vmware.api [None req-b28eb651-7c74-4410-891e-a25b642e44b0 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2736216, 'name': ReconfigVM_Task, 'duration_secs': 1.302328} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1067.714042] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-b28eb651-7c74-4410-891e-a25b642e44b0 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Updating instance 'd41a1eae-bb89-4222-9466-d86af891c654' progress to 33 {{(pid=63028) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1067.920668] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ed5ccc95-c538-41b6-beda-16d461eedf6f tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Acquiring lock "6865f832-d409-4b9b-8b6c-33b0bf07d2b2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1067.920913] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ed5ccc95-c538-41b6-beda-16d461eedf6f tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Lock "6865f832-d409-4b9b-8b6c-33b0bf07d2b2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1067.980038] env[63028]: DEBUG oslo_concurrency.lockutils [None req-6068c357-2660-4546-a2f1-f2db3e3b22d8 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Acquiring lock "85aafadb-81d6-4687-aed1-fbe829e5f95f" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1067.980464] env[63028]: DEBUG oslo_concurrency.lockutils [None req-6068c357-2660-4546-a2f1-f2db3e3b22d8 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Lock "85aafadb-81d6-4687-aed1-fbe829e5f95f" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1068.046464] env[63028]: DEBUG nova.compute.utils [None req-f2059770-ba5f-433e-a3f5-b0c33884aea0 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1068.047957] env[63028]: DEBUG nova.compute.manager [None req-f2059770-ba5f-433e-a3f5-b0c33884aea0 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 4ec96b68-2fdb-4150-8d26-53fdf79c8e26] Allocating IP information in the background. 
{{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1068.048095] env[63028]: DEBUG nova.network.neutron [None req-f2059770-ba5f-433e-a3f5-b0c33884aea0 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 4ec96b68-2fdb-4150-8d26-53fdf79c8e26] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1068.062877] env[63028]: DEBUG oslo_vmware.api [None req-f70752c5-7328-401a-9ff3-9b6723799af8 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2736218, 'name': ReconfigVM_Task, 'duration_secs': 0.4328} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1068.063200] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-f70752c5-7328-401a-9ff3-9b6723799af8 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 2c2fb165-8906-4d42-a839-89ea6c8814ab] Reconfigured VM instance instance-00000067 to attach disk [datastore1] 2c2fb165-8906-4d42-a839-89ea6c8814ab/2c2fb165-8906-4d42-a839-89ea6c8814ab.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1068.064031] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-20650fa0-8400-4715-a12d-74a441689076 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.070270] env[63028]: DEBUG oslo_vmware.api [None req-f70752c5-7328-401a-9ff3-9b6723799af8 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Waiting for the task: (returnval){ [ 1068.070270] env[63028]: value = "task-2736219" [ 1068.070270] env[63028]: _type = "Task" [ 1068.070270] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1068.080028] env[63028]: DEBUG oslo_vmware.api [None req-f70752c5-7328-401a-9ff3-9b6723799af8 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2736219, 'name': Rename_Task} progress is 5%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.120833] env[63028]: DEBUG nova.policy [None req-f2059770-ba5f-433e-a3f5-b0c33884aea0 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a3ed8f5b3d7b4be99d3b4649e156af58', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '847e89af959a4266ab55c1d2106ba8fe', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 1068.220241] env[63028]: DEBUG nova.virt.hardware [None req-b28eb651-7c74-4410-891e-a25b642e44b0 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1068.220241] env[63028]: DEBUG nova.virt.hardware [None req-b28eb651-7c74-4410-891e-a25b642e44b0 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1068.220590] env[63028]: DEBUG nova.virt.hardware [None req-b28eb651-7c74-4410-891e-a25b642e44b0 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1068.220713] env[63028]: DEBUG nova.virt.hardware [None req-b28eb651-7c74-4410-891e-a25b642e44b0 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1068.220903] env[63028]: DEBUG nova.virt.hardware [None req-b28eb651-7c74-4410-891e-a25b642e44b0 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1068.221100] env[63028]: DEBUG nova.virt.hardware [None req-b28eb651-7c74-4410-891e-a25b642e44b0 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1068.221385] env[63028]: DEBUG nova.virt.hardware [None req-b28eb651-7c74-4410-891e-a25b642e44b0 tempest-ServerActionsTestOtherB-816222261 
tempest-ServerActionsTestOtherB-816222261-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1068.221590] env[63028]: DEBUG nova.virt.hardware [None req-b28eb651-7c74-4410-891e-a25b642e44b0 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1068.221820] env[63028]: DEBUG nova.virt.hardware [None req-b28eb651-7c74-4410-891e-a25b642e44b0 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1068.222107] env[63028]: DEBUG nova.virt.hardware [None req-b28eb651-7c74-4410-891e-a25b642e44b0 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1068.222319] env[63028]: DEBUG nova.virt.hardware [None req-b28eb651-7c74-4410-891e-a25b642e44b0 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1068.227776] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-b28eb651-7c74-4410-891e-a25b642e44b0 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Reconfiguring VM instance instance-0000005a to detach disk 2000 {{(pid=63028) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1068.230561] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-108998cf-fedd-41ca-aa1f-5d0011ce6a46 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.252697] env[63028]: DEBUG oslo_vmware.api [None req-b28eb651-7c74-4410-891e-a25b642e44b0 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Waiting for the task: (returnval){ [ 1068.252697] env[63028]: value = "task-2736220" [ 1068.252697] env[63028]: _type = "Task" [ 1068.252697] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1068.264486] env[63028]: DEBUG oslo_vmware.api [None req-b28eb651-7c74-4410-891e-a25b642e44b0 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2736220, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.412628] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bfe2c02-1a83-4116-ad78-a4a071685acb {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.420646] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15959b4b-418d-4f7f-aa2b-2122c9c2f19d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.425980] env[63028]: DEBUG nova.compute.manager [None req-ed5ccc95-c538-41b6-beda-16d461eedf6f tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: 6865f832-d409-4b9b-8b6c-33b0bf07d2b2] Starting instance... {{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1068.457144] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6276f4ca-9db9-4d5a-82d0-fea5ee896e20 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.464770] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2abd435-e35b-466c-8391-45ab302b3fa5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.478787] env[63028]: DEBUG nova.compute.provider_tree [None req-8cd7b3f4-3be4-4b37-9fea-533f26d5aab9 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1068.481896] env[63028]: DEBUG nova.network.neutron [None req-f2059770-ba5f-433e-a3f5-b0c33884aea0 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 4ec96b68-2fdb-4150-8d26-53fdf79c8e26] Successfully created port: 78ea2d63-5ca1-4e37-808c-688a7c0fc30e {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1068.485235] env[63028]: INFO nova.compute.manager [None req-6068c357-2660-4546-a2f1-f2db3e3b22d8 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Detaching volume 996ec8ec-5318-4963-9384-330f1e01190d [ 1068.519936] env[63028]: INFO nova.virt.block_device [None req-6068c357-2660-4546-a2f1-f2db3e3b22d8 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Attempting to driver detach volume 996ec8ec-5318-4963-9384-330f1e01190d from mountpoint /dev/sdb [ 1068.520191] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-6068c357-2660-4546-a2f1-f2db3e3b22d8 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Volume detach. 
Driver type: vmdk {{(pid=63028) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1068.520384] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-6068c357-2660-4546-a2f1-f2db3e3b22d8 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-550837', 'volume_id': '996ec8ec-5318-4963-9384-330f1e01190d', 'name': 'volume-996ec8ec-5318-4963-9384-330f1e01190d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': '85aafadb-81d6-4687-aed1-fbe829e5f95f', 'attached_at': '', 'detached_at': '', 'volume_id': '996ec8ec-5318-4963-9384-330f1e01190d', 'serial': '996ec8ec-5318-4963-9384-330f1e01190d'} {{(pid=63028) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1068.521279] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aeac1454-00b8-4629-abe3-f007a05c9a80 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.543640] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-218ca2d4-7ea2-47e2-87ff-7cd45c080a69 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.550274] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07434eb8-8544-4996-9522-4b538a14a5a4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.552848] env[63028]: DEBUG nova.compute.manager [None req-f2059770-ba5f-433e-a3f5-b0c33884aea0 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 4ec96b68-2fdb-4150-8d26-53fdf79c8e26] Start building block device mappings for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1068.579025] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36f30908-ba4c-4ccd-bfcc-52a9c688421f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.582893] env[63028]: DEBUG oslo_vmware.api [None req-f70752c5-7328-401a-9ff3-9b6723799af8 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2736219, 'name': Rename_Task, 'duration_secs': 0.196664} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1068.593291] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-f70752c5-7328-401a-9ff3-9b6723799af8 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 2c2fb165-8906-4d42-a839-89ea6c8814ab] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1068.593616] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-6068c357-2660-4546-a2f1-f2db3e3b22d8 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] The volume has not been displaced from its original location: [datastore2] volume-996ec8ec-5318-4963-9384-330f1e01190d/volume-996ec8ec-5318-4963-9384-330f1e01190d.vmdk. No consolidation needed. {{(pid=63028) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1068.599059] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-6068c357-2660-4546-a2f1-f2db3e3b22d8 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Reconfiguring VM instance instance-00000036 to detach disk 2001 {{(pid=63028) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1068.599917] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c4d5111a-94c5-4afa-8052-9fd83258d075 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.601559] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5e66d859-b051-43cf-8cb0-ff72dea80d2e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.618715] env[63028]: DEBUG oslo_vmware.api [None req-f70752c5-7328-401a-9ff3-9b6723799af8 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Waiting for the task: (returnval){ [ 1068.618715] env[63028]: value = "task-2736221" [ 1068.618715] env[63028]: _type = "Task" [ 1068.618715] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1068.619947] env[63028]: DEBUG oslo_vmware.api [None req-6068c357-2660-4546-a2f1-f2db3e3b22d8 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Waiting for the task: (returnval){ [ 1068.619947] env[63028]: value = "task-2736222" [ 1068.619947] env[63028]: _type = "Task" [ 1068.619947] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1068.629966] env[63028]: DEBUG oslo_vmware.api [None req-f70752c5-7328-401a-9ff3-9b6723799af8 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2736221, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.633305] env[63028]: DEBUG oslo_vmware.api [None req-6068c357-2660-4546-a2f1-f2db3e3b22d8 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2736222, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.764764] env[63028]: DEBUG oslo_vmware.api [None req-b28eb651-7c74-4410-891e-a25b642e44b0 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2736220, 'name': ReconfigVM_Task, 'duration_secs': 0.273716} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1068.764999] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-b28eb651-7c74-4410-891e-a25b642e44b0 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Reconfigured VM instance instance-0000005a to detach disk 2000 {{(pid=63028) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1068.766675] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bf61691-f48e-4c2a-a79f-5becc6c8762b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.791704] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-b28eb651-7c74-4410-891e-a25b642e44b0 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Reconfiguring VM instance instance-0000005a to attach disk [datastore1] d41a1eae-bb89-4222-9466-d86af891c654/d41a1eae-bb89-4222-9466-d86af891c654.vmdk or device None with type thin {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1068.792041] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2f1a169c-da70-4bb3-a514-d5baf8a0489e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.810230] env[63028]: DEBUG oslo_vmware.api [None req-b28eb651-7c74-4410-891e-a25b642e44b0 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Waiting for the task: (returnval){ [ 1068.810230] env[63028]: value = "task-2736223" [ 1068.810230] env[63028]: _type = "Task" [ 1068.810230] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1068.819162] env[63028]: DEBUG oslo_vmware.api [None req-b28eb651-7c74-4410-891e-a25b642e44b0 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2736223, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.976878] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ed5ccc95-c538-41b6-beda-16d461eedf6f tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1068.984988] env[63028]: DEBUG nova.scheduler.client.report [None req-8cd7b3f4-3be4-4b37-9fea-533f26d5aab9 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1069.135613] env[63028]: DEBUG oslo_vmware.api [None req-6068c357-2660-4546-a2f1-f2db3e3b22d8 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2736222, 'name': ReconfigVM_Task, 'duration_secs': 0.2437} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1069.137316] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-6068c357-2660-4546-a2f1-f2db3e3b22d8 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Reconfigured VM instance instance-00000036 to detach disk 2001 {{(pid=63028) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1069.141770] env[63028]: DEBUG oslo_vmware.api [None req-f70752c5-7328-401a-9ff3-9b6723799af8 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2736221, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.142041] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-61740ece-a108-4cea-a8c6-13952602b48b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.157411] env[63028]: DEBUG oslo_vmware.api [None req-6068c357-2660-4546-a2f1-f2db3e3b22d8 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Waiting for the task: (returnval){ [ 1069.157411] env[63028]: value = "task-2736224" [ 1069.157411] env[63028]: _type = "Task" [ 1069.157411] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1069.166683] env[63028]: DEBUG oslo_vmware.api [None req-6068c357-2660-4546-a2f1-f2db3e3b22d8 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2736224, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.319713] env[63028]: DEBUG oslo_vmware.api [None req-b28eb651-7c74-4410-891e-a25b642e44b0 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2736223, 'name': ReconfigVM_Task, 'duration_secs': 0.471537} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1069.320012] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-b28eb651-7c74-4410-891e-a25b642e44b0 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Reconfigured VM instance instance-0000005a to attach disk [datastore1] d41a1eae-bb89-4222-9466-d86af891c654/d41a1eae-bb89-4222-9466-d86af891c654.vmdk or device None with type thin {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1069.320305] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-b28eb651-7c74-4410-891e-a25b642e44b0 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Updating instance 'd41a1eae-bb89-4222-9466-d86af891c654' progress to 50 {{(pid=63028) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1069.562769] env[63028]: DEBUG nova.compute.manager [None req-f2059770-ba5f-433e-a3f5-b0c33884aea0 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 4ec96b68-2fdb-4150-8d26-53fdf79c8e26] Start spawning the instance on the hypervisor. {{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1069.589774] env[63028]: DEBUG nova.virt.hardware [None req-f2059770-ba5f-433e-a3f5-b0c33884aea0 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1069.590054] env[63028]: DEBUG nova.virt.hardware [None req-f2059770-ba5f-433e-a3f5-b0c33884aea0 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1069.590226] env[63028]: DEBUG nova.virt.hardware [None req-f2059770-ba5f-433e-a3f5-b0c33884aea0 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1069.590412] env[63028]: DEBUG 
nova.virt.hardware [None req-f2059770-ba5f-433e-a3f5-b0c33884aea0 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1069.590562] env[63028]: DEBUG nova.virt.hardware [None req-f2059770-ba5f-433e-a3f5-b0c33884aea0 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1069.590711] env[63028]: DEBUG nova.virt.hardware [None req-f2059770-ba5f-433e-a3f5-b0c33884aea0 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1069.591145] env[63028]: DEBUG nova.virt.hardware [None req-f2059770-ba5f-433e-a3f5-b0c33884aea0 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1069.591417] env[63028]: DEBUG nova.virt.hardware [None req-f2059770-ba5f-433e-a3f5-b0c33884aea0 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1069.591611] env[63028]: DEBUG nova.virt.hardware [None req-f2059770-ba5f-433e-a3f5-b0c33884aea0 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1069.591782] env[63028]: DEBUG nova.virt.hardware [None req-f2059770-ba5f-433e-a3f5-b0c33884aea0 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1069.591958] env[63028]: DEBUG nova.virt.hardware [None req-f2059770-ba5f-433e-a3f5-b0c33884aea0 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1069.592906] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3fe3680-4b8d-43d9-8d95-dde5fd12c134 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.601123] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-170418d0-50de-41b7-8991-69cb52a471aa {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.627390] env[63028]: DEBUG oslo_vmware.api [None req-f70752c5-7328-401a-9ff3-9b6723799af8 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2736221, 'name': PowerOnVM_Task, 'duration_secs': 0.546774} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1069.627634] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-f70752c5-7328-401a-9ff3-9b6723799af8 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 2c2fb165-8906-4d42-a839-89ea6c8814ab] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1069.627838] env[63028]: INFO nova.compute.manager [None req-f70752c5-7328-401a-9ff3-9b6723799af8 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 2c2fb165-8906-4d42-a839-89ea6c8814ab] Took 8.83 seconds to spawn the instance on the hypervisor. [ 1069.628033] env[63028]: DEBUG nova.compute.manager [None req-f70752c5-7328-401a-9ff3-9b6723799af8 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 2c2fb165-8906-4d42-a839-89ea6c8814ab] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1069.628752] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac340676-8bf6-4569-8226-dc55cc7f10f1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.666248] env[63028]: DEBUG oslo_vmware.api [None req-6068c357-2660-4546-a2f1-f2db3e3b22d8 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2736224, 'name': ReconfigVM_Task, 'duration_secs': 0.141884} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1069.667209] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-6068c357-2660-4546-a2f1-f2db3e3b22d8 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-550837', 'volume_id': '996ec8ec-5318-4963-9384-330f1e01190d', 'name': 'volume-996ec8ec-5318-4963-9384-330f1e01190d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': '85aafadb-81d6-4687-aed1-fbe829e5f95f', 'attached_at': '', 'detached_at': '', 'volume_id': '996ec8ec-5318-4963-9384-330f1e01190d', 'serial': '996ec8ec-5318-4963-9384-330f1e01190d'} {{(pid=63028) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1069.827157] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0714824-3849-4a48-9723-f27bbd1bc27d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.850126] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e77347ee-12f9-4d26-8f20-a6c23c2dafac {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.856409] env[63028]: DEBUG nova.compute.manager [req-27917228-58d8-4e82-9a7f-56d303c1faad req-855f7b19-4d4d-4c46-8ede-b3f5be571185 service nova] [instance: 4ec96b68-2fdb-4150-8d26-53fdf79c8e26] Received event network-vif-plugged-78ea2d63-5ca1-4e37-808c-688a7c0fc30e {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1069.856648] 
env[63028]: DEBUG oslo_concurrency.lockutils [req-27917228-58d8-4e82-9a7f-56d303c1faad req-855f7b19-4d4d-4c46-8ede-b3f5be571185 service nova] Acquiring lock "4ec96b68-2fdb-4150-8d26-53fdf79c8e26-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1069.856850] env[63028]: DEBUG oslo_concurrency.lockutils [req-27917228-58d8-4e82-9a7f-56d303c1faad req-855f7b19-4d4d-4c46-8ede-b3f5be571185 service nova] Lock "4ec96b68-2fdb-4150-8d26-53fdf79c8e26-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1069.857029] env[63028]: DEBUG oslo_concurrency.lockutils [req-27917228-58d8-4e82-9a7f-56d303c1faad req-855f7b19-4d4d-4c46-8ede-b3f5be571185 service nova] Lock "4ec96b68-2fdb-4150-8d26-53fdf79c8e26-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1069.857203] env[63028]: DEBUG nova.compute.manager [req-27917228-58d8-4e82-9a7f-56d303c1faad req-855f7b19-4d4d-4c46-8ede-b3f5be571185 service nova] [instance: 4ec96b68-2fdb-4150-8d26-53fdf79c8e26] No waiting events found dispatching network-vif-plugged-78ea2d63-5ca1-4e37-808c-688a7c0fc30e {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1069.857367] env[63028]: WARNING nova.compute.manager [req-27917228-58d8-4e82-9a7f-56d303c1faad req-855f7b19-4d4d-4c46-8ede-b3f5be571185 service nova] [instance: 4ec96b68-2fdb-4150-8d26-53fdf79c8e26] Received unexpected event network-vif-plugged-78ea2d63-5ca1-4e37-808c-688a7c0fc30e for instance with vm_state building and task_state spawning. 
[ 1069.874125] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-b28eb651-7c74-4410-891e-a25b642e44b0 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Updating instance 'd41a1eae-bb89-4222-9466-d86af891c654' progress to 67 {{(pid=63028) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1069.957149] env[63028]: DEBUG nova.network.neutron [None req-f2059770-ba5f-433e-a3f5-b0c33884aea0 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 4ec96b68-2fdb-4150-8d26-53fdf79c8e26] Successfully updated port: 78ea2d63-5ca1-4e37-808c-688a7c0fc30e {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1069.995995] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8cd7b3f4-3be4-4b37-9fea-533f26d5aab9 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.450s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1069.998903] env[63028]: DEBUG oslo_concurrency.lockutils [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 13.585s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1070.145556] env[63028]: INFO nova.compute.manager [None req-f70752c5-7328-401a-9ff3-9b6723799af8 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 2c2fb165-8906-4d42-a839-89ea6c8814ab] Took 27.38 seconds to build instance. 
[ 1070.211959] env[63028]: DEBUG nova.objects.instance [None req-6068c357-2660-4546-a2f1-f2db3e3b22d8 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Lazy-loading 'flavor' on Instance uuid 85aafadb-81d6-4687-aed1-fbe829e5f95f {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1070.460320] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f2059770-ba5f-433e-a3f5-b0c33884aea0 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Acquiring lock "refresh_cache-4ec96b68-2fdb-4150-8d26-53fdf79c8e26" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1070.460471] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f2059770-ba5f-433e-a3f5-b0c33884aea0 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Acquired lock "refresh_cache-4ec96b68-2fdb-4150-8d26-53fdf79c8e26" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1070.460627] env[63028]: DEBUG nova.network.neutron [None req-f2059770-ba5f-433e-a3f5-b0c33884aea0 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 4ec96b68-2fdb-4150-8d26-53fdf79c8e26] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1070.575757] env[63028]: INFO nova.scheduler.client.report [None req-8cd7b3f4-3be4-4b37-9fea-533f26d5aab9 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Deleted allocation for migration 17ac8e61-a974-40df-9f8f-cfcc2c503ee0 [ 1070.647218] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f70752c5-7328-401a-9ff3-9b6723799af8 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Lock "2c2fb165-8906-4d42-a839-89ea6c8814ab" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.895s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1070.999823] env[63028]: DEBUG nova.network.neutron [None req-f2059770-ba5f-433e-a3f5-b0c33884aea0 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 4ec96b68-2fdb-4150-8d26-53fdf79c8e26] Instance cache missing network info. {{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1071.015410] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Applying migration context for instance d41a1eae-bb89-4222-9466-d86af891c654 as it has an incoming, in-progress migration 29632012-2d70-44ac-b011-da63d2c5ae9c. 
Migration status is migrating {{(pid=63028) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1071.017184] env[63028]: INFO nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Updating resource usage from migration 29632012-2d70-44ac-b011-da63d2c5ae9c [ 1071.042264] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance d6137c80-0c09-4655-b264-472753b4fa9c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1071.042406] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1071.042576] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance 79f4ef22-a589-4d5c-8832-5d5dcdd55561 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1071.042705] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance 1d008794-3c1a-46c6-b4eb-3d5441efdb22 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1071.042835] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance f0ca0d73-d428-4b8c-acac-a80b7b7dd793 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1071.042978] env[63028]: WARNING nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance 8bb61bfa-d44e-4e06-867a-445d9b3db660 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1071.043132] env[63028]: WARNING nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance 56d6982d-9f76-4952-8c8b-f64b3c8d02fe is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1071.043254] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance 85aafadb-81d6-4687-aed1-fbe829e5f95f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1071.043370] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance b3930760-1888-4f80-85d8-65120a25f275 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1071.043499] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance f804ec95-0b97-4960-844d-b678b97fc401 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1071.043721] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance 56e6ade9-893b-4c85-b0b8-e9f7b12cbad6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1071.043721] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance 60d18f14-536a-4b0f-912b-21f3f5a30d28 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1071.043847] env[63028]: WARNING nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance 3566ab6f-1f8a-472d-9efb-47fa2520a215 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1071.043969] env[63028]: WARNING nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance 1cf111f2-df5e-48a6-905a-bc2d3ea45202 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1071.044113] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance e048cadf-9dc1-4eb7-a825-422d0736231c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1071.044230] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance 2c2fb165-8906-4d42-a839-89ea6c8814ab actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1071.044398] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Migration 29632012-2d70-44ac-b011-da63d2c5ae9c is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1071.044533] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance d41a1eae-bb89-4222-9466-d86af891c654 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1071.044647] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance 4ec96b68-2fdb-4150-8d26-53fdf79c8e26 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1071.081741] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8cd7b3f4-3be4-4b37-9fea-533f26d5aab9 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Lock "56e6ade9-893b-4c85-b0b8-e9f7b12cbad6" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 20.012s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1071.145108] env[63028]: DEBUG oslo_concurrency.lockutils [None req-eb531fce-c8e6-4b76-8558-fd7370aa3526 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Acquiring lock "2c2fb165-8906-4d42-a839-89ea6c8814ab" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1071.145373] env[63028]: DEBUG oslo_concurrency.lockutils [None req-eb531fce-c8e6-4b76-8558-fd7370aa3526 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Lock "2c2fb165-8906-4d42-a839-89ea6c8814ab" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1071.145575] env[63028]: DEBUG oslo_concurrency.lockutils [None req-eb531fce-c8e6-4b76-8558-fd7370aa3526 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Acquiring lock "2c2fb165-8906-4d42-a839-89ea6c8814ab-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1071.145755] env[63028]: DEBUG oslo_concurrency.lockutils [None req-eb531fce-c8e6-4b76-8558-fd7370aa3526 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Lock "2c2fb165-8906-4d42-a839-89ea6c8814ab-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1071.145926] env[63028]: DEBUG oslo_concurrency.lockutils [None req-eb531fce-c8e6-4b76-8558-fd7370aa3526 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Lock "2c2fb165-8906-4d42-a839-89ea6c8814ab-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1071.149937] env[63028]: INFO nova.compute.manager [None req-eb531fce-c8e6-4b76-8558-fd7370aa3526 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 2c2fb165-8906-4d42-a839-89ea6c8814ab] Terminating instance [ 1071.208620] env[63028]: DEBUG nova.network.neutron [None req-f2059770-ba5f-433e-a3f5-b0c33884aea0 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 4ec96b68-2fdb-4150-8d26-53fdf79c8e26] Updating instance_info_cache with network_info: [{"id": "78ea2d63-5ca1-4e37-808c-688a7c0fc30e", "address": "fa:16:3e:d6:65:0c", "network": {"id": "a3d9dede-4b44-4172-9541-24cc42ad633f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-595575190-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "847e89af959a4266ab55c1d2106ba8fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8459aaf-d6a8-46fb-ad14-464ac3104695", "external-id": "nsx-vlan-transportzone-46", "segmentation_id": 46, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap78ea2d63-5c", "ovs_interfaceid": "78ea2d63-5ca1-4e37-808c-688a7c0fc30e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1071.221656] env[63028]: DEBUG oslo_concurrency.lockutils [None req-6068c357-2660-4546-a2f1-f2db3e3b22d8 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Lock "85aafadb-81d6-4687-aed1-fbe829e5f95f" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.240s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1071.429040] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ee257483-2758-47c4-bbf1-8065e5a2447c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Acquiring lock "56e6ade9-893b-4c85-b0b8-e9f7b12cbad6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1071.431769] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ee257483-2758-47c4-bbf1-8065e5a2447c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Lock "56e6ade9-893b-4c85-b0b8-e9f7b12cbad6" acquired by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1071.431769] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ee257483-2758-47c4-bbf1-8065e5a2447c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Acquiring lock "56e6ade9-893b-4c85-b0b8-e9f7b12cbad6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1071.431769] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ee257483-2758-47c4-bbf1-8065e5a2447c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Lock "56e6ade9-893b-4c85-b0b8-e9f7b12cbad6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1071.431769] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ee257483-2758-47c4-bbf1-8065e5a2447c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Lock "56e6ade9-893b-4c85-b0b8-e9f7b12cbad6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1071.441207] env[63028]: INFO nova.compute.manager [None req-ee257483-2758-47c4-bbf1-8065e5a2447c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 56e6ade9-893b-4c85-b0b8-e9f7b12cbad6] Terminating instance [ 1071.525892] env[63028]: DEBUG nova.network.neutron [None req-b28eb651-7c74-4410-891e-a25b642e44b0 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Port c5f1d585-d624-4525-a5b2-132b18bf9378 binding to destination host cpu-1 is already ACTIVE {{(pid=63028) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1071.547761] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance 1f8415cc-f544-4c89-9863-43d5ae9144e8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1071.654656] env[63028]: DEBUG nova.compute.manager [None req-eb531fce-c8e6-4b76-8558-fd7370aa3526 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 2c2fb165-8906-4d42-a839-89ea6c8814ab] Start destroying the instance on the hypervisor. 
{{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1071.654948] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-eb531fce-c8e6-4b76-8558-fd7370aa3526 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 2c2fb165-8906-4d42-a839-89ea6c8814ab] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1071.656037] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-275e67f3-2b9f-4b11-b306-570b46597679 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.664612] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb531fce-c8e6-4b76-8558-fd7370aa3526 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 2c2fb165-8906-4d42-a839-89ea6c8814ab] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1071.665731] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4dda18ea-70c2-40a6-81db-0733ebbcad3f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.671517] env[63028]: DEBUG oslo_vmware.api [None req-eb531fce-c8e6-4b76-8558-fd7370aa3526 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Waiting for the task: (returnval){ [ 1071.671517] env[63028]: value = "task-2736225" [ 1071.671517] env[63028]: _type = "Task" [ 1071.671517] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1071.680670] env[63028]: DEBUG oslo_vmware.api [None req-eb531fce-c8e6-4b76-8558-fd7370aa3526 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2736225, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1071.710909] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f2059770-ba5f-433e-a3f5-b0c33884aea0 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Releasing lock "refresh_cache-4ec96b68-2fdb-4150-8d26-53fdf79c8e26" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1071.711282] env[63028]: DEBUG nova.compute.manager [None req-f2059770-ba5f-433e-a3f5-b0c33884aea0 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 4ec96b68-2fdb-4150-8d26-53fdf79c8e26] Instance network_info: |[{"id": "78ea2d63-5ca1-4e37-808c-688a7c0fc30e", "address": "fa:16:3e:d6:65:0c", "network": {"id": "a3d9dede-4b44-4172-9541-24cc42ad633f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-595575190-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "847e89af959a4266ab55c1d2106ba8fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8459aaf-d6a8-46fb-ad14-464ac3104695", "external-id": "nsx-vlan-transportzone-46", "segmentation_id": 46, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap78ea2d63-5c", "ovs_interfaceid": "78ea2d63-5ca1-4e37-808c-688a7c0fc30e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1071.711712] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-f2059770-ba5f-433e-a3f5-b0c33884aea0 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 4ec96b68-2fdb-4150-8d26-53fdf79c8e26] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d6:65:0c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c8459aaf-d6a8-46fb-ad14-464ac3104695', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '78ea2d63-5ca1-4e37-808c-688a7c0fc30e', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1071.719499] env[63028]: DEBUG oslo.service.loopingcall [None req-f2059770-ba5f-433e-a3f5-b0c33884aea0 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1071.719723] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4ec96b68-2fdb-4150-8d26-53fdf79c8e26] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1071.719992] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-acc02d44-87bc-4170-b325-0f3fdfc33a0a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.740289] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1071.740289] env[63028]: value = "task-2736226" [ 1071.740289] env[63028]: _type = "Task" [ 1071.740289] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1071.748142] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2736226, 'name': CreateVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1071.927062] env[63028]: DEBUG nova.compute.manager [req-b86a4894-1633-4986-aa3b-53a3530a7eba req-86f5d11b-24e6-40b5-b6ae-b2df41a549dd service nova] [instance: 4ec96b68-2fdb-4150-8d26-53fdf79c8e26] Received event network-changed-78ea2d63-5ca1-4e37-808c-688a7c0fc30e {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1071.927291] env[63028]: DEBUG nova.compute.manager [req-b86a4894-1633-4986-aa3b-53a3530a7eba req-86f5d11b-24e6-40b5-b6ae-b2df41a549dd service nova] [instance: 4ec96b68-2fdb-4150-8d26-53fdf79c8e26] Refreshing instance network info cache due to event network-changed-78ea2d63-5ca1-4e37-808c-688a7c0fc30e. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1071.927544] env[63028]: DEBUG oslo_concurrency.lockutils [req-b86a4894-1633-4986-aa3b-53a3530a7eba req-86f5d11b-24e6-40b5-b6ae-b2df41a549dd service nova] Acquiring lock "refresh_cache-4ec96b68-2fdb-4150-8d26-53fdf79c8e26" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1071.927702] env[63028]: DEBUG oslo_concurrency.lockutils [req-b86a4894-1633-4986-aa3b-53a3530a7eba req-86f5d11b-24e6-40b5-b6ae-b2df41a549dd service nova] Acquired lock "refresh_cache-4ec96b68-2fdb-4150-8d26-53fdf79c8e26" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1071.928339] env[63028]: DEBUG nova.network.neutron [req-b86a4894-1633-4986-aa3b-53a3530a7eba req-86f5d11b-24e6-40b5-b6ae-b2df41a549dd service nova] [instance: 4ec96b68-2fdb-4150-8d26-53fdf79c8e26] Refreshing network info cache for port 78ea2d63-5ca1-4e37-808c-688a7c0fc30e {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1071.943962] env[63028]: DEBUG nova.compute.manager [None req-ee257483-2758-47c4-bbf1-8065e5a2447c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 56e6ade9-893b-4c85-b0b8-e9f7b12cbad6] Start destroying the instance on the hypervisor. 
{{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1071.944118] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-ee257483-2758-47c4-bbf1-8065e5a2447c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 56e6ade9-893b-4c85-b0b8-e9f7b12cbad6] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1071.944986] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76ab015b-05e1-464d-97cf-8407242eb04c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.953918] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee257483-2758-47c4-bbf1-8065e5a2447c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 56e6ade9-893b-4c85-b0b8-e9f7b12cbad6] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1071.955458] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8cecd076-1ebe-477e-b593-cced479f2639 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.962613] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9fd4d01a-3dac-4bb3-843a-ce388315f026 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Acquiring lock "a7ff444e-43bc-4925-9754-86ff30de6751" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1071.962687] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9fd4d01a-3dac-4bb3-843a-ce388315f026 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Lock "a7ff444e-43bc-4925-9754-86ff30de6751" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1071.969628] env[63028]: DEBUG oslo_vmware.api [None req-ee257483-2758-47c4-bbf1-8065e5a2447c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Waiting for the task: (returnval){ [ 1071.969628] env[63028]: value = "task-2736227" [ 1071.969628] env[63028]: _type = "Task" [ 1071.969628] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1071.982165] env[63028]: DEBUG oslo_vmware.api [None req-ee257483-2758-47c4-bbf1-8065e5a2447c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2736227, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.051058] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance 899496ae-8463-42e0-a287-b141d956fa0a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1072.180904] env[63028]: DEBUG oslo_vmware.api [None req-eb531fce-c8e6-4b76-8558-fd7370aa3526 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2736225, 'name': PowerOffVM_Task, 'duration_secs': 0.200642} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1072.181116] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb531fce-c8e6-4b76-8558-fd7370aa3526 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 2c2fb165-8906-4d42-a839-89ea6c8814ab] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1072.181290] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-eb531fce-c8e6-4b76-8558-fd7370aa3526 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 2c2fb165-8906-4d42-a839-89ea6c8814ab] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1072.181534] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9f398f88-91c4-4d68-827b-f93503104afa {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.246986] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-eb531fce-c8e6-4b76-8558-fd7370aa3526 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 2c2fb165-8906-4d42-a839-89ea6c8814ab] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1072.247240] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-eb531fce-c8e6-4b76-8558-fd7370aa3526 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 2c2fb165-8906-4d42-a839-89ea6c8814ab] Deleting contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1072.247455] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-eb531fce-c8e6-4b76-8558-fd7370aa3526 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Deleting the datastore file [datastore1] 2c2fb165-8906-4d42-a839-89ea6c8814ab {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1072.247669] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a0fd9023-75d2-4a22-9eb5-5d7ebbcd9a6a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.252448] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2736226, 'name': CreateVM_Task, 'duration_secs': 0.309128} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1072.252925] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4ec96b68-2fdb-4150-8d26-53fdf79c8e26] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1072.253621] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f2059770-ba5f-433e-a3f5-b0c33884aea0 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1072.253786] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f2059770-ba5f-433e-a3f5-b0c33884aea0 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1072.254101] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f2059770-ba5f-433e-a3f5-b0c33884aea0 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1072.254376] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0b0bc7c3-bdea-4263-a596-369a28148b1b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.257600] env[63028]: DEBUG oslo_vmware.api [None req-eb531fce-c8e6-4b76-8558-fd7370aa3526 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Waiting for the task: (returnval){ [ 1072.257600] env[63028]: value = "task-2736229" [ 1072.257600] env[63028]: _type = "Task" [ 1072.257600] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1072.258763] env[63028]: DEBUG oslo_vmware.api [None req-f2059770-ba5f-433e-a3f5-b0c33884aea0 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Waiting for the task: (returnval){ [ 1072.258763] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52e57e4a-1174-eed6-13d5-6322af260721" [ 1072.258763] env[63028]: _type = "Task" [ 1072.258763] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1072.269401] env[63028]: DEBUG oslo_vmware.api [None req-eb531fce-c8e6-4b76-8558-fd7370aa3526 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2736229, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.273327] env[63028]: DEBUG oslo_vmware.api [None req-f2059770-ba5f-433e-a3f5-b0c33884aea0 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52e57e4a-1174-eed6-13d5-6322af260721, 'name': SearchDatastore_Task, 'duration_secs': 0.009528} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1072.273654] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f2059770-ba5f-433e-a3f5-b0c33884aea0 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1072.273945] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-f2059770-ba5f-433e-a3f5-b0c33884aea0 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 4ec96b68-2fdb-4150-8d26-53fdf79c8e26] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1072.274395] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f2059770-ba5f-433e-a3f5-b0c33884aea0 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1072.274481] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f2059770-ba5f-433e-a3f5-b0c33884aea0 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1072.275012] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-f2059770-ba5f-433e-a3f5-b0c33884aea0 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1072.275012] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f10f22e3-39b9-477c-bc5a-0ab68bf9803b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.282293] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-f2059770-ba5f-433e-a3f5-b0c33884aea0 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1072.282531] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-f2059770-ba5f-433e-a3f5-b0c33884aea0 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1072.283364] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-64ed004e-d25c-4abe-8a55-0054e18c99b9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.288499] env[63028]: DEBUG oslo_vmware.api [None req-f2059770-ba5f-433e-a3f5-b0c33884aea0 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Waiting for the task: (returnval){ [ 1072.288499] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52b3e8b4-be09-3cb8-95cd-f2cc54c7b45c" [ 1072.288499] env[63028]: _type = "Task" [ 1072.288499] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1072.296290] env[63028]: DEBUG oslo_vmware.api [None req-f2059770-ba5f-433e-a3f5-b0c33884aea0 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52b3e8b4-be09-3cb8-95cd-f2cc54c7b45c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.335041] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5e48a89d-56e9-4d31-a71b-8616dcfca951 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Acquiring lock "85aafadb-81d6-4687-aed1-fbe829e5f95f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1072.335428] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5e48a89d-56e9-4d31-a71b-8616dcfca951 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Lock "85aafadb-81d6-4687-aed1-fbe829e5f95f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1072.335722] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5e48a89d-56e9-4d31-a71b-8616dcfca951 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Acquiring lock "85aafadb-81d6-4687-aed1-fbe829e5f95f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1072.335992] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5e48a89d-56e9-4d31-a71b-8616dcfca951 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Lock "85aafadb-81d6-4687-aed1-fbe829e5f95f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1072.336279] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5e48a89d-56e9-4d31-a71b-8616dcfca951 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Lock "85aafadb-81d6-4687-aed1-fbe829e5f95f-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1072.338507] env[63028]: INFO nova.compute.manager [None req-5e48a89d-56e9-4d31-a71b-8616dcfca951 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Terminating instance [ 1072.465123] env[63028]: DEBUG nova.compute.manager [None req-9fd4d01a-3dac-4bb3-843a-ce388315f026 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: a7ff444e-43bc-4925-9754-86ff30de6751] Starting instance... {{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1072.478654] env[63028]: DEBUG oslo_vmware.api [None req-ee257483-2758-47c4-bbf1-8065e5a2447c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2736227, 'name': PowerOffVM_Task, 'duration_secs': 0.184675} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1072.478923] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee257483-2758-47c4-bbf1-8065e5a2447c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 56e6ade9-893b-4c85-b0b8-e9f7b12cbad6] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1072.479133] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-ee257483-2758-47c4-bbf1-8065e5a2447c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 56e6ade9-893b-4c85-b0b8-e9f7b12cbad6] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1072.479642] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b7c6aa3f-5efb-4157-af29-12567547ca64 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.545277] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-ee257483-2758-47c4-bbf1-8065e5a2447c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 56e6ade9-893b-4c85-b0b8-e9f7b12cbad6] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1072.545446] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-ee257483-2758-47c4-bbf1-8065e5a2447c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 56e6ade9-893b-4c85-b0b8-e9f7b12cbad6] Deleting contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1072.545567] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-ee257483-2758-47c4-bbf1-8065e5a2447c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Deleting the datastore file [datastore2] 56e6ade9-893b-4c85-b0b8-e9f7b12cbad6 {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1072.548389] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d9998df1-55a8-4684-8d24-33bcd2902082 {{(pid=63028) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.553922] env[63028]: DEBUG oslo_concurrency.lockutils [None req-b28eb651-7c74-4410-891e-a25b642e44b0 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Acquiring lock "d41a1eae-bb89-4222-9466-d86af891c654-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1072.554158] env[63028]: DEBUG oslo_concurrency.lockutils [None req-b28eb651-7c74-4410-891e-a25b642e44b0 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Lock "d41a1eae-bb89-4222-9466-d86af891c654-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1072.554378] env[63028]: DEBUG oslo_concurrency.lockutils [None req-b28eb651-7c74-4410-891e-a25b642e44b0 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Lock "d41a1eae-bb89-4222-9466-d86af891c654-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1072.556147] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance 2add1602-122e-41d7-af83-b71d8dab9288 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1072.564081] env[63028]: DEBUG oslo_vmware.api [None req-ee257483-2758-47c4-bbf1-8065e5a2447c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Waiting for the task: (returnval){ [ 1072.564081] env[63028]: value = "task-2736231" [ 1072.564081] env[63028]: _type = "Task" [ 1072.564081] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1072.573991] env[63028]: DEBUG oslo_vmware.api [None req-ee257483-2758-47c4-bbf1-8065e5a2447c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2736231, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.627222] env[63028]: DEBUG nova.network.neutron [req-b86a4894-1633-4986-aa3b-53a3530a7eba req-86f5d11b-24e6-40b5-b6ae-b2df41a549dd service nova] [instance: 4ec96b68-2fdb-4150-8d26-53fdf79c8e26] Updated VIF entry in instance network info cache for port 78ea2d63-5ca1-4e37-808c-688a7c0fc30e. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1072.627634] env[63028]: DEBUG nova.network.neutron [req-b86a4894-1633-4986-aa3b-53a3530a7eba req-86f5d11b-24e6-40b5-b6ae-b2df41a549dd service nova] [instance: 4ec96b68-2fdb-4150-8d26-53fdf79c8e26] Updating instance_info_cache with network_info: [{"id": "78ea2d63-5ca1-4e37-808c-688a7c0fc30e", "address": "fa:16:3e:d6:65:0c", "network": {"id": "a3d9dede-4b44-4172-9541-24cc42ad633f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-595575190-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "847e89af959a4266ab55c1d2106ba8fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8459aaf-d6a8-46fb-ad14-464ac3104695", "external-id": "nsx-vlan-transportzone-46", "segmentation_id": 46, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap78ea2d63-5c", "ovs_interfaceid": "78ea2d63-5ca1-4e37-808c-688a7c0fc30e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1072.768890] env[63028]: DEBUG oslo_vmware.api [None req-eb531fce-c8e6-4b76-8558-fd7370aa3526 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2736229, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.178718} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1072.769773] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-eb531fce-c8e6-4b76-8558-fd7370aa3526 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1072.769773] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-eb531fce-c8e6-4b76-8558-fd7370aa3526 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 2c2fb165-8906-4d42-a839-89ea6c8814ab] Deleted contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1072.769773] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-eb531fce-c8e6-4b76-8558-fd7370aa3526 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 2c2fb165-8906-4d42-a839-89ea6c8814ab] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1072.769935] env[63028]: INFO nova.compute.manager [None req-eb531fce-c8e6-4b76-8558-fd7370aa3526 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 2c2fb165-8906-4d42-a839-89ea6c8814ab] Took 1.11 seconds to destroy the instance on the hypervisor. 
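For reference, the destroy sequence recorded above (PowerOffVM_Task, then UnregisterVM, then DeleteDatastoreFile_Task, with each vCenter task polled until "completed successfully") follows the standard oslo.vmware session pattern. The following is a minimal illustrative sketch only, assuming a reachable vCenter and already-resolved vm_ref/dc_ref managed-object references; the host, credentials, and datastore path are placeholders, and nova itself drives the same session through its vm_util/ds_util/vmops wrappers rather than calling invoke_api directly like this:

    # Sketch of the power-off / unregister / delete-files flow traced in the log.
    # Not nova's actual call path; endpoint and credentials are hypothetical.
    from oslo_vmware import api

    session = api.VMwareAPISession(
        'vc.example.test', 'user', 'secret',          # placeholder vCenter + credentials
        api_retry_count=10, task_poll_interval=0.5)   # wait_for_task polls at this interval

    def destroy_vm(vm_ref, dc_ref, datastore_path):
        # PowerOffVM_Task returns a Task moref; wait_for_task polls it, which is
        # what produces the "progress is 0%" / "completed successfully" records.
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        session.wait_for_task(task)

        # UnregisterVM is synchronous (no Task object), matching the single
        # "Unregistered the VM" record with nothing to poll.
        session.invoke_api(session.vim, 'UnregisterVM', vm_ref)

        # Deleting the instance directory is again a vCenter task.
        file_manager = session.vim.service_content.fileManager
        task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                                  file_manager,
                                  name=datastore_path,   # e.g. '[datastore1] <instance uuid>'
                                  datacenter=dc_ref)
        session.wait_for_task(task)
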
[ 1072.770055] env[63028]: DEBUG oslo.service.loopingcall [None req-eb531fce-c8e6-4b76-8558-fd7370aa3526 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1072.770245] env[63028]: DEBUG nova.compute.manager [-] [instance: 2c2fb165-8906-4d42-a839-89ea6c8814ab] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1072.770338] env[63028]: DEBUG nova.network.neutron [-] [instance: 2c2fb165-8906-4d42-a839-89ea6c8814ab] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1072.797950] env[63028]: DEBUG oslo_vmware.api [None req-f2059770-ba5f-433e-a3f5-b0c33884aea0 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52b3e8b4-be09-3cb8-95cd-f2cc54c7b45c, 'name': SearchDatastore_Task, 'duration_secs': 0.008209} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1072.798700] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-238b2a3c-29e3-44d1-a5a0-4a966f84d4e3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.803931] env[63028]: DEBUG oslo_vmware.api [None req-f2059770-ba5f-433e-a3f5-b0c33884aea0 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Waiting for the task: (returnval){ [ 1072.803931] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5216ef0c-fb9b-5816-671e-be479f05962d" [ 1072.803931] env[63028]: _type = "Task" [ 1072.803931] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1072.811388] env[63028]: DEBUG oslo_vmware.api [None req-f2059770-ba5f-433e-a3f5-b0c33884aea0 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5216ef0c-fb9b-5816-671e-be479f05962d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.841939] env[63028]: DEBUG nova.compute.manager [None req-5e48a89d-56e9-4d31-a71b-8616dcfca951 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Start destroying the instance on the hypervisor. 
{{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1072.842087] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-5e48a89d-56e9-4d31-a71b-8616dcfca951 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1072.842939] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a9d6de9-1652-4a89-a02c-adee89eb792d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.850179] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e48a89d-56e9-4d31-a71b-8616dcfca951 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1072.850402] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-496fc55c-b137-4b53-b78c-484cadb69bce {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.857226] env[63028]: DEBUG oslo_vmware.api [None req-5e48a89d-56e9-4d31-a71b-8616dcfca951 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Waiting for the task: (returnval){ [ 1072.857226] env[63028]: value = "task-2736232" [ 1072.857226] env[63028]: _type = "Task" [ 1072.857226] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1072.864748] env[63028]: DEBUG oslo_vmware.api [None req-5e48a89d-56e9-4d31-a71b-8616dcfca951 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2736232, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.998024] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9fd4d01a-3dac-4bb3-843a-ce388315f026 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1073.060694] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance 6865f832-d409-4b9b-8b6c-33b0bf07d2b2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1073.060966] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Total usable vcpus: 48, total allocated vcpus: 15 {{(pid=63028) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1073.060966] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3520MB phys_disk=200GB used_disk=15GB total_vcpus=48 used_vcpus=15 pci_stats=[] {{(pid=63028) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1073.077644] env[63028]: DEBUG oslo_vmware.api [None req-ee257483-2758-47c4-bbf1-8065e5a2447c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2736231, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.161333} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1073.077865] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-ee257483-2758-47c4-bbf1-8065e5a2447c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1073.078062] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-ee257483-2758-47c4-bbf1-8065e5a2447c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 56e6ade9-893b-4c85-b0b8-e9f7b12cbad6] Deleted contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1073.078236] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-ee257483-2758-47c4-bbf1-8065e5a2447c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 56e6ade9-893b-4c85-b0b8-e9f7b12cbad6] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1073.078418] env[63028]: INFO nova.compute.manager [None req-ee257483-2758-47c4-bbf1-8065e5a2447c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: 56e6ade9-893b-4c85-b0b8-e9f7b12cbad6] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1073.079336] env[63028]: DEBUG oslo.service.loopingcall [None req-ee257483-2758-47c4-bbf1-8065e5a2447c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1073.079602] env[63028]: DEBUG nova.compute.manager [-] [instance: 56e6ade9-893b-4c85-b0b8-e9f7b12cbad6] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1073.079765] env[63028]: DEBUG nova.network.neutron [-] [instance: 56e6ade9-893b-4c85-b0b8-e9f7b12cbad6] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1073.131817] env[63028]: DEBUG oslo_concurrency.lockutils [req-b86a4894-1633-4986-aa3b-53a3530a7eba req-86f5d11b-24e6-40b5-b6ae-b2df41a549dd service nova] Releasing lock "refresh_cache-4ec96b68-2fdb-4150-8d26-53fdf79c8e26" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1073.315260] env[63028]: DEBUG oslo_vmware.api [None req-f2059770-ba5f-433e-a3f5-b0c33884aea0 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5216ef0c-fb9b-5816-671e-be479f05962d, 'name': SearchDatastore_Task, 'duration_secs': 0.009964} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1073.315519] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f2059770-ba5f-433e-a3f5-b0c33884aea0 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1073.315858] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2059770-ba5f-433e-a3f5-b0c33884aea0 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] 4ec96b68-2fdb-4150-8d26-53fdf79c8e26/4ec96b68-2fdb-4150-8d26-53fdf79c8e26.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1073.316344] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-10202bff-56f0-4cb6-a5df-1ca9b3f2d3ff {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.323328] env[63028]: DEBUG oslo_vmware.api [None req-f2059770-ba5f-433e-a3f5-b0c33884aea0 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Waiting for the task: (returnval){ [ 1073.323328] env[63028]: value = "task-2736233" [ 1073.323328] env[63028]: _type = "Task" [ 1073.323328] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1073.332914] env[63028]: DEBUG oslo_vmware.api [None req-f2059770-ba5f-433e-a3f5-b0c33884aea0 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2736233, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.371024] env[63028]: DEBUG oslo_vmware.api [None req-5e48a89d-56e9-4d31-a71b-8616dcfca951 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2736232, 'name': PowerOffVM_Task, 'duration_secs': 0.186484} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1073.371024] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e48a89d-56e9-4d31-a71b-8616dcfca951 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1073.371024] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-5e48a89d-56e9-4d31-a71b-8616dcfca951 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1073.371024] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d0aebac5-a6b2-4f41-8c01-ca121040ee75 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.376147] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84ddd574-c62f-4f85-9eaf-1a304c3ba58c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.383503] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3a7b717-235d-425f-952a-350b67430297 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.417603] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c37fcd7-31c4-4e7c-b041-164ef1bf4e4d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.426794] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c005e983-efc4-4a98-aa52-10deb728a9c8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.432763] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-5e48a89d-56e9-4d31-a71b-8616dcfca951 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1073.432763] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-5e48a89d-56e9-4d31-a71b-8616dcfca951 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Deleting contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1073.432914] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e48a89d-56e9-4d31-a71b-8616dcfca951 tempest-AttachVolumeShelveTestJSON-1261217514 
tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Deleting the datastore file [datastore2] 85aafadb-81d6-4687-aed1-fbe829e5f95f {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1073.433900] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3d3700d2-6bda-4879-8235-16867d910a8a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.443381] env[63028]: DEBUG nova.compute.provider_tree [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1073.449657] env[63028]: DEBUG oslo_vmware.api [None req-5e48a89d-56e9-4d31-a71b-8616dcfca951 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Waiting for the task: (returnval){ [ 1073.449657] env[63028]: value = "task-2736235" [ 1073.449657] env[63028]: _type = "Task" [ 1073.449657] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1073.457269] env[63028]: DEBUG oslo_vmware.api [None req-5e48a89d-56e9-4d31-a71b-8616dcfca951 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2736235, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.567806] env[63028]: DEBUG nova.network.neutron [-] [instance: 2c2fb165-8906-4d42-a839-89ea6c8814ab] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1073.597650] env[63028]: DEBUG oslo_concurrency.lockutils [None req-b28eb651-7c74-4410-891e-a25b642e44b0 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Acquiring lock "refresh_cache-d41a1eae-bb89-4222-9466-d86af891c654" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1073.598038] env[63028]: DEBUG oslo_concurrency.lockutils [None req-b28eb651-7c74-4410-891e-a25b642e44b0 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Acquired lock "refresh_cache-d41a1eae-bb89-4222-9466-d86af891c654" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1073.598382] env[63028]: DEBUG nova.network.neutron [None req-b28eb651-7c74-4410-891e-a25b642e44b0 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1073.797276] env[63028]: DEBUG nova.network.neutron [-] [instance: 56e6ade9-893b-4c85-b0b8-e9f7b12cbad6] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1073.833974] env[63028]: DEBUG oslo_vmware.api [None req-f2059770-ba5f-433e-a3f5-b0c33884aea0 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2736233, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.947008] env[63028]: DEBUG nova.scheduler.client.report [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1073.959144] env[63028]: DEBUG oslo_vmware.api [None req-5e48a89d-56e9-4d31-a71b-8616dcfca951 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2736235, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.189432} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1073.959387] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e48a89d-56e9-4d31-a71b-8616dcfca951 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1073.959573] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-5e48a89d-56e9-4d31-a71b-8616dcfca951 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Deleted contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1073.959746] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-5e48a89d-56e9-4d31-a71b-8616dcfca951 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1073.959922] env[63028]: INFO nova.compute.manager [None req-5e48a89d-56e9-4d31-a71b-8616dcfca951 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1073.960376] env[63028]: DEBUG oslo.service.loopingcall [None req-5e48a89d-56e9-4d31-a71b-8616dcfca951 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1073.961268] env[63028]: DEBUG nova.compute.manager [-] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1073.961367] env[63028]: DEBUG nova.network.neutron [-] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1073.964068] env[63028]: DEBUG nova.compute.manager [req-8eb1dc9f-35a6-4951-817f-3ae0c65c3f48 req-1c24f931-e8b0-424e-bd50-af88a2f157c8 service nova] [instance: 2c2fb165-8906-4d42-a839-89ea6c8814ab] Received event network-vif-deleted-00aeadf5-50e8-433f-a1da-81ac2a8f259f {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1073.964244] env[63028]: DEBUG nova.compute.manager [req-8eb1dc9f-35a6-4951-817f-3ae0c65c3f48 req-1c24f931-e8b0-424e-bd50-af88a2f157c8 service nova] [instance: 56e6ade9-893b-4c85-b0b8-e9f7b12cbad6] Received event network-vif-deleted-1f5c01d1-9623-425e-8309-336dd1d961fa {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1074.070602] env[63028]: INFO nova.compute.manager [-] [instance: 2c2fb165-8906-4d42-a839-89ea6c8814ab] Took 1.30 seconds to deallocate network for instance. [ 1074.300388] env[63028]: INFO nova.compute.manager [-] [instance: 56e6ade9-893b-4c85-b0b8-e9f7b12cbad6] Took 1.22 seconds to deallocate network for instance. [ 1074.334262] env[63028]: DEBUG oslo_vmware.api [None req-f2059770-ba5f-433e-a3f5-b0c33884aea0 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2736233, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.603322} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1074.335390] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2059770-ba5f-433e-a3f5-b0c33884aea0 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] 4ec96b68-2fdb-4150-8d26-53fdf79c8e26/4ec96b68-2fdb-4150-8d26-53fdf79c8e26.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1074.335782] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-f2059770-ba5f-433e-a3f5-b0c33884aea0 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 4ec96b68-2fdb-4150-8d26-53fdf79c8e26] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1074.336132] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f1b52de7-7ae4-4063-8620-f2a2b389e9f7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.343318] env[63028]: DEBUG oslo_vmware.api [None req-f2059770-ba5f-433e-a3f5-b0c33884aea0 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Waiting for the task: (returnval){ [ 1074.343318] env[63028]: value = "task-2736236" [ 1074.343318] env[63028]: _type = "Task" [ 1074.343318] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1074.352726] env[63028]: DEBUG oslo_vmware.api [None req-f2059770-ba5f-433e-a3f5-b0c33884aea0 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2736236, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.427304] env[63028]: DEBUG nova.network.neutron [None req-b28eb651-7c74-4410-891e-a25b642e44b0 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Updating instance_info_cache with network_info: [{"id": "c5f1d585-d624-4525-a5b2-132b18bf9378", "address": "fa:16:3e:93:da:98", "network": {"id": "49670a42-3caa-4492-8b32-f16f3fe77f4b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1158317332-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.229", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b4dcaef840f940bda057d0371cdc5adb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4df917f7-847a-4c0e-b0e3-69a52e4a1554", "external-id": "cl2-zone-457", "segmentation_id": 457, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc5f1d585-d6", "ovs_interfaceid": "c5f1d585-d624-4525-a5b2-132b18bf9378", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1074.455676] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63028) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1074.455913] env[63028]: DEBUG oslo_concurrency.lockutils [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 4.457s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1074.456204] env[63028]: DEBUG oslo_concurrency.lockutils [None req-94b92cf4-72cd-48b1-86e7-cfb6a1cebfbe tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 17.496s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1074.456440] env[63028]: DEBUG oslo_concurrency.lockutils [None req-94b92cf4-72cd-48b1-86e7-cfb6a1cebfbe tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s 
{{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1074.461036] env[63028]: DEBUG oslo_concurrency.lockutils [None req-77578b6f-e071-485a-b276-8efcef9701e9 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.980s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1074.461036] env[63028]: INFO nova.compute.claims [None req-77578b6f-e071-485a-b276-8efcef9701e9 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: 1f8415cc-f544-4c89-9863-43d5ae9144e8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1074.493328] env[63028]: INFO nova.scheduler.client.report [None req-94b92cf4-72cd-48b1-86e7-cfb6a1cebfbe tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Deleted allocations for instance 8bb61bfa-d44e-4e06-867a-445d9b3db660 [ 1074.577746] env[63028]: DEBUG oslo_concurrency.lockutils [None req-eb531fce-c8e6-4b76-8558-fd7370aa3526 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1074.807527] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ee257483-2758-47c4-bbf1-8065e5a2447c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1074.856624] env[63028]: DEBUG oslo_vmware.api [None req-f2059770-ba5f-433e-a3f5-b0c33884aea0 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2736236, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.115025} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1074.857444] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-f2059770-ba5f-433e-a3f5-b0c33884aea0 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 4ec96b68-2fdb-4150-8d26-53fdf79c8e26] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1074.860918] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14309d46-1a0c-4256-99a2-2433def76036 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.885312] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-f2059770-ba5f-433e-a3f5-b0c33884aea0 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 4ec96b68-2fdb-4150-8d26-53fdf79c8e26] Reconfiguring VM instance instance-00000068 to attach disk [datastore1] 4ec96b68-2fdb-4150-8d26-53fdf79c8e26/4ec96b68-2fdb-4150-8d26-53fdf79c8e26.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1074.885684] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-18e8b0db-9a0d-4156-992f-ff6fee19a7ac {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.910389] env[63028]: DEBUG oslo_vmware.api [None req-f2059770-ba5f-433e-a3f5-b0c33884aea0 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Waiting for the task: (returnval){ [ 1074.910389] env[63028]: value = "task-2736237" [ 1074.910389] env[63028]: _type = "Task" [ 1074.910389] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1074.920572] env[63028]: DEBUG oslo_vmware.api [None req-f2059770-ba5f-433e-a3f5-b0c33884aea0 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2736237, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.930107] env[63028]: DEBUG oslo_concurrency.lockutils [None req-b28eb651-7c74-4410-891e-a25b642e44b0 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Releasing lock "refresh_cache-d41a1eae-bb89-4222-9466-d86af891c654" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1075.003809] env[63028]: DEBUG oslo_concurrency.lockutils [None req-94b92cf4-72cd-48b1-86e7-cfb6a1cebfbe tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Lock "8bb61bfa-d44e-4e06-867a-445d9b3db660" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.020s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1075.286745] env[63028]: DEBUG nova.network.neutron [-] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1075.418602] env[63028]: DEBUG oslo_vmware.api [None req-f2059770-ba5f-433e-a3f5-b0c33884aea0 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2736237, 'name': ReconfigVM_Task, 'duration_secs': 0.308461} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1075.418874] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-f2059770-ba5f-433e-a3f5-b0c33884aea0 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 4ec96b68-2fdb-4150-8d26-53fdf79c8e26] Reconfigured VM instance instance-00000068 to attach disk [datastore1] 4ec96b68-2fdb-4150-8d26-53fdf79c8e26/4ec96b68-2fdb-4150-8d26-53fdf79c8e26.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1075.419495] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-517825f8-f2cf-43ea-a2b9-309f24353088 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.425234] env[63028]: DEBUG oslo_vmware.api [None req-f2059770-ba5f-433e-a3f5-b0c33884aea0 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Waiting for the task: (returnval){ [ 1075.425234] env[63028]: value = "task-2736239" [ 1075.425234] env[63028]: _type = "Task" [ 1075.425234] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1075.433151] env[63028]: DEBUG oslo_vmware.api [None req-f2059770-ba5f-433e-a3f5-b0c33884aea0 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2736239, 'name': Rename_Task} progress is 5%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.439146] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a04ed10-f7ec-42fe-8d5b-832fec74b33b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.444976] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a27bc6e-2c78-4d30-9cdc-f42cab43dd42 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.743074] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99200c71-6550-4e10-acd0-676f77bce44d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.751946] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4efb979-5a07-474f-aaa1-1b4aec188163 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.781201] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1040c9c6-cb17-4f98-a193-f610b6642281 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.787896] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f92d9cb8-b979-4090-88a2-4e59b46bf058 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.791715] env[63028]: INFO nova.compute.manager [-] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Took 1.83 seconds to deallocate network for instance. [ 1075.803498] env[63028]: DEBUG nova.compute.provider_tree [None req-77578b6f-e071-485a-b276-8efcef9701e9 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1075.935292] env[63028]: DEBUG oslo_vmware.api [None req-f2059770-ba5f-433e-a3f5-b0c33884aea0 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2736239, 'name': Rename_Task, 'duration_secs': 0.13229} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1075.935599] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2059770-ba5f-433e-a3f5-b0c33884aea0 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 4ec96b68-2fdb-4150-8d26-53fdf79c8e26] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1075.935831] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8f934a54-d7d8-427d-a9ee-13381d35b24d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.943983] env[63028]: DEBUG oslo_vmware.api [None req-f2059770-ba5f-433e-a3f5-b0c33884aea0 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Waiting for the task: (returnval){ [ 1075.943983] env[63028]: value = "task-2736240" [ 1075.943983] env[63028]: _type = "Task" [ 1075.943983] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1075.951321] env[63028]: DEBUG oslo_vmware.api [None req-f2059770-ba5f-433e-a3f5-b0c33884aea0 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2736240, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.987474] env[63028]: DEBUG nova.compute.manager [req-320e7d95-bc71-479b-973a-b9638169cad3 req-dae2eead-5bc1-4137-9278-3981c63323ae service nova] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Received event network-vif-deleted-2e2d8403-826c-4e24-ba3c-123d444d1fdc {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1076.306948] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5e48a89d-56e9-4d31-a71b-8616dcfca951 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1076.307768] env[63028]: DEBUG nova.scheduler.client.report [None req-77578b6f-e071-485a-b276-8efcef9701e9 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1076.455896] env[63028]: DEBUG oslo_vmware.api [None req-f2059770-ba5f-433e-a3f5-b0c33884aea0 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2736240, 'name': PowerOnVM_Task, 'duration_secs': 0.458912} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1076.456171] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2059770-ba5f-433e-a3f5-b0c33884aea0 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 4ec96b68-2fdb-4150-8d26-53fdf79c8e26] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1076.456381] env[63028]: INFO nova.compute.manager [None req-f2059770-ba5f-433e-a3f5-b0c33884aea0 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 4ec96b68-2fdb-4150-8d26-53fdf79c8e26] Took 6.89 seconds to spawn the instance on the hypervisor. [ 1076.456554] env[63028]: DEBUG nova.compute.manager [None req-f2059770-ba5f-433e-a3f5-b0c33884aea0 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 4ec96b68-2fdb-4150-8d26-53fdf79c8e26] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1076.457285] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5672416-dc10-4ccf-afd5-2c71a4beb9ec {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.542486] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63d9822e-6df0-4d43-91ed-2c71a8dde4dc {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.564202] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-631900ae-06fd-4a11-8135-ab0b1dfd7516 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.571290] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-b28eb651-7c74-4410-891e-a25b642e44b0 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Updating instance 'd41a1eae-bb89-4222-9466-d86af891c654' progress to 83 {{(pid=63028) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1076.813144] env[63028]: DEBUG oslo_concurrency.lockutils [None req-77578b6f-e071-485a-b276-8efcef9701e9 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.354s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1076.813739] env[63028]: DEBUG nova.compute.manager [None req-77578b6f-e071-485a-b276-8efcef9701e9 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: 1f8415cc-f544-4c89-9863-43d5ae9144e8] Start building networks asynchronously for instance. 
{{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1076.816466] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7590fdc3-7918-4146-9016-8089567d1b4a tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 18.556s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1076.816676] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7590fdc3-7918-4146-9016-8089567d1b4a tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1076.818657] env[63028]: DEBUG oslo_concurrency.lockutils [None req-df73c557-d2b1-4439-881a-38c263ae7aa1 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.999s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1076.818847] env[63028]: DEBUG oslo_concurrency.lockutils [None req-df73c557-d2b1-4439-881a-38c263ae7aa1 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1076.820645] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f95dac67-55ca-4cac-9cc2-49f5bfc97fa0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.919s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1076.820830] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f95dac67-55ca-4cac-9cc2-49f5bfc97fa0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1076.822629] env[63028]: DEBUG oslo_concurrency.lockutils [None req-18cab1b2-a93d-4707-a623-a0b75ca018c2 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.940s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1076.824081] env[63028]: INFO nova.compute.claims [None req-18cab1b2-a93d-4707-a623-a0b75ca018c2 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: 899496ae-8463-42e0-a287-b141d956fa0a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1076.843117] env[63028]: INFO nova.scheduler.client.report [None req-f95dac67-55ca-4cac-9cc2-49f5bfc97fa0 tempest-MultipleCreateTestJSON-393833464 
tempest-MultipleCreateTestJSON-393833464-project-member] Deleted allocations for instance 1cf111f2-df5e-48a6-905a-bc2d3ea45202 [ 1076.845246] env[63028]: INFO nova.scheduler.client.report [None req-df73c557-d2b1-4439-881a-38c263ae7aa1 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Deleted allocations for instance 3566ab6f-1f8a-472d-9efb-47fa2520a215 [ 1076.857949] env[63028]: INFO nova.scheduler.client.report [None req-7590fdc3-7918-4146-9016-8089567d1b4a tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Deleted allocations for instance 56d6982d-9f76-4952-8c8b-f64b3c8d02fe [ 1076.972357] env[63028]: INFO nova.compute.manager [None req-f2059770-ba5f-433e-a3f5-b0c33884aea0 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 4ec96b68-2fdb-4150-8d26-53fdf79c8e26] Took 23.51 seconds to build instance. [ 1077.077853] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-b28eb651-7c74-4410-891e-a25b642e44b0 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1077.078205] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-719fe969-82f4-471e-a8e4-b993d979febd {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.085155] env[63028]: DEBUG oslo_vmware.api [None req-b28eb651-7c74-4410-891e-a25b642e44b0 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Waiting for the task: (returnval){ [ 1077.085155] env[63028]: value = "task-2736241" [ 1077.085155] env[63028]: _type = "Task" [ 1077.085155] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1077.093821] env[63028]: DEBUG oslo_vmware.api [None req-b28eb651-7c74-4410-891e-a25b642e44b0 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2736241, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.328973] env[63028]: DEBUG nova.compute.utils [None req-77578b6f-e071-485a-b276-8efcef9701e9 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1077.333080] env[63028]: DEBUG nova.compute.manager [None req-77578b6f-e071-485a-b276-8efcef9701e9 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: 1f8415cc-f544-4c89-9863-43d5ae9144e8] Allocating IP information in the background. 
{{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1077.333430] env[63028]: DEBUG nova.network.neutron [None req-77578b6f-e071-485a-b276-8efcef9701e9 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: 1f8415cc-f544-4c89-9863-43d5ae9144e8] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1077.355534] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f95dac67-55ca-4cac-9cc2-49f5bfc97fa0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Lock "1cf111f2-df5e-48a6-905a-bc2d3ea45202" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 20.953s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1077.356935] env[63028]: DEBUG oslo_concurrency.lockutils [None req-df73c557-d2b1-4439-881a-38c263ae7aa1 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Lock "3566ab6f-1f8a-472d-9efb-47fa2520a215" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.055s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1077.365629] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7590fdc3-7918-4146-9016-8089567d1b4a tempest-SecurityGroupsTestJSON-350708257 tempest-SecurityGroupsTestJSON-350708257-project-member] Lock "56d6982d-9f76-4952-8c8b-f64b3c8d02fe" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 23.181s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1077.389692] env[63028]: DEBUG nova.policy [None req-77578b6f-e071-485a-b276-8efcef9701e9 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '43ed2fb3f1a944fdac8ee7778f171cd0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8efc6d89903c454eb39136a76e0adef5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 1077.474481] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f2059770-ba5f-433e-a3f5-b0c33884aea0 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Lock "4ec96b68-2fdb-4150-8d26-53fdf79c8e26" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.021s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1077.596227] env[63028]: DEBUG oslo_vmware.api [None req-b28eb651-7c74-4410-891e-a25b642e44b0 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2736241, 'name': PowerOnVM_Task, 'duration_secs': 0.428405} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1077.596508] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-b28eb651-7c74-4410-891e-a25b642e44b0 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1077.596695] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-b28eb651-7c74-4410-891e-a25b642e44b0 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Updating instance 'd41a1eae-bb89-4222-9466-d86af891c654' progress to 100 {{(pid=63028) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1077.674492] env[63028]: DEBUG nova.compute.manager [req-cb2ddb25-9041-4269-b39f-3f7aefc432f2 req-25a57254-245d-48ba-8d94-7e699178dd5b service nova] [instance: 4ec96b68-2fdb-4150-8d26-53fdf79c8e26] Received event network-changed-78ea2d63-5ca1-4e37-808c-688a7c0fc30e {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1077.674697] env[63028]: DEBUG nova.compute.manager [req-cb2ddb25-9041-4269-b39f-3f7aefc432f2 req-25a57254-245d-48ba-8d94-7e699178dd5b service nova] [instance: 4ec96b68-2fdb-4150-8d26-53fdf79c8e26] Refreshing instance network info cache due to event network-changed-78ea2d63-5ca1-4e37-808c-688a7c0fc30e. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1077.675674] env[63028]: DEBUG oslo_concurrency.lockutils [req-cb2ddb25-9041-4269-b39f-3f7aefc432f2 req-25a57254-245d-48ba-8d94-7e699178dd5b service nova] Acquiring lock "refresh_cache-4ec96b68-2fdb-4150-8d26-53fdf79c8e26" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1077.675674] env[63028]: DEBUG oslo_concurrency.lockutils [req-cb2ddb25-9041-4269-b39f-3f7aefc432f2 req-25a57254-245d-48ba-8d94-7e699178dd5b service nova] Acquired lock "refresh_cache-4ec96b68-2fdb-4150-8d26-53fdf79c8e26" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1077.675674] env[63028]: DEBUG nova.network.neutron [req-cb2ddb25-9041-4269-b39f-3f7aefc432f2 req-25a57254-245d-48ba-8d94-7e699178dd5b service nova] [instance: 4ec96b68-2fdb-4150-8d26-53fdf79c8e26] Refreshing network info cache for port 78ea2d63-5ca1-4e37-808c-688a7c0fc30e {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1077.720810] env[63028]: DEBUG nova.network.neutron [None req-77578b6f-e071-485a-b276-8efcef9701e9 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: 1f8415cc-f544-4c89-9863-43d5ae9144e8] Successfully created port: b2a35d65-e481-4d0a-8dc1-3b3b8dec509b {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1077.835757] env[63028]: DEBUG nova.compute.manager [None req-77578b6f-e071-485a-b276-8efcef9701e9 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: 1f8415cc-f544-4c89-9863-43d5ae9144e8] Start building block device mappings for instance. 
{{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1078.040573] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Acquiring lock "3d91b47d-a5f7-4a10-aefb-1ad9c84c63e4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1078.041221] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Lock "3d91b47d-a5f7-4a10-aefb-1ad9c84c63e4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1078.076950] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Acquiring lock "719e014f-0544-4832-81ae-26b028b17be0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1078.076950] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Lock "719e014f-0544-4832-81ae-26b028b17be0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1078.228076] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3c7d417-3e53-4890-834e-9d9f74dfbae0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.238463] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f49c233-e3ca-4745-9200-101c02fd9145 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.274973] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a380efaf-855c-4e8a-aa3e-47cea6d012bf {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.282976] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53edc257-55f7-48d8-9c13-ecb6297308bc {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.297024] env[63028]: DEBUG nova.compute.provider_tree [None req-18cab1b2-a93d-4707-a623-a0b75ca018c2 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1078.543571] env[63028]: DEBUG nova.compute.manager [None req-5bef326c-441f-4862-ba86-058817bdf4f0 
tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 3d91b47d-a5f7-4a10-aefb-1ad9c84c63e4] Starting instance... {{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1078.546671] env[63028]: DEBUG nova.network.neutron [req-cb2ddb25-9041-4269-b39f-3f7aefc432f2 req-25a57254-245d-48ba-8d94-7e699178dd5b service nova] [instance: 4ec96b68-2fdb-4150-8d26-53fdf79c8e26] Updated VIF entry in instance network info cache for port 78ea2d63-5ca1-4e37-808c-688a7c0fc30e. {{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1078.547041] env[63028]: DEBUG nova.network.neutron [req-cb2ddb25-9041-4269-b39f-3f7aefc432f2 req-25a57254-245d-48ba-8d94-7e699178dd5b service nova] [instance: 4ec96b68-2fdb-4150-8d26-53fdf79c8e26] Updating instance_info_cache with network_info: [{"id": "78ea2d63-5ca1-4e37-808c-688a7c0fc30e", "address": "fa:16:3e:d6:65:0c", "network": {"id": "a3d9dede-4b44-4172-9541-24cc42ad633f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-595575190-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "847e89af959a4266ab55c1d2106ba8fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8459aaf-d6a8-46fb-ad14-464ac3104695", "external-id": "nsx-vlan-transportzone-46", "segmentation_id": 46, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap78ea2d63-5c", "ovs_interfaceid": "78ea2d63-5ca1-4e37-808c-688a7c0fc30e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1078.576832] env[63028]: DEBUG nova.compute.manager [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 719e014f-0544-4832-81ae-26b028b17be0] Starting instance... 
{{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1078.800556] env[63028]: DEBUG nova.scheduler.client.report [None req-18cab1b2-a93d-4707-a623-a0b75ca018c2 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1078.850800] env[63028]: DEBUG nova.compute.manager [None req-77578b6f-e071-485a-b276-8efcef9701e9 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: 1f8415cc-f544-4c89-9863-43d5ae9144e8] Start spawning the instance on the hypervisor. {{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1078.877452] env[63028]: DEBUG nova.virt.hardware [None req-77578b6f-e071-485a-b276-8efcef9701e9 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1078.877760] env[63028]: DEBUG nova.virt.hardware [None req-77578b6f-e071-485a-b276-8efcef9701e9 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1078.877956] env[63028]: DEBUG nova.virt.hardware [None req-77578b6f-e071-485a-b276-8efcef9701e9 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1078.878195] env[63028]: DEBUG nova.virt.hardware [None req-77578b6f-e071-485a-b276-8efcef9701e9 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1078.878368] env[63028]: DEBUG nova.virt.hardware [None req-77578b6f-e071-485a-b276-8efcef9701e9 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1078.878526] env[63028]: DEBUG 
nova.virt.hardware [None req-77578b6f-e071-485a-b276-8efcef9701e9 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1078.878780] env[63028]: DEBUG nova.virt.hardware [None req-77578b6f-e071-485a-b276-8efcef9701e9 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1078.878969] env[63028]: DEBUG nova.virt.hardware [None req-77578b6f-e071-485a-b276-8efcef9701e9 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1078.879193] env[63028]: DEBUG nova.virt.hardware [None req-77578b6f-e071-485a-b276-8efcef9701e9 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1078.879397] env[63028]: DEBUG nova.virt.hardware [None req-77578b6f-e071-485a-b276-8efcef9701e9 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1078.879606] env[63028]: DEBUG nova.virt.hardware [None req-77578b6f-e071-485a-b276-8efcef9701e9 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1078.880569] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2848db9-fc79-442b-ae14-d589cbbe78b7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.889178] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0f454c9-06c3-46e2-ac00-0491e641406c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.051374] env[63028]: DEBUG oslo_concurrency.lockutils [req-cb2ddb25-9041-4269-b39f-3f7aefc432f2 req-25a57254-245d-48ba-8d94-7e699178dd5b service nova] Releasing lock "refresh_cache-4ec96b68-2fdb-4150-8d26-53fdf79c8e26" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1079.076314] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1079.097240] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 
tempest-MultipleCreateTestJSON-393833464-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1079.305672] env[63028]: DEBUG oslo_concurrency.lockutils [None req-18cab1b2-a93d-4707-a623-a0b75ca018c2 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.483s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1079.306245] env[63028]: DEBUG nova.compute.manager [None req-18cab1b2-a93d-4707-a623-a0b75ca018c2 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: 899496ae-8463-42e0-a287-b141d956fa0a] Start building networks asynchronously for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1079.311576] env[63028]: DEBUG oslo_concurrency.lockutils [None req-807df1ad-a56e-4542-a89e-860ad319f30c tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.504s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1079.311576] env[63028]: INFO nova.compute.claims [None req-807df1ad-a56e-4542-a89e-860ad319f30c tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] [instance: 2add1602-122e-41d7-af83-b71d8dab9288] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1079.366524] env[63028]: DEBUG nova.compute.manager [req-a44499fe-0594-45ed-81c9-4c5cc675f80e req-8a7b9337-1d8f-4866-8396-0ec2950934ea service nova] [instance: 1f8415cc-f544-4c89-9863-43d5ae9144e8] Received event network-vif-plugged-b2a35d65-e481-4d0a-8dc1-3b3b8dec509b {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1079.366747] env[63028]: DEBUG oslo_concurrency.lockutils [req-a44499fe-0594-45ed-81c9-4c5cc675f80e req-8a7b9337-1d8f-4866-8396-0ec2950934ea service nova] Acquiring lock "1f8415cc-f544-4c89-9863-43d5ae9144e8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1079.367046] env[63028]: DEBUG oslo_concurrency.lockutils [req-a44499fe-0594-45ed-81c9-4c5cc675f80e req-8a7b9337-1d8f-4866-8396-0ec2950934ea service nova] Lock "1f8415cc-f544-4c89-9863-43d5ae9144e8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1079.367147] env[63028]: DEBUG oslo_concurrency.lockutils [req-a44499fe-0594-45ed-81c9-4c5cc675f80e req-8a7b9337-1d8f-4866-8396-0ec2950934ea service nova] Lock "1f8415cc-f544-4c89-9863-43d5ae9144e8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1079.367317] env[63028]: DEBUG nova.compute.manager [req-a44499fe-0594-45ed-81c9-4c5cc675f80e req-8a7b9337-1d8f-4866-8396-0ec2950934ea 
service nova] [instance: 1f8415cc-f544-4c89-9863-43d5ae9144e8] No waiting events found dispatching network-vif-plugged-b2a35d65-e481-4d0a-8dc1-3b3b8dec509b {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1079.367484] env[63028]: WARNING nova.compute.manager [req-a44499fe-0594-45ed-81c9-4c5cc675f80e req-8a7b9337-1d8f-4866-8396-0ec2950934ea service nova] [instance: 1f8415cc-f544-4c89-9863-43d5ae9144e8] Received unexpected event network-vif-plugged-b2a35d65-e481-4d0a-8dc1-3b3b8dec509b for instance with vm_state building and task_state spawning. [ 1079.579071] env[63028]: DEBUG nova.network.neutron [None req-77578b6f-e071-485a-b276-8efcef9701e9 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: 1f8415cc-f544-4c89-9863-43d5ae9144e8] Successfully updated port: b2a35d65-e481-4d0a-8dc1-3b3b8dec509b {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1079.817341] env[63028]: DEBUG nova.compute.utils [None req-18cab1b2-a93d-4707-a623-a0b75ca018c2 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1079.821431] env[63028]: DEBUG nova.compute.manager [None req-18cab1b2-a93d-4707-a623-a0b75ca018c2 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: 899496ae-8463-42e0-a287-b141d956fa0a] Allocating IP information in the background. {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1079.821749] env[63028]: DEBUG nova.network.neutron [None req-18cab1b2-a93d-4707-a623-a0b75ca018c2 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: 899496ae-8463-42e0-a287-b141d956fa0a] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1079.845050] env[63028]: DEBUG oslo_concurrency.lockutils [None req-6daeb339-3b8e-4790-b9aa-2df593716251 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Acquiring lock "f0ca0d73-d428-4b8c-acac-a80b7b7dd793" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1079.845050] env[63028]: DEBUG oslo_concurrency.lockutils [None req-6daeb339-3b8e-4790-b9aa-2df593716251 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Lock "f0ca0d73-d428-4b8c-acac-a80b7b7dd793" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1079.845050] env[63028]: DEBUG oslo_concurrency.lockutils [None req-6daeb339-3b8e-4790-b9aa-2df593716251 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Acquiring lock "f0ca0d73-d428-4b8c-acac-a80b7b7dd793-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1079.845050] env[63028]: DEBUG oslo_concurrency.lockutils [None req-6daeb339-3b8e-4790-b9aa-2df593716251 
tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Lock "f0ca0d73-d428-4b8c-acac-a80b7b7dd793-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1079.845050] env[63028]: DEBUG oslo_concurrency.lockutils [None req-6daeb339-3b8e-4790-b9aa-2df593716251 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Lock "f0ca0d73-d428-4b8c-acac-a80b7b7dd793-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1079.846182] env[63028]: INFO nova.compute.manager [None req-6daeb339-3b8e-4790-b9aa-2df593716251 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: f0ca0d73-d428-4b8c-acac-a80b7b7dd793] Terminating instance [ 1079.879936] env[63028]: DEBUG nova.network.neutron [None req-8591df8e-c073-4f58-b4a9-6a93be3007ed tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Port c5f1d585-d624-4525-a5b2-132b18bf9378 binding to destination host cpu-1 is already ACTIVE {{(pid=63028) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1079.879936] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8591df8e-c073-4f58-b4a9-6a93be3007ed tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Acquiring lock "refresh_cache-d41a1eae-bb89-4222-9466-d86af891c654" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1079.879936] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8591df8e-c073-4f58-b4a9-6a93be3007ed tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Acquired lock "refresh_cache-d41a1eae-bb89-4222-9466-d86af891c654" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1079.879936] env[63028]: DEBUG nova.network.neutron [None req-8591df8e-c073-4f58-b4a9-6a93be3007ed tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1079.882100] env[63028]: DEBUG nova.policy [None req-18cab1b2-a93d-4707-a623-a0b75ca018c2 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1b48f3f2a85945379bdb33bf153bde9c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '25a6457f62d149629c09589feb1a550c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 1080.081534] env[63028]: DEBUG oslo_concurrency.lockutils [None 
req-77578b6f-e071-485a-b276-8efcef9701e9 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Acquiring lock "refresh_cache-1f8415cc-f544-4c89-9863-43d5ae9144e8" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1080.081696] env[63028]: DEBUG oslo_concurrency.lockutils [None req-77578b6f-e071-485a-b276-8efcef9701e9 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Acquired lock "refresh_cache-1f8415cc-f544-4c89-9863-43d5ae9144e8" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1080.081844] env[63028]: DEBUG nova.network.neutron [None req-77578b6f-e071-485a-b276-8efcef9701e9 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: 1f8415cc-f544-4c89-9863-43d5ae9144e8] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1080.206260] env[63028]: DEBUG nova.network.neutron [None req-18cab1b2-a93d-4707-a623-a0b75ca018c2 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: 899496ae-8463-42e0-a287-b141d956fa0a] Successfully created port: 197b3459-f9f1-4fe3-a9ad-169350b4d637 {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1080.320731] env[63028]: DEBUG nova.compute.manager [None req-18cab1b2-a93d-4707-a623-a0b75ca018c2 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: 899496ae-8463-42e0-a287-b141d956fa0a] Start building block device mappings for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1080.352803] env[63028]: DEBUG nova.compute.manager [None req-6daeb339-3b8e-4790-b9aa-2df593716251 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: f0ca0d73-d428-4b8c-acac-a80b7b7dd793] Start destroying the instance on the hypervisor. 
{{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1080.353311] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-6daeb339-3b8e-4790-b9aa-2df593716251 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: f0ca0d73-d428-4b8c-acac-a80b7b7dd793] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1080.354795] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7671d77-e00e-415a-89c8-2b2afa0573ab {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.363825] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-6daeb339-3b8e-4790-b9aa-2df593716251 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: f0ca0d73-d428-4b8c-acac-a80b7b7dd793] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1080.364286] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d8e88410-54b7-4f4d-94c0-1a803f8fe4d6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.373965] env[63028]: DEBUG oslo_vmware.api [None req-6daeb339-3b8e-4790-b9aa-2df593716251 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Waiting for the task: (returnval){ [ 1080.373965] env[63028]: value = "task-2736243" [ 1080.373965] env[63028]: _type = "Task" [ 1080.373965] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1080.383696] env[63028]: DEBUG oslo_vmware.api [None req-6daeb339-3b8e-4790-b9aa-2df593716251 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2736243, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1080.665447] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f0e6452-b99f-4e05-a147-9b77c54cb971 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.671519] env[63028]: DEBUG nova.network.neutron [None req-77578b6f-e071-485a-b276-8efcef9701e9 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: 1f8415cc-f544-4c89-9863-43d5ae9144e8] Instance cache missing network info. 
{{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1080.678187] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbdffda5-1d0e-47ff-a655-e0cfdd573ec8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.717589] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1da0e2a-b148-438f-a26f-4381df26ab6b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.725958] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70a85b07-ff9c-4cb6-ab28-d1d653ed16c8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.740251] env[63028]: DEBUG nova.compute.provider_tree [None req-807df1ad-a56e-4542-a89e-860ad319f30c tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1080.831314] env[63028]: DEBUG nova.network.neutron [None req-8591df8e-c073-4f58-b4a9-6a93be3007ed tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Updating instance_info_cache with network_info: [{"id": "c5f1d585-d624-4525-a5b2-132b18bf9378", "address": "fa:16:3e:93:da:98", "network": {"id": "49670a42-3caa-4492-8b32-f16f3fe77f4b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1158317332-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.229", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b4dcaef840f940bda057d0371cdc5adb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4df917f7-847a-4c0e-b0e3-69a52e4a1554", "external-id": "cl2-zone-457", "segmentation_id": 457, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc5f1d585-d6", "ovs_interfaceid": "c5f1d585-d624-4525-a5b2-132b18bf9378", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1080.886103] env[63028]: DEBUG oslo_vmware.api [None req-6daeb339-3b8e-4790-b9aa-2df593716251 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2736243, 'name': PowerOffVM_Task, 'duration_secs': 0.263272} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1080.887794] env[63028]: DEBUG nova.network.neutron [None req-77578b6f-e071-485a-b276-8efcef9701e9 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: 1f8415cc-f544-4c89-9863-43d5ae9144e8] Updating instance_info_cache with network_info: [{"id": "b2a35d65-e481-4d0a-8dc1-3b3b8dec509b", "address": "fa:16:3e:fd:34:eb", "network": {"id": "9c32515c-66bf-4a5f-9011-975fe2f6b264", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1101331936-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8efc6d89903c454eb39136a76e0adef5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92fe29b3-0907-453d-aabb-5559c4bd7c0f", "external-id": "nsx-vlan-transportzone-482", "segmentation_id": 482, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb2a35d65-e4", "ovs_interfaceid": "b2a35d65-e481-4d0a-8dc1-3b3b8dec509b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1080.888511] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-6daeb339-3b8e-4790-b9aa-2df593716251 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: f0ca0d73-d428-4b8c-acac-a80b7b7dd793] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1080.888717] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-6daeb339-3b8e-4790-b9aa-2df593716251 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: f0ca0d73-d428-4b8c-acac-a80b7b7dd793] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1080.890042] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-717bd0ac-63f3-4db2-9a68-b44dc5878fda {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.952191] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-6daeb339-3b8e-4790-b9aa-2df593716251 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: f0ca0d73-d428-4b8c-acac-a80b7b7dd793] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1080.953705] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-6daeb339-3b8e-4790-b9aa-2df593716251 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: f0ca0d73-d428-4b8c-acac-a80b7b7dd793] Deleting contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1080.953999] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-6daeb339-3b8e-4790-b9aa-2df593716251 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Deleting the datastore file [datastore1] f0ca0d73-d428-4b8c-acac-a80b7b7dd793 {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1080.954676] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5e0e0054-b137-4b07-a4e2-a4c9a02a83a1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.962409] env[63028]: DEBUG oslo_vmware.api [None req-6daeb339-3b8e-4790-b9aa-2df593716251 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Waiting for the task: (returnval){ [ 1080.962409] env[63028]: value = "task-2736245" [ 1080.962409] env[63028]: _type = "Task" [ 1080.962409] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1080.972546] env[63028]: DEBUG oslo_vmware.api [None req-6daeb339-3b8e-4790-b9aa-2df593716251 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2736245, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.246382] env[63028]: DEBUG nova.scheduler.client.report [None req-807df1ad-a56e-4542-a89e-860ad319f30c tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1081.336796] env[63028]: DEBUG nova.compute.manager [None req-18cab1b2-a93d-4707-a623-a0b75ca018c2 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: 899496ae-8463-42e0-a287-b141d956fa0a] Start spawning the instance on the hypervisor. 
{{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1081.339138] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8591df8e-c073-4f58-b4a9-6a93be3007ed tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Releasing lock "refresh_cache-d41a1eae-bb89-4222-9466-d86af891c654" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1081.342621] env[63028]: DEBUG oslo_concurrency.lockutils [None req-0aeaf461-39d1-4229-b36e-ab90d73f7d15 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Acquiring lock "f804ec95-0b97-4960-844d-b678b97fc401" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1081.342857] env[63028]: DEBUG oslo_concurrency.lockutils [None req-0aeaf461-39d1-4229-b36e-ab90d73f7d15 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Lock "f804ec95-0b97-4960-844d-b678b97fc401" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1081.368011] env[63028]: DEBUG nova.virt.hardware [None req-18cab1b2-a93d-4707-a623-a0b75ca018c2 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1081.368180] env[63028]: DEBUG nova.virt.hardware [None req-18cab1b2-a93d-4707-a623-a0b75ca018c2 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1081.368224] env[63028]: DEBUG nova.virt.hardware [None req-18cab1b2-a93d-4707-a623-a0b75ca018c2 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1081.368416] env[63028]: DEBUG nova.virt.hardware [None req-18cab1b2-a93d-4707-a623-a0b75ca018c2 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1081.368562] env[63028]: DEBUG nova.virt.hardware [None req-18cab1b2-a93d-4707-a623-a0b75ca018c2 tempest-AttachInterfacesTestJSON-1120072222 
tempest-AttachInterfacesTestJSON-1120072222-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1081.368708] env[63028]: DEBUG nova.virt.hardware [None req-18cab1b2-a93d-4707-a623-a0b75ca018c2 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1081.368921] env[63028]: DEBUG nova.virt.hardware [None req-18cab1b2-a93d-4707-a623-a0b75ca018c2 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1081.369569] env[63028]: DEBUG nova.virt.hardware [None req-18cab1b2-a93d-4707-a623-a0b75ca018c2 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1081.369569] env[63028]: DEBUG nova.virt.hardware [None req-18cab1b2-a93d-4707-a623-a0b75ca018c2 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1081.369569] env[63028]: DEBUG nova.virt.hardware [None req-18cab1b2-a93d-4707-a623-a0b75ca018c2 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1081.369569] env[63028]: DEBUG nova.virt.hardware [None req-18cab1b2-a93d-4707-a623-a0b75ca018c2 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1081.370460] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55d61c43-8e0e-44c5-b091-f8ae6e1e17c3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.381819] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7dadedd-08db-41f2-966d-689d832c4d0f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.397112] env[63028]: DEBUG oslo_concurrency.lockutils [None req-77578b6f-e071-485a-b276-8efcef9701e9 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Releasing lock "refresh_cache-1f8415cc-f544-4c89-9863-43d5ae9144e8" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1081.397468] env[63028]: DEBUG nova.compute.manager [None req-77578b6f-e071-485a-b276-8efcef9701e9 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: 1f8415cc-f544-4c89-9863-43d5ae9144e8] Instance network_info: |[{"id": 
"b2a35d65-e481-4d0a-8dc1-3b3b8dec509b", "address": "fa:16:3e:fd:34:eb", "network": {"id": "9c32515c-66bf-4a5f-9011-975fe2f6b264", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1101331936-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8efc6d89903c454eb39136a76e0adef5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92fe29b3-0907-453d-aabb-5559c4bd7c0f", "external-id": "nsx-vlan-transportzone-482", "segmentation_id": 482, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb2a35d65-e4", "ovs_interfaceid": "b2a35d65-e481-4d0a-8dc1-3b3b8dec509b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1081.399441] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-77578b6f-e071-485a-b276-8efcef9701e9 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: 1f8415cc-f544-4c89-9863-43d5ae9144e8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fd:34:eb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '92fe29b3-0907-453d-aabb-5559c4bd7c0f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b2a35d65-e481-4d0a-8dc1-3b3b8dec509b', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1081.408194] env[63028]: DEBUG oslo.service.loopingcall [None req-77578b6f-e071-485a-b276-8efcef9701e9 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1081.409370] env[63028]: DEBUG nova.compute.manager [req-3361ab7f-8df2-4fd1-8d2e-c4c46b3c9ecb req-630fcfdd-8a2b-4d6c-a173-4b80fea27842 service nova] [instance: 1f8415cc-f544-4c89-9863-43d5ae9144e8] Received event network-changed-b2a35d65-e481-4d0a-8dc1-3b3b8dec509b {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1081.409600] env[63028]: DEBUG nova.compute.manager [req-3361ab7f-8df2-4fd1-8d2e-c4c46b3c9ecb req-630fcfdd-8a2b-4d6c-a173-4b80fea27842 service nova] [instance: 1f8415cc-f544-4c89-9863-43d5ae9144e8] Refreshing instance network info cache due to event network-changed-b2a35d65-e481-4d0a-8dc1-3b3b8dec509b. 
{{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1081.409850] env[63028]: DEBUG oslo_concurrency.lockutils [req-3361ab7f-8df2-4fd1-8d2e-c4c46b3c9ecb req-630fcfdd-8a2b-4d6c-a173-4b80fea27842 service nova] Acquiring lock "refresh_cache-1f8415cc-f544-4c89-9863-43d5ae9144e8" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1081.409996] env[63028]: DEBUG oslo_concurrency.lockutils [req-3361ab7f-8df2-4fd1-8d2e-c4c46b3c9ecb req-630fcfdd-8a2b-4d6c-a173-4b80fea27842 service nova] Acquired lock "refresh_cache-1f8415cc-f544-4c89-9863-43d5ae9144e8" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1081.410208] env[63028]: DEBUG nova.network.neutron [req-3361ab7f-8df2-4fd1-8d2e-c4c46b3c9ecb req-630fcfdd-8a2b-4d6c-a173-4b80fea27842 service nova] [instance: 1f8415cc-f544-4c89-9863-43d5ae9144e8] Refreshing network info cache for port b2a35d65-e481-4d0a-8dc1-3b3b8dec509b {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1081.411610] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1f8415cc-f544-4c89-9863-43d5ae9144e8] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1081.412072] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fbb83561-635b-4172-8ab9-a3b0f0d6fae8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.434654] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1081.434654] env[63028]: value = "task-2736246" [ 1081.434654] env[63028]: _type = "Task" [ 1081.434654] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1081.444460] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2736246, 'name': CreateVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.478777] env[63028]: DEBUG oslo_vmware.api [None req-6daeb339-3b8e-4790-b9aa-2df593716251 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2736245, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.215202} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1081.479201] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-6daeb339-3b8e-4790-b9aa-2df593716251 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1081.479301] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-6daeb339-3b8e-4790-b9aa-2df593716251 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: f0ca0d73-d428-4b8c-acac-a80b7b7dd793] Deleted contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1081.479629] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-6daeb339-3b8e-4790-b9aa-2df593716251 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: f0ca0d73-d428-4b8c-acac-a80b7b7dd793] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1081.480054] env[63028]: INFO nova.compute.manager [None req-6daeb339-3b8e-4790-b9aa-2df593716251 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: f0ca0d73-d428-4b8c-acac-a80b7b7dd793] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1081.480054] env[63028]: DEBUG oslo.service.loopingcall [None req-6daeb339-3b8e-4790-b9aa-2df593716251 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1081.480292] env[63028]: DEBUG nova.compute.manager [-] [instance: f0ca0d73-d428-4b8c-acac-a80b7b7dd793] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1081.480942] env[63028]: DEBUG nova.network.neutron [-] [instance: f0ca0d73-d428-4b8c-acac-a80b7b7dd793] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1081.751870] env[63028]: DEBUG oslo_concurrency.lockutils [None req-807df1ad-a56e-4542-a89e-860ad319f30c tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.442s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1081.752517] env[63028]: DEBUG nova.compute.manager [None req-807df1ad-a56e-4542-a89e-860ad319f30c tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] [instance: 2add1602-122e-41d7-af83-b71d8dab9288] Start building networks asynchronously for instance. 
{{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1081.755856] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ed5ccc95-c538-41b6-beda-16d461eedf6f tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.779s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1081.757464] env[63028]: INFO nova.compute.claims [None req-ed5ccc95-c538-41b6-beda-16d461eedf6f tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: 6865f832-d409-4b9b-8b6c-33b0bf07d2b2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1081.844649] env[63028]: DEBUG nova.compute.manager [None req-8591df8e-c073-4f58-b4a9-6a93be3007ed tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=63028) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:901}} [ 1081.851322] env[63028]: DEBUG nova.compute.utils [None req-0aeaf461-39d1-4229-b36e-ab90d73f7d15 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1081.948990] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2736246, 'name': CreateVM_Task} progress is 99%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.130749] env[63028]: DEBUG nova.network.neutron [None req-18cab1b2-a93d-4707-a623-a0b75ca018c2 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: 899496ae-8463-42e0-a287-b141d956fa0a] Successfully updated port: 197b3459-f9f1-4fe3-a9ad-169350b4d637 {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1082.216301] env[63028]: DEBUG nova.network.neutron [req-3361ab7f-8df2-4fd1-8d2e-c4c46b3c9ecb req-630fcfdd-8a2b-4d6c-a173-4b80fea27842 service nova] [instance: 1f8415cc-f544-4c89-9863-43d5ae9144e8] Updated VIF entry in instance network info cache for port b2a35d65-e481-4d0a-8dc1-3b3b8dec509b. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1082.216301] env[63028]: DEBUG nova.network.neutron [req-3361ab7f-8df2-4fd1-8d2e-c4c46b3c9ecb req-630fcfdd-8a2b-4d6c-a173-4b80fea27842 service nova] [instance: 1f8415cc-f544-4c89-9863-43d5ae9144e8] Updating instance_info_cache with network_info: [{"id": "b2a35d65-e481-4d0a-8dc1-3b3b8dec509b", "address": "fa:16:3e:fd:34:eb", "network": {"id": "9c32515c-66bf-4a5f-9011-975fe2f6b264", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1101331936-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8efc6d89903c454eb39136a76e0adef5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92fe29b3-0907-453d-aabb-5559c4bd7c0f", "external-id": "nsx-vlan-transportzone-482", "segmentation_id": 482, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb2a35d65-e4", "ovs_interfaceid": "b2a35d65-e481-4d0a-8dc1-3b3b8dec509b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1082.262205] env[63028]: DEBUG nova.compute.utils [None req-807df1ad-a56e-4542-a89e-860ad319f30c tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1082.265696] env[63028]: DEBUG nova.compute.manager [None req-807df1ad-a56e-4542-a89e-860ad319f30c tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] [instance: 2add1602-122e-41d7-af83-b71d8dab9288] Allocating IP information in the background. 
{{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1082.265914] env[63028]: DEBUG nova.network.neutron [None req-807df1ad-a56e-4542-a89e-860ad319f30c tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] [instance: 2add1602-122e-41d7-af83-b71d8dab9288] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1082.309768] env[63028]: DEBUG nova.policy [None req-807df1ad-a56e-4542-a89e-860ad319f30c tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3b7b0fca3c0f445e84027e634e36279e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '827319c1bd1d4a92842c023f78e9036f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 1082.352700] env[63028]: DEBUG oslo_concurrency.lockutils [None req-0aeaf461-39d1-4229-b36e-ab90d73f7d15 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Lock "f804ec95-0b97-4960-844d-b678b97fc401" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.010s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1082.360230] env[63028]: DEBUG nova.network.neutron [-] [instance: f0ca0d73-d428-4b8c-acac-a80b7b7dd793] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1082.446071] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2736246, 'name': CreateVM_Task, 'duration_secs': 0.646306} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1082.446261] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1f8415cc-f544-4c89-9863-43d5ae9144e8] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1082.446976] env[63028]: DEBUG oslo_concurrency.lockutils [None req-77578b6f-e071-485a-b276-8efcef9701e9 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1082.447195] env[63028]: DEBUG oslo_concurrency.lockutils [None req-77578b6f-e071-485a-b276-8efcef9701e9 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1082.447728] env[63028]: DEBUG oslo_concurrency.lockutils [None req-77578b6f-e071-485a-b276-8efcef9701e9 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1082.447815] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-06221bce-391b-4c31-bb03-33874b295410 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.451926] env[63028]: DEBUG oslo_vmware.api [None req-77578b6f-e071-485a-b276-8efcef9701e9 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Waiting for the task: (returnval){ [ 1082.451926] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52f991d6-fde2-c08f-081d-2cbf7330e7d6" [ 1082.451926] env[63028]: _type = "Task" [ 1082.451926] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1082.459886] env[63028]: DEBUG oslo_vmware.api [None req-77578b6f-e071-485a-b276-8efcef9701e9 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52f991d6-fde2-c08f-081d-2cbf7330e7d6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.564775] env[63028]: DEBUG nova.network.neutron [None req-807df1ad-a56e-4542-a89e-860ad319f30c tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] [instance: 2add1602-122e-41d7-af83-b71d8dab9288] Successfully created port: 34bcf965-61f1-430c-a4dd-fab458782af7 {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1082.634465] env[63028]: DEBUG oslo_concurrency.lockutils [None req-18cab1b2-a93d-4707-a623-a0b75ca018c2 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Acquiring lock "refresh_cache-899496ae-8463-42e0-a287-b141d956fa0a" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1082.634643] env[63028]: DEBUG oslo_concurrency.lockutils [None req-18cab1b2-a93d-4707-a623-a0b75ca018c2 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Acquired lock "refresh_cache-899496ae-8463-42e0-a287-b141d956fa0a" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1082.634778] env[63028]: DEBUG nova.network.neutron [None req-18cab1b2-a93d-4707-a623-a0b75ca018c2 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: 899496ae-8463-42e0-a287-b141d956fa0a] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1082.719507] env[63028]: DEBUG oslo_concurrency.lockutils [req-3361ab7f-8df2-4fd1-8d2e-c4c46b3c9ecb req-630fcfdd-8a2b-4d6c-a173-4b80fea27842 service nova] Releasing lock "refresh_cache-1f8415cc-f544-4c89-9863-43d5ae9144e8" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1082.773505] env[63028]: DEBUG nova.compute.manager [None req-807df1ad-a56e-4542-a89e-860ad319f30c tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] [instance: 2add1602-122e-41d7-af83-b71d8dab9288] Start building block device mappings for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1082.864153] env[63028]: INFO nova.compute.manager [-] [instance: f0ca0d73-d428-4b8c-acac-a80b7b7dd793] Took 1.38 seconds to deallocate network for instance. [ 1082.963048] env[63028]: DEBUG oslo_vmware.api [None req-77578b6f-e071-485a-b276-8efcef9701e9 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52f991d6-fde2-c08f-081d-2cbf7330e7d6, 'name': SearchDatastore_Task, 'duration_secs': 0.017578} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1082.965566] env[63028]: DEBUG oslo_concurrency.lockutils [None req-77578b6f-e071-485a-b276-8efcef9701e9 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1082.965797] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-77578b6f-e071-485a-b276-8efcef9701e9 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: 1f8415cc-f544-4c89-9863-43d5ae9144e8] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1082.966030] env[63028]: DEBUG oslo_concurrency.lockutils [None req-77578b6f-e071-485a-b276-8efcef9701e9 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1082.966185] env[63028]: DEBUG oslo_concurrency.lockutils [None req-77578b6f-e071-485a-b276-8efcef9701e9 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1082.968737] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-77578b6f-e071-485a-b276-8efcef9701e9 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1082.968737] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8850ccb4-1167-4390-a142-8589aa4e48fe {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.974490] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-77578b6f-e071-485a-b276-8efcef9701e9 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1082.974650] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-77578b6f-e071-485a-b276-8efcef9701e9 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1082.977435] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1688e16a-5c8b-44f6-be89-b3bb1393fa55 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.982538] env[63028]: DEBUG oslo_vmware.api [None req-77578b6f-e071-485a-b276-8efcef9701e9 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Waiting for the task: (returnval){ [ 1082.982538] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52c94d19-5506-2e73-7bca-d45c54537c30" [ 1082.982538] env[63028]: _type = "Task" [ 1082.982538] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1082.987432] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8591df8e-c073-4f58-b4a9-6a93be3007ed tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1082.990329] env[63028]: DEBUG oslo_vmware.api [None req-77578b6f-e071-485a-b276-8efcef9701e9 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52c94d19-5506-2e73-7bca-d45c54537c30, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.061931] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0f9a6e2-272a-4929-95af-324ce66ddbe6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.069485] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f15b2f2-7263-4f54-996c-92a1ecf655ed {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.099802] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0368d25b-a3a7-4de2-8071-9e07bd6e7bfe {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.106644] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78fdf523-c4c0-4350-8b01-bc20bd8b1ae9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.119937] env[63028]: DEBUG nova.compute.provider_tree [None req-ed5ccc95-c538-41b6-beda-16d461eedf6f tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1083.166284] env[63028]: DEBUG nova.network.neutron [None req-18cab1b2-a93d-4707-a623-a0b75ca018c2 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: 899496ae-8463-42e0-a287-b141d956fa0a] Instance cache missing network info. 
{{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1083.332015] env[63028]: DEBUG nova.network.neutron [None req-18cab1b2-a93d-4707-a623-a0b75ca018c2 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: 899496ae-8463-42e0-a287-b141d956fa0a] Updating instance_info_cache with network_info: [{"id": "197b3459-f9f1-4fe3-a9ad-169350b4d637", "address": "fa:16:3e:cb:8a:7c", "network": {"id": "c2f1496c-e3fd-43db-a032-12cdacdb4e46", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1287620143-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "25a6457f62d149629c09589feb1a550c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap197b3459-f9", "ovs_interfaceid": "197b3459-f9f1-4fe3-a9ad-169350b4d637", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1083.369865] env[63028]: DEBUG oslo_concurrency.lockutils [None req-6daeb339-3b8e-4790-b9aa-2df593716251 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1083.416177] env[63028]: DEBUG oslo_concurrency.lockutils [None req-0aeaf461-39d1-4229-b36e-ab90d73f7d15 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Acquiring lock "f804ec95-0b97-4960-844d-b678b97fc401" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1083.416489] env[63028]: DEBUG oslo_concurrency.lockutils [None req-0aeaf461-39d1-4229-b36e-ab90d73f7d15 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Lock "f804ec95-0b97-4960-844d-b678b97fc401" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1083.416732] env[63028]: INFO nova.compute.manager [None req-0aeaf461-39d1-4229-b36e-ab90d73f7d15 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] [instance: f804ec95-0b97-4960-844d-b678b97fc401] Attaching volume 79c32ea7-4ed7-4f3a-9a4d-e052462647fc to /dev/sdb [ 1083.429073] env[63028]: DEBUG nova.compute.manager [req-01790cc0-250d-4e29-b6bd-96d0e453e3ca req-9689de18-2ee7-433f-aa1e-2f74733fe07b service nova] [instance: f0ca0d73-d428-4b8c-acac-a80b7b7dd793] Received event 
network-vif-deleted-f5e9674b-220d-4e59-bec7-5b1512efb29e {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1083.429270] env[63028]: DEBUG nova.compute.manager [req-01790cc0-250d-4e29-b6bd-96d0e453e3ca req-9689de18-2ee7-433f-aa1e-2f74733fe07b service nova] [instance: 899496ae-8463-42e0-a287-b141d956fa0a] Received event network-vif-plugged-197b3459-f9f1-4fe3-a9ad-169350b4d637 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1083.429462] env[63028]: DEBUG oslo_concurrency.lockutils [req-01790cc0-250d-4e29-b6bd-96d0e453e3ca req-9689de18-2ee7-433f-aa1e-2f74733fe07b service nova] Acquiring lock "899496ae-8463-42e0-a287-b141d956fa0a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1083.429645] env[63028]: DEBUG oslo_concurrency.lockutils [req-01790cc0-250d-4e29-b6bd-96d0e453e3ca req-9689de18-2ee7-433f-aa1e-2f74733fe07b service nova] Lock "899496ae-8463-42e0-a287-b141d956fa0a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1083.429816] env[63028]: DEBUG oslo_concurrency.lockutils [req-01790cc0-250d-4e29-b6bd-96d0e453e3ca req-9689de18-2ee7-433f-aa1e-2f74733fe07b service nova] Lock "899496ae-8463-42e0-a287-b141d956fa0a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1083.429986] env[63028]: DEBUG nova.compute.manager [req-01790cc0-250d-4e29-b6bd-96d0e453e3ca req-9689de18-2ee7-433f-aa1e-2f74733fe07b service nova] [instance: 899496ae-8463-42e0-a287-b141d956fa0a] No waiting events found dispatching network-vif-plugged-197b3459-f9f1-4fe3-a9ad-169350b4d637 {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1083.430554] env[63028]: WARNING nova.compute.manager [req-01790cc0-250d-4e29-b6bd-96d0e453e3ca req-9689de18-2ee7-433f-aa1e-2f74733fe07b service nova] [instance: 899496ae-8463-42e0-a287-b141d956fa0a] Received unexpected event network-vif-plugged-197b3459-f9f1-4fe3-a9ad-169350b4d637 for instance with vm_state building and task_state spawning. [ 1083.430554] env[63028]: DEBUG nova.compute.manager [req-01790cc0-250d-4e29-b6bd-96d0e453e3ca req-9689de18-2ee7-433f-aa1e-2f74733fe07b service nova] [instance: 899496ae-8463-42e0-a287-b141d956fa0a] Received event network-changed-197b3459-f9f1-4fe3-a9ad-169350b4d637 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1083.430554] env[63028]: DEBUG nova.compute.manager [req-01790cc0-250d-4e29-b6bd-96d0e453e3ca req-9689de18-2ee7-433f-aa1e-2f74733fe07b service nova] [instance: 899496ae-8463-42e0-a287-b141d956fa0a] Refreshing instance network info cache due to event network-changed-197b3459-f9f1-4fe3-a9ad-169350b4d637. 
{{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1083.430849] env[63028]: DEBUG oslo_concurrency.lockutils [req-01790cc0-250d-4e29-b6bd-96d0e453e3ca req-9689de18-2ee7-433f-aa1e-2f74733fe07b service nova] Acquiring lock "refresh_cache-899496ae-8463-42e0-a287-b141d956fa0a" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1083.453998] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49b7aa67-11f5-4343-a9cc-57b188f19c27 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.460426] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ff56182-c9f2-4bdd-95f7-71df0727f6a4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.474384] env[63028]: DEBUG nova.virt.block_device [None req-0aeaf461-39d1-4229-b36e-ab90d73f7d15 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] [instance: f804ec95-0b97-4960-844d-b678b97fc401] Updating existing volume attachment record: 5c855fd7-9498-4a92-a872-1af5188fe335 {{(pid=63028) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1083.492106] env[63028]: DEBUG oslo_vmware.api [None req-77578b6f-e071-485a-b276-8efcef9701e9 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52c94d19-5506-2e73-7bca-d45c54537c30, 'name': SearchDatastore_Task, 'duration_secs': 0.008056} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.492918] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f8d066b4-08b9-473f-941e-131f1cdc24e6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.498320] env[63028]: DEBUG oslo_vmware.api [None req-77578b6f-e071-485a-b276-8efcef9701e9 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Waiting for the task: (returnval){ [ 1083.498320] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52848c66-2470-0e77-fc1f-0f774c0dcfc5" [ 1083.498320] env[63028]: _type = "Task" [ 1083.498320] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.506376] env[63028]: DEBUG oslo_vmware.api [None req-77578b6f-e071-485a-b276-8efcef9701e9 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52848c66-2470-0e77-fc1f-0f774c0dcfc5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.622726] env[63028]: DEBUG nova.scheduler.client.report [None req-ed5ccc95-c538-41b6-beda-16d461eedf6f tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1083.785578] env[63028]: DEBUG nova.compute.manager [None req-807df1ad-a56e-4542-a89e-860ad319f30c tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] [instance: 2add1602-122e-41d7-af83-b71d8dab9288] Start spawning the instance on the hypervisor. {{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1083.813718] env[63028]: DEBUG nova.virt.hardware [None req-807df1ad-a56e-4542-a89e-860ad319f30c tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1083.814026] env[63028]: DEBUG nova.virt.hardware [None req-807df1ad-a56e-4542-a89e-860ad319f30c tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1083.814218] env[63028]: DEBUG nova.virt.hardware [None req-807df1ad-a56e-4542-a89e-860ad319f30c tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1083.814431] env[63028]: DEBUG nova.virt.hardware [None req-807df1ad-a56e-4542-a89e-860ad319f30c tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1083.814585] env[63028]: DEBUG nova.virt.hardware [None req-807df1ad-a56e-4542-a89e-860ad319f30c tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 
1083.814734] env[63028]: DEBUG nova.virt.hardware [None req-807df1ad-a56e-4542-a89e-860ad319f30c tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1083.814946] env[63028]: DEBUG nova.virt.hardware [None req-807df1ad-a56e-4542-a89e-860ad319f30c tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1083.815120] env[63028]: DEBUG nova.virt.hardware [None req-807df1ad-a56e-4542-a89e-860ad319f30c tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1083.815290] env[63028]: DEBUG nova.virt.hardware [None req-807df1ad-a56e-4542-a89e-860ad319f30c tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1083.815449] env[63028]: DEBUG nova.virt.hardware [None req-807df1ad-a56e-4542-a89e-860ad319f30c tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1083.815624] env[63028]: DEBUG nova.virt.hardware [None req-807df1ad-a56e-4542-a89e-860ad319f30c tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1083.816560] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7286e8dc-e208-4498-8ce0-2b26e79ef535 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.823962] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71f17430-c2b0-42b3-9f0c-85f6e6bb9bf7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.840227] env[63028]: DEBUG oslo_concurrency.lockutils [None req-18cab1b2-a93d-4707-a623-a0b75ca018c2 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Releasing lock "refresh_cache-899496ae-8463-42e0-a287-b141d956fa0a" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1083.840532] env[63028]: DEBUG nova.compute.manager [None req-18cab1b2-a93d-4707-a623-a0b75ca018c2 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: 899496ae-8463-42e0-a287-b141d956fa0a] Instance network_info: |[{"id": "197b3459-f9f1-4fe3-a9ad-169350b4d637", "address": "fa:16:3e:cb:8a:7c", "network": {"id": "c2f1496c-e3fd-43db-a032-12cdacdb4e46", "bridge": "br-int", "label": 
"tempest-AttachInterfacesTestJSON-1287620143-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "25a6457f62d149629c09589feb1a550c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap197b3459-f9", "ovs_interfaceid": "197b3459-f9f1-4fe3-a9ad-169350b4d637", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1083.840999] env[63028]: DEBUG oslo_concurrency.lockutils [req-01790cc0-250d-4e29-b6bd-96d0e453e3ca req-9689de18-2ee7-433f-aa1e-2f74733fe07b service nova] Acquired lock "refresh_cache-899496ae-8463-42e0-a287-b141d956fa0a" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1083.841280] env[63028]: DEBUG nova.network.neutron [req-01790cc0-250d-4e29-b6bd-96d0e453e3ca req-9689de18-2ee7-433f-aa1e-2f74733fe07b service nova] [instance: 899496ae-8463-42e0-a287-b141d956fa0a] Refreshing network info cache for port 197b3459-f9f1-4fe3-a9ad-169350b4d637 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1083.842463] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-18cab1b2-a93d-4707-a623-a0b75ca018c2 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: 899496ae-8463-42e0-a287-b141d956fa0a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cb:8a:7c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cbf3349e-d05e-4d44-a011-c4b6e41af988', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '197b3459-f9f1-4fe3-a9ad-169350b4d637', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1083.850140] env[63028]: DEBUG oslo.service.loopingcall [None req-18cab1b2-a93d-4707-a623-a0b75ca018c2 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1083.853203] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 899496ae-8463-42e0-a287-b141d956fa0a] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1083.853685] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-88d6f1a0-8134-4666-bdb0-6c2d31cbed48 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.873834] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1083.873834] env[63028]: value = "task-2736250" [ 1083.873834] env[63028]: _type = "Task" [ 1083.873834] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.881805] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2736250, 'name': CreateVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.008894] env[63028]: DEBUG oslo_vmware.api [None req-77578b6f-e071-485a-b276-8efcef9701e9 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52848c66-2470-0e77-fc1f-0f774c0dcfc5, 'name': SearchDatastore_Task, 'duration_secs': 0.015854} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1084.009232] env[63028]: DEBUG oslo_concurrency.lockutils [None req-77578b6f-e071-485a-b276-8efcef9701e9 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1084.009424] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-77578b6f-e071-485a-b276-8efcef9701e9 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] 1f8415cc-f544-4c89-9863-43d5ae9144e8/1f8415cc-f544-4c89-9863-43d5ae9144e8.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1084.009687] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0d9cc15d-d976-4854-889d-b4ebec549089 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.016111] env[63028]: DEBUG oslo_vmware.api [None req-77578b6f-e071-485a-b276-8efcef9701e9 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Waiting for the task: (returnval){ [ 1084.016111] env[63028]: value = "task-2736253" [ 1084.016111] env[63028]: _type = "Task" [ 1084.016111] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.024941] env[63028]: DEBUG oslo_vmware.api [None req-77578b6f-e071-485a-b276-8efcef9701e9 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2736253, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.128041] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ed5ccc95-c538-41b6-beda-16d461eedf6f tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.372s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1084.128752] env[63028]: DEBUG nova.compute.manager [None req-ed5ccc95-c538-41b6-beda-16d461eedf6f tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: 6865f832-d409-4b9b-8b6c-33b0bf07d2b2] Start building networks asynchronously for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1084.132626] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9fd4d01a-3dac-4bb3-843a-ce388315f026 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.135s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1084.134686] env[63028]: INFO nova.compute.claims [None req-9fd4d01a-3dac-4bb3-843a-ce388315f026 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: a7ff444e-43bc-4925-9754-86ff30de6751] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1084.384178] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2736250, 'name': CreateVM_Task} progress is 99%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.527447] env[63028]: DEBUG oslo_vmware.api [None req-77578b6f-e071-485a-b276-8efcef9701e9 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2736253, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.547689] env[63028]: DEBUG nova.network.neutron [None req-807df1ad-a56e-4542-a89e-860ad319f30c tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] [instance: 2add1602-122e-41d7-af83-b71d8dab9288] Successfully updated port: 34bcf965-61f1-430c-a4dd-fab458782af7 {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1084.552229] env[63028]: DEBUG nova.network.neutron [req-01790cc0-250d-4e29-b6bd-96d0e453e3ca req-9689de18-2ee7-433f-aa1e-2f74733fe07b service nova] [instance: 899496ae-8463-42e0-a287-b141d956fa0a] Updated VIF entry in instance network info cache for port 197b3459-f9f1-4fe3-a9ad-169350b4d637. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1084.552692] env[63028]: DEBUG nova.network.neutron [req-01790cc0-250d-4e29-b6bd-96d0e453e3ca req-9689de18-2ee7-433f-aa1e-2f74733fe07b service nova] [instance: 899496ae-8463-42e0-a287-b141d956fa0a] Updating instance_info_cache with network_info: [{"id": "197b3459-f9f1-4fe3-a9ad-169350b4d637", "address": "fa:16:3e:cb:8a:7c", "network": {"id": "c2f1496c-e3fd-43db-a032-12cdacdb4e46", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1287620143-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "25a6457f62d149629c09589feb1a550c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap197b3459-f9", "ovs_interfaceid": "197b3459-f9f1-4fe3-a9ad-169350b4d637", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1084.644706] env[63028]: DEBUG nova.compute.utils [None req-ed5ccc95-c538-41b6-beda-16d461eedf6f tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1084.649234] env[63028]: DEBUG nova.compute.manager [None req-ed5ccc95-c538-41b6-beda-16d461eedf6f tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: 6865f832-d409-4b9b-8b6c-33b0bf07d2b2] Allocating IP information in the background. 
{{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1084.649429] env[63028]: DEBUG nova.network.neutron [None req-ed5ccc95-c538-41b6-beda-16d461eedf6f tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: 6865f832-d409-4b9b-8b6c-33b0bf07d2b2] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1084.686064] env[63028]: DEBUG nova.policy [None req-ed5ccc95-c538-41b6-beda-16d461eedf6f tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '54076b7d25474185b3f205437cb68be8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '68de7445caeb4381b9e68c685ccb5e0b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 1084.884448] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2736250, 'name': CreateVM_Task, 'duration_secs': 0.55863} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1084.887282] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 899496ae-8463-42e0-a287-b141d956fa0a] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1084.887282] env[63028]: DEBUG oslo_concurrency.lockutils [None req-18cab1b2-a93d-4707-a623-a0b75ca018c2 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1084.887282] env[63028]: DEBUG oslo_concurrency.lockutils [None req-18cab1b2-a93d-4707-a623-a0b75ca018c2 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1084.887282] env[63028]: DEBUG oslo_concurrency.lockutils [None req-18cab1b2-a93d-4707-a623-a0b75ca018c2 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1084.887282] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8df25934-615b-480d-9005-395c7d597cfb {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.891464] env[63028]: DEBUG oslo_vmware.api [None req-18cab1b2-a93d-4707-a623-a0b75ca018c2 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Waiting for the task: (returnval){ [ 1084.891464] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52fbd84b-1fdf-4441-88c9-100c9109a1a4" [ 1084.891464] 
env[63028]: _type = "Task" [ 1084.891464] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.899568] env[63028]: DEBUG oslo_vmware.api [None req-18cab1b2-a93d-4707-a623-a0b75ca018c2 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52fbd84b-1fdf-4441-88c9-100c9109a1a4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.956220] env[63028]: DEBUG nova.network.neutron [None req-ed5ccc95-c538-41b6-beda-16d461eedf6f tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: 6865f832-d409-4b9b-8b6c-33b0bf07d2b2] Successfully created port: 4c30382c-2b94-4990-b3d6-533480eb847b {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1085.027591] env[63028]: DEBUG oslo_vmware.api [None req-77578b6f-e071-485a-b276-8efcef9701e9 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2736253, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.567674} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.027860] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-77578b6f-e071-485a-b276-8efcef9701e9 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] 1f8415cc-f544-4c89-9863-43d5ae9144e8/1f8415cc-f544-4c89-9863-43d5ae9144e8.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1085.028089] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-77578b6f-e071-485a-b276-8efcef9701e9 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: 1f8415cc-f544-4c89-9863-43d5ae9144e8] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1085.028347] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-16c0f53d-ae72-4f73-9c0f-cefeba80cfcd {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.035067] env[63028]: DEBUG oslo_vmware.api [None req-77578b6f-e071-485a-b276-8efcef9701e9 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Waiting for the task: (returnval){ [ 1085.035067] env[63028]: value = "task-2736254" [ 1085.035067] env[63028]: _type = "Task" [ 1085.035067] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1085.046617] env[63028]: DEBUG oslo_vmware.api [None req-77578b6f-e071-485a-b276-8efcef9701e9 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2736254, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.055613] env[63028]: DEBUG oslo_concurrency.lockutils [None req-807df1ad-a56e-4542-a89e-860ad319f30c tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] Acquiring lock "refresh_cache-2add1602-122e-41d7-af83-b71d8dab9288" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1085.055765] env[63028]: DEBUG oslo_concurrency.lockutils [None req-807df1ad-a56e-4542-a89e-860ad319f30c tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] Acquired lock "refresh_cache-2add1602-122e-41d7-af83-b71d8dab9288" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1085.055914] env[63028]: DEBUG nova.network.neutron [None req-807df1ad-a56e-4542-a89e-860ad319f30c tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] [instance: 2add1602-122e-41d7-af83-b71d8dab9288] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1085.057416] env[63028]: DEBUG oslo_concurrency.lockutils [req-01790cc0-250d-4e29-b6bd-96d0e453e3ca req-9689de18-2ee7-433f-aa1e-2f74733fe07b service nova] Releasing lock "refresh_cache-899496ae-8463-42e0-a287-b141d956fa0a" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1085.155490] env[63028]: DEBUG nova.compute.manager [None req-ed5ccc95-c538-41b6-beda-16d461eedf6f tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: 6865f832-d409-4b9b-8b6c-33b0bf07d2b2] Start building block device mappings for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1085.405415] env[63028]: DEBUG oslo_vmware.api [None req-18cab1b2-a93d-4707-a623-a0b75ca018c2 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52fbd84b-1fdf-4441-88c9-100c9109a1a4, 'name': SearchDatastore_Task, 'duration_secs': 0.030343} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.408130] env[63028]: DEBUG oslo_concurrency.lockutils [None req-18cab1b2-a93d-4707-a623-a0b75ca018c2 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1085.408374] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-18cab1b2-a93d-4707-a623-a0b75ca018c2 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: 899496ae-8463-42e0-a287-b141d956fa0a] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1085.408609] env[63028]: DEBUG oslo_concurrency.lockutils [None req-18cab1b2-a93d-4707-a623-a0b75ca018c2 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1085.408760] env[63028]: DEBUG oslo_concurrency.lockutils [None req-18cab1b2-a93d-4707-a623-a0b75ca018c2 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1085.408937] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-18cab1b2-a93d-4707-a623-a0b75ca018c2 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1085.410030] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dac7d125-f929-48ef-87ea-2edb8f3941a7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.422852] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-18cab1b2-a93d-4707-a623-a0b75ca018c2 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1085.423070] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-18cab1b2-a93d-4707-a623-a0b75ca018c2 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1085.424243] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-73750e6e-8da1-43c4-9b99-9d230bffc436 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.430538] env[63028]: DEBUG oslo_vmware.api [None req-18cab1b2-a93d-4707-a623-a0b75ca018c2 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Waiting for the task: (returnval){ [ 1085.430538] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52ca6e98-5b1d-cccd-ae0e-660d8cfbbd95" [ 1085.430538] env[63028]: _type = "Task" [ 1085.430538] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1085.443046] env[63028]: DEBUG oslo_vmware.api [None req-18cab1b2-a93d-4707-a623-a0b75ca018c2 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52ca6e98-5b1d-cccd-ae0e-660d8cfbbd95, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.462555] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f0337da-b725-49ec-b62a-74864d6cc194 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.474243] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78a688fa-211a-4cc4-b4d0-1543b403e37a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.510065] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49c8886a-3d3f-42c3-b05c-7e63b33d6830 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.518289] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fea110d-5fd1-4dbf-a086-0efd0d318856 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.532469] env[63028]: DEBUG nova.compute.provider_tree [None req-9fd4d01a-3dac-4bb3-843a-ce388315f026 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1085.544349] env[63028]: DEBUG oslo_vmware.api [None req-77578b6f-e071-485a-b276-8efcef9701e9 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2736254, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072632} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.545267] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-77578b6f-e071-485a-b276-8efcef9701e9 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: 1f8415cc-f544-4c89-9863-43d5ae9144e8] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1085.546493] env[63028]: DEBUG nova.compute.manager [req-50e82e0b-0c51-4093-a6c5-c5cb2b719ef8 req-b665acfc-6060-44bc-9cb3-20f956952fba service nova] [instance: 2add1602-122e-41d7-af83-b71d8dab9288] Received event network-vif-plugged-34bcf965-61f1-430c-a4dd-fab458782af7 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1085.546567] env[63028]: DEBUG oslo_concurrency.lockutils [req-50e82e0b-0c51-4093-a6c5-c5cb2b719ef8 req-b665acfc-6060-44bc-9cb3-20f956952fba service nova] Acquiring lock "2add1602-122e-41d7-af83-b71d8dab9288-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1085.546797] env[63028]: DEBUG oslo_concurrency.lockutils [req-50e82e0b-0c51-4093-a6c5-c5cb2b719ef8 req-b665acfc-6060-44bc-9cb3-20f956952fba service nova] Lock "2add1602-122e-41d7-af83-b71d8dab9288-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1085.546973] env[63028]: DEBUG oslo_concurrency.lockutils [req-50e82e0b-0c51-4093-a6c5-c5cb2b719ef8 req-b665acfc-6060-44bc-9cb3-20f956952fba service nova] Lock "2add1602-122e-41d7-af83-b71d8dab9288-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1085.547159] env[63028]: DEBUG nova.compute.manager [req-50e82e0b-0c51-4093-a6c5-c5cb2b719ef8 req-b665acfc-6060-44bc-9cb3-20f956952fba service nova] [instance: 2add1602-122e-41d7-af83-b71d8dab9288] No waiting events found dispatching network-vif-plugged-34bcf965-61f1-430c-a4dd-fab458782af7 {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1085.547326] env[63028]: WARNING nova.compute.manager [req-50e82e0b-0c51-4093-a6c5-c5cb2b719ef8 req-b665acfc-6060-44bc-9cb3-20f956952fba service nova] [instance: 2add1602-122e-41d7-af83-b71d8dab9288] Received unexpected event network-vif-plugged-34bcf965-61f1-430c-a4dd-fab458782af7 for instance with vm_state building and task_state spawning. [ 1085.547481] env[63028]: DEBUG nova.compute.manager [req-50e82e0b-0c51-4093-a6c5-c5cb2b719ef8 req-b665acfc-6060-44bc-9cb3-20f956952fba service nova] [instance: 2add1602-122e-41d7-af83-b71d8dab9288] Received event network-changed-34bcf965-61f1-430c-a4dd-fab458782af7 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1085.547631] env[63028]: DEBUG nova.compute.manager [req-50e82e0b-0c51-4093-a6c5-c5cb2b719ef8 req-b665acfc-6060-44bc-9cb3-20f956952fba service nova] [instance: 2add1602-122e-41d7-af83-b71d8dab9288] Refreshing instance network info cache due to event network-changed-34bcf965-61f1-430c-a4dd-fab458782af7. 
{{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1085.547797] env[63028]: DEBUG oslo_concurrency.lockutils [req-50e82e0b-0c51-4093-a6c5-c5cb2b719ef8 req-b665acfc-6060-44bc-9cb3-20f956952fba service nova] Acquiring lock "refresh_cache-2add1602-122e-41d7-af83-b71d8dab9288" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1085.548504] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d5a1613-74f0-410f-91c0-647899c54f4a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.575607] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-77578b6f-e071-485a-b276-8efcef9701e9 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: 1f8415cc-f544-4c89-9863-43d5ae9144e8] Reconfiguring VM instance instance-00000069 to attach disk [datastore2] 1f8415cc-f544-4c89-9863-43d5ae9144e8/1f8415cc-f544-4c89-9863-43d5ae9144e8.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1085.576380] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-80f937a3-54ce-4083-8860-e1950d3f807a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.599144] env[63028]: DEBUG oslo_vmware.api [None req-77578b6f-e071-485a-b276-8efcef9701e9 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Waiting for the task: (returnval){ [ 1085.599144] env[63028]: value = "task-2736255" [ 1085.599144] env[63028]: _type = "Task" [ 1085.599144] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1085.607760] env[63028]: DEBUG oslo_vmware.api [None req-77578b6f-e071-485a-b276-8efcef9701e9 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2736255, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.637741] env[63028]: DEBUG nova.network.neutron [None req-807df1ad-a56e-4542-a89e-860ad319f30c tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] [instance: 2add1602-122e-41d7-af83-b71d8dab9288] Instance cache missing network info. 
{{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1085.817448] env[63028]: DEBUG nova.network.neutron [None req-807df1ad-a56e-4542-a89e-860ad319f30c tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] [instance: 2add1602-122e-41d7-af83-b71d8dab9288] Updating instance_info_cache with network_info: [{"id": "34bcf965-61f1-430c-a4dd-fab458782af7", "address": "fa:16:3e:5a:80:f1", "network": {"id": "ec0b7321-4a1e-4bbf-9818-d89459fad099", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1052357008-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "827319c1bd1d4a92842c023f78e9036f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8fedd232-bfc1-4e7f-bd5e-c43ef8f2f08a", "external-id": "nsx-vlan-transportzone-925", "segmentation_id": 925, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap34bcf965-61", "ovs_interfaceid": "34bcf965-61f1-430c-a4dd-fab458782af7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1085.943825] env[63028]: DEBUG oslo_vmware.api [None req-18cab1b2-a93d-4707-a623-a0b75ca018c2 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52ca6e98-5b1d-cccd-ae0e-660d8cfbbd95, 'name': SearchDatastore_Task, 'duration_secs': 0.012493} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.944607] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-afa1d688-8bd8-486a-98b6-1395e74b2ae2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.950185] env[63028]: DEBUG oslo_vmware.api [None req-18cab1b2-a93d-4707-a623-a0b75ca018c2 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Waiting for the task: (returnval){ [ 1085.950185] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5219f237-1d54-90cd-a81c-0f21c86892ca" [ 1085.950185] env[63028]: _type = "Task" [ 1085.950185] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1085.958739] env[63028]: DEBUG oslo_vmware.api [None req-18cab1b2-a93d-4707-a623-a0b75ca018c2 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5219f237-1d54-90cd-a81c-0f21c86892ca, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.036085] env[63028]: DEBUG nova.scheduler.client.report [None req-9fd4d01a-3dac-4bb3-843a-ce388315f026 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1086.110217] env[63028]: DEBUG oslo_vmware.api [None req-77578b6f-e071-485a-b276-8efcef9701e9 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2736255, 'name': ReconfigVM_Task, 'duration_secs': 0.329893} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1086.110518] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-77578b6f-e071-485a-b276-8efcef9701e9 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: 1f8415cc-f544-4c89-9863-43d5ae9144e8] Reconfigured VM instance instance-00000069 to attach disk [datastore2] 1f8415cc-f544-4c89-9863-43d5ae9144e8/1f8415cc-f544-4c89-9863-43d5ae9144e8.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1086.111203] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-71f19739-1b80-4a2f-8cb7-bed64f0e3149 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.119140] env[63028]: DEBUG oslo_vmware.api [None req-77578b6f-e071-485a-b276-8efcef9701e9 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Waiting for the task: (returnval){ [ 1086.119140] env[63028]: value = "task-2736258" [ 1086.119140] env[63028]: _type = "Task" [ 1086.119140] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1086.127668] env[63028]: DEBUG oslo_vmware.api [None req-77578b6f-e071-485a-b276-8efcef9701e9 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2736258, 'name': Rename_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.167264] env[63028]: DEBUG nova.compute.manager [None req-ed5ccc95-c538-41b6-beda-16d461eedf6f tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: 6865f832-d409-4b9b-8b6c-33b0bf07d2b2] Start spawning the instance on the hypervisor. 
{{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1086.194086] env[63028]: DEBUG nova.virt.hardware [None req-ed5ccc95-c538-41b6-beda-16d461eedf6f tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1086.194455] env[63028]: DEBUG nova.virt.hardware [None req-ed5ccc95-c538-41b6-beda-16d461eedf6f tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1086.194655] env[63028]: DEBUG nova.virt.hardware [None req-ed5ccc95-c538-41b6-beda-16d461eedf6f tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1086.194831] env[63028]: DEBUG nova.virt.hardware [None req-ed5ccc95-c538-41b6-beda-16d461eedf6f tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1086.194979] env[63028]: DEBUG nova.virt.hardware [None req-ed5ccc95-c538-41b6-beda-16d461eedf6f tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1086.195229] env[63028]: DEBUG nova.virt.hardware [None req-ed5ccc95-c538-41b6-beda-16d461eedf6f tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1086.195359] env[63028]: DEBUG nova.virt.hardware [None req-ed5ccc95-c538-41b6-beda-16d461eedf6f tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1086.195561] env[63028]: DEBUG nova.virt.hardware [None req-ed5ccc95-c538-41b6-beda-16d461eedf6f tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1086.195755] 
env[63028]: DEBUG nova.virt.hardware [None req-ed5ccc95-c538-41b6-beda-16d461eedf6f tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1086.195924] env[63028]: DEBUG nova.virt.hardware [None req-ed5ccc95-c538-41b6-beda-16d461eedf6f tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1086.196112] env[63028]: DEBUG nova.virt.hardware [None req-ed5ccc95-c538-41b6-beda-16d461eedf6f tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1086.196993] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc9dc1cc-e87c-48a1-a225-d0ee01946279 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.206147] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1efe7801-736d-4ba1-a2fb-e281924485b3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.319711] env[63028]: DEBUG oslo_concurrency.lockutils [None req-807df1ad-a56e-4542-a89e-860ad319f30c tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] Releasing lock "refresh_cache-2add1602-122e-41d7-af83-b71d8dab9288" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1086.319960] env[63028]: DEBUG nova.compute.manager [None req-807df1ad-a56e-4542-a89e-860ad319f30c tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] [instance: 2add1602-122e-41d7-af83-b71d8dab9288] Instance network_info: |[{"id": "34bcf965-61f1-430c-a4dd-fab458782af7", "address": "fa:16:3e:5a:80:f1", "network": {"id": "ec0b7321-4a1e-4bbf-9818-d89459fad099", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1052357008-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "827319c1bd1d4a92842c023f78e9036f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8fedd232-bfc1-4e7f-bd5e-c43ef8f2f08a", "external-id": "nsx-vlan-transportzone-925", "segmentation_id": 925, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap34bcf965-61", "ovs_interfaceid": "34bcf965-61f1-430c-a4dd-fab458782af7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1086.320343] env[63028]: DEBUG oslo_concurrency.lockutils 
[req-50e82e0b-0c51-4093-a6c5-c5cb2b719ef8 req-b665acfc-6060-44bc-9cb3-20f956952fba service nova] Acquired lock "refresh_cache-2add1602-122e-41d7-af83-b71d8dab9288" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1086.320452] env[63028]: DEBUG nova.network.neutron [req-50e82e0b-0c51-4093-a6c5-c5cb2b719ef8 req-b665acfc-6060-44bc-9cb3-20f956952fba service nova] [instance: 2add1602-122e-41d7-af83-b71d8dab9288] Refreshing network info cache for port 34bcf965-61f1-430c-a4dd-fab458782af7 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1086.321834] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-807df1ad-a56e-4542-a89e-860ad319f30c tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] [instance: 2add1602-122e-41d7-af83-b71d8dab9288] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5a:80:f1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8fedd232-bfc1-4e7f-bd5e-c43ef8f2f08a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '34bcf965-61f1-430c-a4dd-fab458782af7', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1086.328945] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-807df1ad-a56e-4542-a89e-860ad319f30c tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] Creating folder: Project (827319c1bd1d4a92842c023f78e9036f). Parent ref: group-v550570. {{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1086.329926] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b63c711c-858d-4977-9ef3-88cb35251788 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.343213] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-807df1ad-a56e-4542-a89e-860ad319f30c tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] Created folder: Project (827319c1bd1d4a92842c023f78e9036f) in parent group-v550570. [ 1086.343418] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-807df1ad-a56e-4542-a89e-860ad319f30c tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] Creating folder: Instances. Parent ref: group-v550868. {{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1086.343686] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-433e6ef0-0956-4175-9c55-25edd9bf9876 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.353563] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-807df1ad-a56e-4542-a89e-860ad319f30c tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] Created folder: Instances in parent group-v550868. [ 1086.353829] env[63028]: DEBUG oslo.service.loopingcall [None req-807df1ad-a56e-4542-a89e-860ad319f30c tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1086.354091] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2add1602-122e-41d7-af83-b71d8dab9288] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1086.354331] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d1daad96-9a6b-4965-b05b-1cb91a7aff8e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.375091] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1086.375091] env[63028]: value = "task-2736261" [ 1086.375091] env[63028]: _type = "Task" [ 1086.375091] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1086.382880] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2736261, 'name': CreateVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.460667] env[63028]: DEBUG oslo_vmware.api [None req-18cab1b2-a93d-4707-a623-a0b75ca018c2 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5219f237-1d54-90cd-a81c-0f21c86892ca, 'name': SearchDatastore_Task, 'duration_secs': 0.010818} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1086.460877] env[63028]: DEBUG oslo_concurrency.lockutils [None req-18cab1b2-a93d-4707-a623-a0b75ca018c2 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1086.461152] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-18cab1b2-a93d-4707-a623-a0b75ca018c2 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] 899496ae-8463-42e0-a287-b141d956fa0a/899496ae-8463-42e0-a287-b141d956fa0a.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1086.461481] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cf4fb50f-6534-42e6-a906-fe33ae014adf {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.469875] env[63028]: DEBUG oslo_vmware.api [None req-18cab1b2-a93d-4707-a623-a0b75ca018c2 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Waiting for the task: (returnval){ [ 1086.469875] env[63028]: value = "task-2736262" [ 1086.469875] env[63028]: _type = "Task" [ 1086.469875] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1086.479071] env[63028]: DEBUG oslo_vmware.api [None req-18cab1b2-a93d-4707-a623-a0b75ca018c2 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2736262, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.497579] env[63028]: DEBUG nova.network.neutron [None req-ed5ccc95-c538-41b6-beda-16d461eedf6f tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: 6865f832-d409-4b9b-8b6c-33b0bf07d2b2] Successfully updated port: 4c30382c-2b94-4990-b3d6-533480eb847b {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1086.541044] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9fd4d01a-3dac-4bb3-843a-ce388315f026 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.408s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1086.541589] env[63028]: DEBUG nova.compute.manager [None req-9fd4d01a-3dac-4bb3-843a-ce388315f026 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: a7ff444e-43bc-4925-9754-86ff30de6751] Start building networks asynchronously for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1086.544535] env[63028]: DEBUG oslo_concurrency.lockutils [None req-eb531fce-c8e6-4b76-8558-fd7370aa3526 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.967s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1086.544796] env[63028]: DEBUG nova.objects.instance [None req-eb531fce-c8e6-4b76-8558-fd7370aa3526 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Lazy-loading 'resources' on Instance uuid 2c2fb165-8906-4d42-a839-89ea6c8814ab {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1086.629662] env[63028]: DEBUG oslo_vmware.api [None req-77578b6f-e071-485a-b276-8efcef9701e9 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2736258, 'name': Rename_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.886371] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2736261, 'name': CreateVM_Task} progress is 99%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.980426] env[63028]: DEBUG oslo_vmware.api [None req-18cab1b2-a93d-4707-a623-a0b75ca018c2 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2736262, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.999530] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ed5ccc95-c538-41b6-beda-16d461eedf6f tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Acquiring lock "refresh_cache-6865f832-d409-4b9b-8b6c-33b0bf07d2b2" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1086.999763] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ed5ccc95-c538-41b6-beda-16d461eedf6f tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Acquired lock "refresh_cache-6865f832-d409-4b9b-8b6c-33b0bf07d2b2" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1087.000049] env[63028]: DEBUG nova.network.neutron [None req-ed5ccc95-c538-41b6-beda-16d461eedf6f tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: 6865f832-d409-4b9b-8b6c-33b0bf07d2b2] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1087.036105] env[63028]: DEBUG nova.network.neutron [req-50e82e0b-0c51-4093-a6c5-c5cb2b719ef8 req-b665acfc-6060-44bc-9cb3-20f956952fba service nova] [instance: 2add1602-122e-41d7-af83-b71d8dab9288] Updated VIF entry in instance network info cache for port 34bcf965-61f1-430c-a4dd-fab458782af7. {{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1087.036671] env[63028]: DEBUG nova.network.neutron [req-50e82e0b-0c51-4093-a6c5-c5cb2b719ef8 req-b665acfc-6060-44bc-9cb3-20f956952fba service nova] [instance: 2add1602-122e-41d7-af83-b71d8dab9288] Updating instance_info_cache with network_info: [{"id": "34bcf965-61f1-430c-a4dd-fab458782af7", "address": "fa:16:3e:5a:80:f1", "network": {"id": "ec0b7321-4a1e-4bbf-9818-d89459fad099", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1052357008-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "827319c1bd1d4a92842c023f78e9036f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8fedd232-bfc1-4e7f-bd5e-c43ef8f2f08a", "external-id": "nsx-vlan-transportzone-925", "segmentation_id": 925, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap34bcf965-61", "ovs_interfaceid": "34bcf965-61f1-430c-a4dd-fab458782af7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1087.047632] env[63028]: DEBUG nova.compute.utils [None req-9fd4d01a-3dac-4bb3-843a-ce388315f026 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1087.050173] env[63028]: DEBUG nova.compute.manager [None 
req-9fd4d01a-3dac-4bb3-843a-ce388315f026 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: a7ff444e-43bc-4925-9754-86ff30de6751] Allocating IP information in the background. {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1087.050517] env[63028]: DEBUG nova.network.neutron [None req-9fd4d01a-3dac-4bb3-843a-ce388315f026 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: a7ff444e-43bc-4925-9754-86ff30de6751] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1087.102336] env[63028]: DEBUG nova.policy [None req-9fd4d01a-3dac-4bb3-843a-ce388315f026 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8478e45562394a0d8fafc5e3e1218fd2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '05118b378b5e4d838962db2378b381bc', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 1087.134064] env[63028]: DEBUG oslo_vmware.api [None req-77578b6f-e071-485a-b276-8efcef9701e9 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2736258, 'name': Rename_Task, 'duration_secs': 0.905753} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1087.134420] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-77578b6f-e071-485a-b276-8efcef9701e9 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: 1f8415cc-f544-4c89-9863-43d5ae9144e8] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1087.134690] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d2b423e4-563d-46cf-bc1c-ce045e723b13 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.145807] env[63028]: DEBUG oslo_vmware.api [None req-77578b6f-e071-485a-b276-8efcef9701e9 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Waiting for the task: (returnval){ [ 1087.145807] env[63028]: value = "task-2736263" [ 1087.145807] env[63028]: _type = "Task" [ 1087.145807] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1087.158187] env[63028]: DEBUG oslo_vmware.api [None req-77578b6f-e071-485a-b276-8efcef9701e9 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2736263, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.386576] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2736261, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.406344] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b20676f-6cb7-487f-a82d-8ec48873698b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.415619] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-891e5b2c-6c0d-42de-b6f3-ccfbb191aee4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.460495] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-571ca9f7-8a08-4800-b21a-d500f981a0f9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.469421] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-886f1a47-64e4-4dff-a518-889887c6e4b2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.474173] env[63028]: DEBUG nova.network.neutron [None req-9fd4d01a-3dac-4bb3-843a-ce388315f026 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: a7ff444e-43bc-4925-9754-86ff30de6751] Successfully created port: b7d0f379-6cd1-4efa-beb8-c2a2f3ef548c {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1087.484576] env[63028]: DEBUG oslo_vmware.api [None req-18cab1b2-a93d-4707-a623-a0b75ca018c2 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2736262, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.863743} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1087.492314] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-18cab1b2-a93d-4707-a623-a0b75ca018c2 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] 899496ae-8463-42e0-a287-b141d956fa0a/899496ae-8463-42e0-a287-b141d956fa0a.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1087.492569] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-18cab1b2-a93d-4707-a623-a0b75ca018c2 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: 899496ae-8463-42e0-a287-b141d956fa0a] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1087.493082] env[63028]: DEBUG nova.compute.provider_tree [None req-eb531fce-c8e6-4b76-8558-fd7370aa3526 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1087.494671] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-312572b6-7f4a-466f-840d-87ec7ac6689d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.504762] env[63028]: DEBUG oslo_vmware.api [None req-18cab1b2-a93d-4707-a623-a0b75ca018c2 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Waiting for the task: (returnval){ [ 1087.504762] env[63028]: value = "task-2736264" [ 1087.504762] env[63028]: _type = "Task" [ 1087.504762] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1087.515247] env[63028]: DEBUG oslo_vmware.api [None req-18cab1b2-a93d-4707-a623-a0b75ca018c2 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2736264, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.539724] env[63028]: DEBUG oslo_concurrency.lockutils [req-50e82e0b-0c51-4093-a6c5-c5cb2b719ef8 req-b665acfc-6060-44bc-9cb3-20f956952fba service nova] Releasing lock "refresh_cache-2add1602-122e-41d7-af83-b71d8dab9288" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1087.549494] env[63028]: DEBUG nova.network.neutron [None req-ed5ccc95-c538-41b6-beda-16d461eedf6f tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: 6865f832-d409-4b9b-8b6c-33b0bf07d2b2] Instance cache missing network info. {{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1087.558462] env[63028]: DEBUG nova.compute.manager [None req-9fd4d01a-3dac-4bb3-843a-ce388315f026 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: a7ff444e-43bc-4925-9754-86ff30de6751] Start building block device mappings for instance. 
{{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1087.578757] env[63028]: DEBUG nova.compute.manager [req-b495bc93-c70c-4bbc-8cbc-0f09dca5c255 req-270711b3-a6a8-4197-ad33-c5b4331800f1 service nova] [instance: 6865f832-d409-4b9b-8b6c-33b0bf07d2b2] Received event network-vif-plugged-4c30382c-2b94-4990-b3d6-533480eb847b {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1087.579106] env[63028]: DEBUG oslo_concurrency.lockutils [req-b495bc93-c70c-4bbc-8cbc-0f09dca5c255 req-270711b3-a6a8-4197-ad33-c5b4331800f1 service nova] Acquiring lock "6865f832-d409-4b9b-8b6c-33b0bf07d2b2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1087.579356] env[63028]: DEBUG oslo_concurrency.lockutils [req-b495bc93-c70c-4bbc-8cbc-0f09dca5c255 req-270711b3-a6a8-4197-ad33-c5b4331800f1 service nova] Lock "6865f832-d409-4b9b-8b6c-33b0bf07d2b2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1087.579423] env[63028]: DEBUG oslo_concurrency.lockutils [req-b495bc93-c70c-4bbc-8cbc-0f09dca5c255 req-270711b3-a6a8-4197-ad33-c5b4331800f1 service nova] Lock "6865f832-d409-4b9b-8b6c-33b0bf07d2b2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1087.579692] env[63028]: DEBUG nova.compute.manager [req-b495bc93-c70c-4bbc-8cbc-0f09dca5c255 req-270711b3-a6a8-4197-ad33-c5b4331800f1 service nova] [instance: 6865f832-d409-4b9b-8b6c-33b0bf07d2b2] No waiting events found dispatching network-vif-plugged-4c30382c-2b94-4990-b3d6-533480eb847b {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1087.579747] env[63028]: WARNING nova.compute.manager [req-b495bc93-c70c-4bbc-8cbc-0f09dca5c255 req-270711b3-a6a8-4197-ad33-c5b4331800f1 service nova] [instance: 6865f832-d409-4b9b-8b6c-33b0bf07d2b2] Received unexpected event network-vif-plugged-4c30382c-2b94-4990-b3d6-533480eb847b for instance with vm_state building and task_state spawning. [ 1087.579917] env[63028]: DEBUG nova.compute.manager [req-b495bc93-c70c-4bbc-8cbc-0f09dca5c255 req-270711b3-a6a8-4197-ad33-c5b4331800f1 service nova] [instance: 6865f832-d409-4b9b-8b6c-33b0bf07d2b2] Received event network-changed-4c30382c-2b94-4990-b3d6-533480eb847b {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1087.580056] env[63028]: DEBUG nova.compute.manager [req-b495bc93-c70c-4bbc-8cbc-0f09dca5c255 req-270711b3-a6a8-4197-ad33-c5b4331800f1 service nova] [instance: 6865f832-d409-4b9b-8b6c-33b0bf07d2b2] Refreshing instance network info cache due to event network-changed-4c30382c-2b94-4990-b3d6-533480eb847b. 
{{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1087.580228] env[63028]: DEBUG oslo_concurrency.lockutils [req-b495bc93-c70c-4bbc-8cbc-0f09dca5c255 req-270711b3-a6a8-4197-ad33-c5b4331800f1 service nova] Acquiring lock "refresh_cache-6865f832-d409-4b9b-8b6c-33b0bf07d2b2" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1087.658259] env[63028]: DEBUG oslo_vmware.api [None req-77578b6f-e071-485a-b276-8efcef9701e9 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2736263, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.856735] env[63028]: DEBUG nova.network.neutron [None req-ed5ccc95-c538-41b6-beda-16d461eedf6f tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: 6865f832-d409-4b9b-8b6c-33b0bf07d2b2] Updating instance_info_cache with network_info: [{"id": "4c30382c-2b94-4990-b3d6-533480eb847b", "address": "fa:16:3e:64:a0:ec", "network": {"id": "063de6d9-43e6-4adb-88dc-d4fe17058488", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-659483097-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68de7445caeb4381b9e68c685ccb5e0b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b356db78-99c7-4464-822c-fc7e193f7878", "external-id": "nsx-vlan-transportzone-231", "segmentation_id": 231, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4c30382c-2b", "ovs_interfaceid": "4c30382c-2b94-4990-b3d6-533480eb847b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1087.889033] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2736261, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.998605] env[63028]: DEBUG nova.scheduler.client.report [None req-eb531fce-c8e6-4b76-8558-fd7370aa3526 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1088.017621] env[63028]: DEBUG oslo_vmware.api [None req-18cab1b2-a93d-4707-a623-a0b75ca018c2 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2736264, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070681} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1088.019805] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-18cab1b2-a93d-4707-a623-a0b75ca018c2 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: 899496ae-8463-42e0-a287-b141d956fa0a] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1088.021062] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16bcd381-ab50-4883-8b3c-237bce6f40b0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.050278] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-18cab1b2-a93d-4707-a623-a0b75ca018c2 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: 899496ae-8463-42e0-a287-b141d956fa0a] Reconfiguring VM instance instance-0000006a to attach disk [datastore1] 899496ae-8463-42e0-a287-b141d956fa0a/899496ae-8463-42e0-a287-b141d956fa0a.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1088.050912] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ab0f140c-5901-4477-8816-b61e07fa5450 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.085987] env[63028]: DEBUG oslo_vmware.api [None req-18cab1b2-a93d-4707-a623-a0b75ca018c2 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Waiting for the task: (returnval){ [ 1088.085987] env[63028]: value = "task-2736266" [ 1088.085987] env[63028]: _type = "Task" [ 1088.085987] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1088.157147] env[63028]: DEBUG oslo_vmware.api [None req-77578b6f-e071-485a-b276-8efcef9701e9 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2736263, 'name': PowerOnVM_Task, 'duration_secs': 0.586343} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1088.157505] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-77578b6f-e071-485a-b276-8efcef9701e9 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: 1f8415cc-f544-4c89-9863-43d5ae9144e8] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1088.157788] env[63028]: INFO nova.compute.manager [None req-77578b6f-e071-485a-b276-8efcef9701e9 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: 1f8415cc-f544-4c89-9863-43d5ae9144e8] Took 9.31 seconds to spawn the instance on the hypervisor. [ 1088.158068] env[63028]: DEBUG nova.compute.manager [None req-77578b6f-e071-485a-b276-8efcef9701e9 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: 1f8415cc-f544-4c89-9863-43d5ae9144e8] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1088.159399] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6885aa86-0c9a-4cb1-b64d-83a82eacbde7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.359337] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ed5ccc95-c538-41b6-beda-16d461eedf6f tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Releasing lock "refresh_cache-6865f832-d409-4b9b-8b6c-33b0bf07d2b2" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1088.359743] env[63028]: DEBUG nova.compute.manager [None req-ed5ccc95-c538-41b6-beda-16d461eedf6f tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: 6865f832-d409-4b9b-8b6c-33b0bf07d2b2] Instance network_info: |[{"id": "4c30382c-2b94-4990-b3d6-533480eb847b", "address": "fa:16:3e:64:a0:ec", "network": {"id": "063de6d9-43e6-4adb-88dc-d4fe17058488", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-659483097-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68de7445caeb4381b9e68c685ccb5e0b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b356db78-99c7-4464-822c-fc7e193f7878", "external-id": "nsx-vlan-transportzone-231", "segmentation_id": 231, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4c30382c-2b", "ovs_interfaceid": "4c30382c-2b94-4990-b3d6-533480eb847b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1088.360128] env[63028]: DEBUG oslo_concurrency.lockutils [req-b495bc93-c70c-4bbc-8cbc-0f09dca5c255 req-270711b3-a6a8-4197-ad33-c5b4331800f1 service nova] Acquired lock "refresh_cache-6865f832-d409-4b9b-8b6c-33b0bf07d2b2" {{(pid=63028) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1088.360413] env[63028]: DEBUG nova.network.neutron [req-b495bc93-c70c-4bbc-8cbc-0f09dca5c255 req-270711b3-a6a8-4197-ad33-c5b4331800f1 service nova] [instance: 6865f832-d409-4b9b-8b6c-33b0bf07d2b2] Refreshing network info cache for port 4c30382c-2b94-4990-b3d6-533480eb847b {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1088.361709] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-ed5ccc95-c538-41b6-beda-16d461eedf6f tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: 6865f832-d409-4b9b-8b6c-33b0bf07d2b2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:64:a0:ec', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b356db78-99c7-4464-822c-fc7e193f7878', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4c30382c-2b94-4990-b3d6-533480eb847b', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1088.369872] env[63028]: DEBUG oslo.service.loopingcall [None req-ed5ccc95-c538-41b6-beda-16d461eedf6f tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1088.371727] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6865f832-d409-4b9b-8b6c-33b0bf07d2b2] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1088.372412] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5522c4e8-c4bc-4dac-806d-9d26cbb5bc58 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.401522] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2736261, 'name': CreateVM_Task, 'duration_secs': 1.582421} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1088.402868] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2add1602-122e-41d7-af83-b71d8dab9288] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1088.403113] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1088.403113] env[63028]: value = "task-2736267" [ 1088.403113] env[63028]: _type = "Task" [ 1088.403113] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1088.403819] env[63028]: DEBUG oslo_concurrency.lockutils [None req-807df1ad-a56e-4542-a89e-860ad319f30c tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1088.403996] env[63028]: DEBUG oslo_concurrency.lockutils [None req-807df1ad-a56e-4542-a89e-860ad319f30c tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1088.404376] env[63028]: DEBUG oslo_concurrency.lockutils [None req-807df1ad-a56e-4542-a89e-860ad319f30c tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1088.404748] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-198713d0-5e7c-4e33-b0e1-a9ef6ca27e34 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.416844] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2736267, 'name': CreateVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.418401] env[63028]: DEBUG oslo_vmware.api [None req-807df1ad-a56e-4542-a89e-860ad319f30c tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] Waiting for the task: (returnval){ [ 1088.418401] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52b2251f-84d3-31fe-2807-cfc0ef5dbbd9" [ 1088.418401] env[63028]: _type = "Task" [ 1088.418401] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1088.428760] env[63028]: DEBUG oslo_vmware.api [None req-807df1ad-a56e-4542-a89e-860ad319f30c tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52b2251f-84d3-31fe-2807-cfc0ef5dbbd9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.508976] env[63028]: DEBUG oslo_concurrency.lockutils [None req-eb531fce-c8e6-4b76-8558-fd7370aa3526 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.964s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1088.511517] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ee257483-2758-47c4-bbf1-8065e5a2447c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.704s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1088.511840] env[63028]: DEBUG nova.objects.instance [None req-ee257483-2758-47c4-bbf1-8065e5a2447c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Lazy-loading 'resources' on Instance uuid 56e6ade9-893b-4c85-b0b8-e9f7b12cbad6 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1088.525398] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-0aeaf461-39d1-4229-b36e-ab90d73f7d15 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] [instance: f804ec95-0b97-4960-844d-b678b97fc401] Volume attach. Driver type: vmdk {{(pid=63028) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1088.525624] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-0aeaf461-39d1-4229-b36e-ab90d73f7d15 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] [instance: f804ec95-0b97-4960-844d-b678b97fc401] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-550867', 'volume_id': '79c32ea7-4ed7-4f3a-9a4d-e052462647fc', 'name': 'volume-79c32ea7-4ed7-4f3a-9a4d-e052462647fc', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'f804ec95-0b97-4960-844d-b678b97fc401', 'attached_at': '', 'detached_at': '', 'volume_id': '79c32ea7-4ed7-4f3a-9a4d-e052462647fc', 'serial': '79c32ea7-4ed7-4f3a-9a4d-e052462647fc'} {{(pid=63028) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1088.526600] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00cd8c9c-f1e9-4696-9147-872ddf0d854f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.533533] env[63028]: INFO nova.scheduler.client.report [None req-eb531fce-c8e6-4b76-8558-fd7370aa3526 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Deleted allocations for instance 2c2fb165-8906-4d42-a839-89ea6c8814ab [ 1088.551580] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22a92efe-5a8c-4465-8f2e-30ee8003a7c8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.578592] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-0aeaf461-39d1-4229-b36e-ab90d73f7d15 tempest-AttachVolumeTestJSON-1048691326 
tempest-AttachVolumeTestJSON-1048691326-project-member] [instance: f804ec95-0b97-4960-844d-b678b97fc401] Reconfiguring VM instance instance-00000062 to attach disk [datastore1] volume-79c32ea7-4ed7-4f3a-9a4d-e052462647fc/volume-79c32ea7-4ed7-4f3a-9a4d-e052462647fc.vmdk or device None with type thin {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1088.578885] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ae05f260-061f-4f9a-a7f1-cff86238e1e7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.592859] env[63028]: DEBUG nova.compute.manager [None req-9fd4d01a-3dac-4bb3-843a-ce388315f026 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: a7ff444e-43bc-4925-9754-86ff30de6751] Start spawning the instance on the hypervisor. {{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1088.605780] env[63028]: DEBUG oslo_vmware.api [None req-18cab1b2-a93d-4707-a623-a0b75ca018c2 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2736266, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.607069] env[63028]: DEBUG oslo_vmware.api [None req-0aeaf461-39d1-4229-b36e-ab90d73f7d15 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Waiting for the task: (returnval){ [ 1088.607069] env[63028]: value = "task-2736268" [ 1088.607069] env[63028]: _type = "Task" [ 1088.607069] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1088.615688] env[63028]: DEBUG oslo_vmware.api [None req-0aeaf461-39d1-4229-b36e-ab90d73f7d15 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Task: {'id': task-2736268, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.623875] env[63028]: DEBUG nova.virt.hardware [None req-9fd4d01a-3dac-4bb3-843a-ce388315f026 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1088.624183] env[63028]: DEBUG nova.virt.hardware [None req-9fd4d01a-3dac-4bb3-843a-ce388315f026 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1088.624413] env[63028]: DEBUG nova.virt.hardware [None req-9fd4d01a-3dac-4bb3-843a-ce388315f026 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1088.624598] env[63028]: DEBUG nova.virt.hardware [None req-9fd4d01a-3dac-4bb3-843a-ce388315f026 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1088.624752] env[63028]: DEBUG nova.virt.hardware [None req-9fd4d01a-3dac-4bb3-843a-ce388315f026 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1088.624899] env[63028]: DEBUG nova.virt.hardware [None req-9fd4d01a-3dac-4bb3-843a-ce388315f026 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1088.625121] env[63028]: DEBUG nova.virt.hardware [None req-9fd4d01a-3dac-4bb3-843a-ce388315f026 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1088.625292] env[63028]: DEBUG nova.virt.hardware [None req-9fd4d01a-3dac-4bb3-843a-ce388315f026 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 
1088.626010] env[63028]: DEBUG nova.virt.hardware [None req-9fd4d01a-3dac-4bb3-843a-ce388315f026 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1088.626010] env[63028]: DEBUG nova.virt.hardware [None req-9fd4d01a-3dac-4bb3-843a-ce388315f026 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1088.626120] env[63028]: DEBUG nova.virt.hardware [None req-9fd4d01a-3dac-4bb3-843a-ce388315f026 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1088.627260] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93e047ac-d373-4bca-bbd1-55eccd9f3db3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.637494] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-482c7010-98a1-4208-a6b9-603f2df70617 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.679265] env[63028]: INFO nova.compute.manager [None req-77578b6f-e071-485a-b276-8efcef9701e9 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: 1f8415cc-f544-4c89-9863-43d5ae9144e8] Took 31.23 seconds to build instance. [ 1088.739586] env[63028]: INFO nova.compute.manager [None req-c36417a8-c12b-479e-bb47-c4dc2a652aac tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 60d18f14-536a-4b0f-912b-21f3f5a30d28] Rebuilding instance [ 1088.776898] env[63028]: DEBUG nova.compute.manager [None req-c36417a8-c12b-479e-bb47-c4dc2a652aac tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 60d18f14-536a-4b0f-912b-21f3f5a30d28] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1088.777743] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27fb38b9-79b0-4caf-82fb-7d4aa58e30d5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.918708] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2736267, 'name': CreateVM_Task} progress is 25%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.928437] env[63028]: DEBUG oslo_vmware.api [None req-807df1ad-a56e-4542-a89e-860ad319f30c tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52b2251f-84d3-31fe-2807-cfc0ef5dbbd9, 'name': SearchDatastore_Task, 'duration_secs': 0.030478} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1088.929175] env[63028]: DEBUG oslo_concurrency.lockutils [None req-807df1ad-a56e-4542-a89e-860ad319f30c tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1088.929175] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-807df1ad-a56e-4542-a89e-860ad319f30c tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] [instance: 2add1602-122e-41d7-af83-b71d8dab9288] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1088.929372] env[63028]: DEBUG oslo_concurrency.lockutils [None req-807df1ad-a56e-4542-a89e-860ad319f30c tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1088.929467] env[63028]: DEBUG oslo_concurrency.lockutils [None req-807df1ad-a56e-4542-a89e-860ad319f30c tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1088.929737] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-807df1ad-a56e-4542-a89e-860ad319f30c tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1088.930033] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0a5f334f-8297-4b24-a67d-69c23ba7a6f8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.942619] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-807df1ad-a56e-4542-a89e-860ad319f30c tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1088.942804] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-807df1ad-a56e-4542-a89e-860ad319f30c tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1088.943649] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9bf036bd-4408-41ae-ab46-261d4fc0ea14 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.950108] env[63028]: DEBUG oslo_vmware.api [None req-807df1ad-a56e-4542-a89e-860ad319f30c tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] Waiting for the task: (returnval){ [ 1088.950108] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]525e29ec-a5bd-f11b-093a-c3bd318a6536" [ 1088.950108] env[63028]: _type = "Task" [ 1088.950108] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1088.962231] env[63028]: DEBUG oslo_vmware.api [None req-807df1ad-a56e-4542-a89e-860ad319f30c tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]525e29ec-a5bd-f11b-093a-c3bd318a6536, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.058141] env[63028]: DEBUG oslo_concurrency.lockutils [None req-eb531fce-c8e6-4b76-8558-fd7370aa3526 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Lock "2c2fb165-8906-4d42-a839-89ea6c8814ab" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.913s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1089.108637] env[63028]: DEBUG oslo_vmware.api [None req-18cab1b2-a93d-4707-a623-a0b75ca018c2 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2736266, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.123293] env[63028]: DEBUG oslo_vmware.api [None req-0aeaf461-39d1-4229-b36e-ab90d73f7d15 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Task: {'id': task-2736268, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.181994] env[63028]: DEBUG oslo_concurrency.lockutils [None req-77578b6f-e071-485a-b276-8efcef9701e9 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Lock "1f8415cc-f544-4c89-9863-43d5ae9144e8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.742s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1089.339570] env[63028]: DEBUG nova.network.neutron [req-b495bc93-c70c-4bbc-8cbc-0f09dca5c255 req-270711b3-a6a8-4197-ad33-c5b4331800f1 service nova] [instance: 6865f832-d409-4b9b-8b6c-33b0bf07d2b2] Updated VIF entry in instance network info cache for port 4c30382c-2b94-4990-b3d6-533480eb847b. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1089.340580] env[63028]: DEBUG nova.network.neutron [req-b495bc93-c70c-4bbc-8cbc-0f09dca5c255 req-270711b3-a6a8-4197-ad33-c5b4331800f1 service nova] [instance: 6865f832-d409-4b9b-8b6c-33b0bf07d2b2] Updating instance_info_cache with network_info: [{"id": "4c30382c-2b94-4990-b3d6-533480eb847b", "address": "fa:16:3e:64:a0:ec", "network": {"id": "063de6d9-43e6-4adb-88dc-d4fe17058488", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-659483097-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68de7445caeb4381b9e68c685ccb5e0b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b356db78-99c7-4464-822c-fc7e193f7878", "external-id": "nsx-vlan-transportzone-231", "segmentation_id": 231, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4c30382c-2b", "ovs_interfaceid": "4c30382c-2b94-4990-b3d6-533480eb847b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1089.357296] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac435363-4905-4ba6-9600-687e3cea7a95 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.363244] env[63028]: DEBUG nova.network.neutron [None req-9fd4d01a-3dac-4bb3-843a-ce388315f026 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: a7ff444e-43bc-4925-9754-86ff30de6751] Successfully updated port: b7d0f379-6cd1-4efa-beb8-c2a2f3ef548c {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1089.371986] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06d336c9-5a14-45e6-9273-92828f7e87b5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.415533] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28e2752a-b486-4fd3-a4c3-d30bd1fe0f37 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.420179] env[63028]: DEBUG nova.compute.manager [req-13096b62-6897-40f7-90f4-50066fec7b7f req-cca087af-59f2-49ad-b347-30d71ad2baec service nova] [instance: a7ff444e-43bc-4925-9754-86ff30de6751] Received event network-vif-plugged-b7d0f379-6cd1-4efa-beb8-c2a2f3ef548c {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1089.420534] env[63028]: DEBUG oslo_concurrency.lockutils [req-13096b62-6897-40f7-90f4-50066fec7b7f req-cca087af-59f2-49ad-b347-30d71ad2baec service nova] Acquiring lock "a7ff444e-43bc-4925-9754-86ff30de6751-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1089.420879] env[63028]: DEBUG oslo_concurrency.lockutils [req-13096b62-6897-40f7-90f4-50066fec7b7f req-cca087af-59f2-49ad-b347-30d71ad2baec service nova] Lock "a7ff444e-43bc-4925-9754-86ff30de6751-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1089.421189] env[63028]: DEBUG oslo_concurrency.lockutils [req-13096b62-6897-40f7-90f4-50066fec7b7f req-cca087af-59f2-49ad-b347-30d71ad2baec service nova] Lock "a7ff444e-43bc-4925-9754-86ff30de6751-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1089.421493] env[63028]: DEBUG nova.compute.manager [req-13096b62-6897-40f7-90f4-50066fec7b7f req-cca087af-59f2-49ad-b347-30d71ad2baec service nova] [instance: a7ff444e-43bc-4925-9754-86ff30de6751] No waiting events found dispatching network-vif-plugged-b7d0f379-6cd1-4efa-beb8-c2a2f3ef548c {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1089.421785] env[63028]: WARNING nova.compute.manager [req-13096b62-6897-40f7-90f4-50066fec7b7f req-cca087af-59f2-49ad-b347-30d71ad2baec service nova] [instance: a7ff444e-43bc-4925-9754-86ff30de6751] Received unexpected event network-vif-plugged-b7d0f379-6cd1-4efa-beb8-c2a2f3ef548c for instance with vm_state building and task_state spawning. [ 1089.436773] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ebfd53f-a12d-4172-9815-55ac4fe1f018 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.443408] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2736267, 'name': CreateVM_Task} progress is 25%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.456918] env[63028]: DEBUG nova.compute.provider_tree [None req-ee257483-2758-47c4-bbf1-8065e5a2447c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1089.470543] env[63028]: DEBUG oslo_vmware.api [None req-807df1ad-a56e-4542-a89e-860ad319f30c tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]525e29ec-a5bd-f11b-093a-c3bd318a6536, 'name': SearchDatastore_Task, 'duration_secs': 0.011415} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1089.472929] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-49ef9aab-aaec-43e4-a059-ec62905f624d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.481191] env[63028]: DEBUG oslo_vmware.api [None req-807df1ad-a56e-4542-a89e-860ad319f30c tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] Waiting for the task: (returnval){ [ 1089.481191] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52a3394a-7bf8-fb74-83c9-2ab9c3d5bd14" [ 1089.481191] env[63028]: _type = "Task" [ 1089.481191] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.496922] env[63028]: DEBUG oslo_vmware.api [None req-807df1ad-a56e-4542-a89e-860ad319f30c tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52a3394a-7bf8-fb74-83c9-2ab9c3d5bd14, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.606328] env[63028]: DEBUG oslo_vmware.api [None req-18cab1b2-a93d-4707-a623-a0b75ca018c2 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2736266, 'name': ReconfigVM_Task, 'duration_secs': 1.413786} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1089.606621] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-18cab1b2-a93d-4707-a623-a0b75ca018c2 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: 899496ae-8463-42e0-a287-b141d956fa0a] Reconfigured VM instance instance-0000006a to attach disk [datastore1] 899496ae-8463-42e0-a287-b141d956fa0a/899496ae-8463-42e0-a287-b141d956fa0a.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1089.607370] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-12051d27-cad2-403c-8d65-2a59e157ff79 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.618861] env[63028]: DEBUG oslo_vmware.api [None req-0aeaf461-39d1-4229-b36e-ab90d73f7d15 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Task: {'id': task-2736268, 'name': ReconfigVM_Task, 'duration_secs': 0.888098} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1089.620286] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-0aeaf461-39d1-4229-b36e-ab90d73f7d15 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] [instance: f804ec95-0b97-4960-844d-b678b97fc401] Reconfigured VM instance instance-00000062 to attach disk [datastore1] volume-79c32ea7-4ed7-4f3a-9a4d-e052462647fc/volume-79c32ea7-4ed7-4f3a-9a4d-e052462647fc.vmdk or device None with type thin {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1089.626070] env[63028]: DEBUG oslo_vmware.api [None req-18cab1b2-a93d-4707-a623-a0b75ca018c2 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Waiting for the task: (returnval){ [ 1089.626070] env[63028]: value = "task-2736269" [ 1089.626070] env[63028]: _type = "Task" [ 1089.626070] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.626295] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ad46574e-4d5d-49b9-8bc3-a4c2b5acf545 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.646204] env[63028]: DEBUG oslo_vmware.api [None req-18cab1b2-a93d-4707-a623-a0b75ca018c2 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2736269, 'name': Rename_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.647754] env[63028]: DEBUG oslo_vmware.api [None req-0aeaf461-39d1-4229-b36e-ab90d73f7d15 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Waiting for the task: (returnval){ [ 1089.647754] env[63028]: value = "task-2736270" [ 1089.647754] env[63028]: _type = "Task" [ 1089.647754] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.656482] env[63028]: DEBUG oslo_vmware.api [None req-0aeaf461-39d1-4229-b36e-ab90d73f7d15 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Task: {'id': task-2736270, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.763168] env[63028]: DEBUG nova.compute.manager [req-38323e1f-1ef0-4952-833f-0caafcc4e7a9 req-72ab0ec8-80c3-4c42-b932-2a44deb91db0 service nova] [instance: a7ff444e-43bc-4925-9754-86ff30de6751] Received event network-changed-b7d0f379-6cd1-4efa-beb8-c2a2f3ef548c {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1089.763478] env[63028]: DEBUG nova.compute.manager [req-38323e1f-1ef0-4952-833f-0caafcc4e7a9 req-72ab0ec8-80c3-4c42-b932-2a44deb91db0 service nova] [instance: a7ff444e-43bc-4925-9754-86ff30de6751] Refreshing instance network info cache due to event network-changed-b7d0f379-6cd1-4efa-beb8-c2a2f3ef548c. 
{{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1089.763795] env[63028]: DEBUG oslo_concurrency.lockutils [req-38323e1f-1ef0-4952-833f-0caafcc4e7a9 req-72ab0ec8-80c3-4c42-b932-2a44deb91db0 service nova] Acquiring lock "refresh_cache-a7ff444e-43bc-4925-9754-86ff30de6751" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1089.764291] env[63028]: DEBUG oslo_concurrency.lockutils [req-38323e1f-1ef0-4952-833f-0caafcc4e7a9 req-72ab0ec8-80c3-4c42-b932-2a44deb91db0 service nova] Acquired lock "refresh_cache-a7ff444e-43bc-4925-9754-86ff30de6751" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1089.764291] env[63028]: DEBUG nova.network.neutron [req-38323e1f-1ef0-4952-833f-0caafcc4e7a9 req-72ab0ec8-80c3-4c42-b932-2a44deb91db0 service nova] [instance: a7ff444e-43bc-4925-9754-86ff30de6751] Refreshing network info cache for port b7d0f379-6cd1-4efa-beb8-c2a2f3ef548c {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1089.792383] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-c36417a8-c12b-479e-bb47-c4dc2a652aac tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 60d18f14-536a-4b0f-912b-21f3f5a30d28] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1089.792695] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-768ef6f0-d4e3-4326-9cc0-e0cbf5b3c81a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.801246] env[63028]: DEBUG oslo_vmware.api [None req-c36417a8-c12b-479e-bb47-c4dc2a652aac tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Waiting for the task: (returnval){ [ 1089.801246] env[63028]: value = "task-2736271" [ 1089.801246] env[63028]: _type = "Task" [ 1089.801246] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.812057] env[63028]: DEBUG oslo_vmware.api [None req-c36417a8-c12b-479e-bb47-c4dc2a652aac tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736271, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.845276] env[63028]: DEBUG oslo_concurrency.lockutils [req-b495bc93-c70c-4bbc-8cbc-0f09dca5c255 req-270711b3-a6a8-4197-ad33-c5b4331800f1 service nova] Releasing lock "refresh_cache-6865f832-d409-4b9b-8b6c-33b0bf07d2b2" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1089.867573] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9fd4d01a-3dac-4bb3-843a-ce388315f026 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Acquiring lock "refresh_cache-a7ff444e-43bc-4925-9754-86ff30de6751" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1089.924898] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2736267, 'name': CreateVM_Task, 'duration_secs': 1.518068} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1089.925272] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6865f832-d409-4b9b-8b6c-33b0bf07d2b2] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1089.926036] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ed5ccc95-c538-41b6-beda-16d461eedf6f tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1089.926247] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ed5ccc95-c538-41b6-beda-16d461eedf6f tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1089.926655] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ed5ccc95-c538-41b6-beda-16d461eedf6f tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1089.926913] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-10efd7da-a8d0-4678-b404-04bf7d881c3f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.933105] env[63028]: DEBUG oslo_vmware.api [None req-ed5ccc95-c538-41b6-beda-16d461eedf6f tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Waiting for the task: (returnval){ [ 1089.933105] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52eb939c-bdd1-963b-7bf8-70984ff13614" [ 1089.933105] env[63028]: _type = "Task" [ 1089.933105] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.944216] env[63028]: DEBUG oslo_vmware.api [None req-ed5ccc95-c538-41b6-beda-16d461eedf6f tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52eb939c-bdd1-963b-7bf8-70984ff13614, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.963414] env[63028]: DEBUG nova.scheduler.client.report [None req-ee257483-2758-47c4-bbf1-8065e5a2447c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1089.993399] env[63028]: DEBUG oslo_vmware.api [None req-807df1ad-a56e-4542-a89e-860ad319f30c tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52a3394a-7bf8-fb74-83c9-2ab9c3d5bd14, 'name': SearchDatastore_Task, 'duration_secs': 0.017624} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1089.993679] env[63028]: DEBUG oslo_concurrency.lockutils [None req-807df1ad-a56e-4542-a89e-860ad319f30c tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1089.993937] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-807df1ad-a56e-4542-a89e-860ad319f30c tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] 2add1602-122e-41d7-af83-b71d8dab9288/2add1602-122e-41d7-af83-b71d8dab9288.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1089.994219] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6b084ba3-5b5b-4845-b945-a43b18fb9a50 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.002304] env[63028]: DEBUG oslo_vmware.api [None req-807df1ad-a56e-4542-a89e-860ad319f30c tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] Waiting for the task: (returnval){ [ 1090.002304] env[63028]: value = "task-2736272" [ 1090.002304] env[63028]: _type = "Task" [ 1090.002304] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.009824] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00c05995-7aa5-4cf8-a87c-0ec7f94a0ced {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.016351] env[63028]: DEBUG oslo_vmware.api [None req-807df1ad-a56e-4542-a89e-860ad319f30c tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] Task: {'id': task-2736272, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.021673] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-4e806310-69c4-4ee0-b7b9-cabe5d97cb28 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: 1f8415cc-f544-4c89-9863-43d5ae9144e8] Suspending the VM {{(pid=63028) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1090.022043] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-5f524a44-92dc-480a-b6b3-a6454e368ce8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.032443] env[63028]: DEBUG oslo_vmware.api [None req-4e806310-69c4-4ee0-b7b9-cabe5d97cb28 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Waiting for the task: (returnval){ [ 1090.032443] env[63028]: value = "task-2736274" [ 1090.032443] env[63028]: _type = "Task" [ 1090.032443] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.043547] env[63028]: DEBUG oslo_vmware.api [None req-4e806310-69c4-4ee0-b7b9-cabe5d97cb28 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2736274, 'name': SuspendVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.078019] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5388c69a-74fc-4be8-a33f-b9fb8d66eea9 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Acquiring lock "b3930760-1888-4f80-85d8-65120a25f275" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1090.078397] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5388c69a-74fc-4be8-a33f-b9fb8d66eea9 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Lock "b3930760-1888-4f80-85d8-65120a25f275" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1090.078839] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5388c69a-74fc-4be8-a33f-b9fb8d66eea9 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Acquiring lock "b3930760-1888-4f80-85d8-65120a25f275-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1090.078839] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5388c69a-74fc-4be8-a33f-b9fb8d66eea9 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Lock "b3930760-1888-4f80-85d8-65120a25f275-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1090.079009] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5388c69a-74fc-4be8-a33f-b9fb8d66eea9 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Lock "b3930760-1888-4f80-85d8-65120a25f275-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1090.081347] env[63028]: INFO nova.compute.manager [None req-5388c69a-74fc-4be8-a33f-b9fb8d66eea9 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: b3930760-1888-4f80-85d8-65120a25f275] Terminating instance [ 1090.147378] env[63028]: DEBUG oslo_vmware.api [None req-18cab1b2-a93d-4707-a623-a0b75ca018c2 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2736269, 'name': Rename_Task, 'duration_secs': 0.287664} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.147746] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-18cab1b2-a93d-4707-a623-a0b75ca018c2 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: 899496ae-8463-42e0-a287-b141d956fa0a] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1090.148047] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4f77df2a-242c-4ff6-99e3-0c40c86e1dbd {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.159983] env[63028]: DEBUG oslo_vmware.api [None req-0aeaf461-39d1-4229-b36e-ab90d73f7d15 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Task: {'id': task-2736270, 'name': ReconfigVM_Task, 'duration_secs': 0.314035} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.161450] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-0aeaf461-39d1-4229-b36e-ab90d73f7d15 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] [instance: f804ec95-0b97-4960-844d-b678b97fc401] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-550867', 'volume_id': '79c32ea7-4ed7-4f3a-9a4d-e052462647fc', 'name': 'volume-79c32ea7-4ed7-4f3a-9a4d-e052462647fc', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'f804ec95-0b97-4960-844d-b678b97fc401', 'attached_at': '', 'detached_at': '', 'volume_id': '79c32ea7-4ed7-4f3a-9a4d-e052462647fc', 'serial': '79c32ea7-4ed7-4f3a-9a4d-e052462647fc'} {{(pid=63028) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1090.164128] env[63028]: DEBUG oslo_vmware.api [None req-18cab1b2-a93d-4707-a623-a0b75ca018c2 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Waiting for the task: (returnval){ [ 1090.164128] env[63028]: value = "task-2736275" [ 1090.164128] env[63028]: _type = "Task" [ 1090.164128] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.181217] env[63028]: DEBUG oslo_vmware.api [None req-18cab1b2-a93d-4707-a623-a0b75ca018c2 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2736275, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.313177] env[63028]: DEBUG oslo_vmware.api [None req-c36417a8-c12b-479e-bb47-c4dc2a652aac tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736271, 'name': PowerOffVM_Task, 'duration_secs': 0.283149} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.313513] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-c36417a8-c12b-479e-bb47-c4dc2a652aac tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 60d18f14-536a-4b0f-912b-21f3f5a30d28] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1090.313789] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c36417a8-c12b-479e-bb47-c4dc2a652aac tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 60d18f14-536a-4b0f-912b-21f3f5a30d28] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1090.314731] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b81e2946-0a6d-4a95-98bc-0511e4c466f8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.318300] env[63028]: DEBUG nova.network.neutron [req-38323e1f-1ef0-4952-833f-0caafcc4e7a9 req-72ab0ec8-80c3-4c42-b932-2a44deb91db0 service nova] [instance: a7ff444e-43bc-4925-9754-86ff30de6751] Instance cache missing network info. {{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1090.326215] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c36417a8-c12b-479e-bb47-c4dc2a652aac tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 60d18f14-536a-4b0f-912b-21f3f5a30d28] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1090.326568] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f4705d9d-6b99-436c-bff1-b7eb5ffd140b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.406450] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c36417a8-c12b-479e-bb47-c4dc2a652aac tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 60d18f14-536a-4b0f-912b-21f3f5a30d28] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1090.406889] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c36417a8-c12b-479e-bb47-c4dc2a652aac tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 60d18f14-536a-4b0f-912b-21f3f5a30d28] Deleting contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1090.407215] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-c36417a8-c12b-479e-bb47-c4dc2a652aac tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Deleting the datastore file [datastore1] 60d18f14-536a-4b0f-912b-21f3f5a30d28 {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1090.408416] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-50a89ca3-086e-4755-85f4-04afbd9a4fc7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.418093] env[63028]: DEBUG oslo_vmware.api [None req-c36417a8-c12b-479e-bb47-c4dc2a652aac tempest-ServerActionsTestJSON-83366727 
tempest-ServerActionsTestJSON-83366727-project-member] Waiting for the task: (returnval){ [ 1090.418093] env[63028]: value = "task-2736277" [ 1090.418093] env[63028]: _type = "Task" [ 1090.418093] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.431528] env[63028]: DEBUG oslo_vmware.api [None req-c36417a8-c12b-479e-bb47-c4dc2a652aac tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736277, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.432946] env[63028]: DEBUG nova.network.neutron [req-38323e1f-1ef0-4952-833f-0caafcc4e7a9 req-72ab0ec8-80c3-4c42-b932-2a44deb91db0 service nova] [instance: a7ff444e-43bc-4925-9754-86ff30de6751] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1090.450440] env[63028]: DEBUG oslo_vmware.api [None req-ed5ccc95-c538-41b6-beda-16d461eedf6f tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52eb939c-bdd1-963b-7bf8-70984ff13614, 'name': SearchDatastore_Task, 'duration_secs': 0.024369} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.451708] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ed5ccc95-c538-41b6-beda-16d461eedf6f tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1090.452096] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-ed5ccc95-c538-41b6-beda-16d461eedf6f tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: 6865f832-d409-4b9b-8b6c-33b0bf07d2b2] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1090.452467] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ed5ccc95-c538-41b6-beda-16d461eedf6f tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1090.452718] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ed5ccc95-c538-41b6-beda-16d461eedf6f tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1090.453010] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-ed5ccc95-c538-41b6-beda-16d461eedf6f tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1090.453732] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-66aa3c6a-6a3a-44fe-81ef-a27f56383cf2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.466143] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-ed5ccc95-c538-41b6-beda-16d461eedf6f tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1090.466143] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-ed5ccc95-c538-41b6-beda-16d461eedf6f tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1090.470242] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a703e348-09c7-499f-a1e4-2a092cbd2922 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.477209] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ee257483-2758-47c4-bbf1-8065e5a2447c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.966s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1090.481285] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5e48a89d-56e9-4d31-a71b-8616dcfca951 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.174s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1090.481572] env[63028]: DEBUG nova.objects.instance [None req-5e48a89d-56e9-4d31-a71b-8616dcfca951 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Lazy-loading 'resources' on Instance uuid 85aafadb-81d6-4687-aed1-fbe829e5f95f {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1090.487021] env[63028]: DEBUG oslo_vmware.api [None req-ed5ccc95-c538-41b6-beda-16d461eedf6f tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Waiting for the task: (returnval){ [ 1090.487021] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]525eb71a-890c-dbbb-a675-fb618e970644" [ 1090.487021] env[63028]: _type = "Task" [ 1090.487021] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.498239] env[63028]: DEBUG oslo_vmware.api [None req-ed5ccc95-c538-41b6-beda-16d461eedf6f tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]525eb71a-890c-dbbb-a675-fb618e970644, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.512464] env[63028]: DEBUG oslo_vmware.api [None req-807df1ad-a56e-4542-a89e-860ad319f30c tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] Task: {'id': task-2736272, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.513123] env[63028]: INFO nova.scheduler.client.report [None req-ee257483-2758-47c4-bbf1-8065e5a2447c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Deleted allocations for instance 56e6ade9-893b-4c85-b0b8-e9f7b12cbad6 [ 1090.543399] env[63028]: DEBUG oslo_vmware.api [None req-4e806310-69c4-4ee0-b7b9-cabe5d97cb28 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2736274, 'name': SuspendVM_Task} progress is 58%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.586642] env[63028]: DEBUG nova.compute.manager [None req-5388c69a-74fc-4be8-a33f-b9fb8d66eea9 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: b3930760-1888-4f80-85d8-65120a25f275] Start destroying the instance on the hypervisor. {{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1090.586894] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-5388c69a-74fc-4be8-a33f-b9fb8d66eea9 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: b3930760-1888-4f80-85d8-65120a25f275] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1090.587809] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e85dea8f-4698-476e-9dfd-2ddc63b5df75 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.597752] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-5388c69a-74fc-4be8-a33f-b9fb8d66eea9 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: b3930760-1888-4f80-85d8-65120a25f275] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1090.598104] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a9df2367-0d4a-40f5-b29f-d4780c99a2f8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.608069] env[63028]: DEBUG oslo_vmware.api [None req-5388c69a-74fc-4be8-a33f-b9fb8d66eea9 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Waiting for the task: (returnval){ [ 1090.608069] env[63028]: value = "task-2736278" [ 1090.608069] env[63028]: _type = "Task" [ 1090.608069] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.618474] env[63028]: DEBUG oslo_vmware.api [None req-5388c69a-74fc-4be8-a33f-b9fb8d66eea9 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2736278, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.678687] env[63028]: DEBUG oslo_vmware.api [None req-18cab1b2-a93d-4707-a623-a0b75ca018c2 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2736275, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.930515] env[63028]: DEBUG oslo_vmware.api [None req-c36417a8-c12b-479e-bb47-c4dc2a652aac tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736277, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.298259} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.930917] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-c36417a8-c12b-479e-bb47-c4dc2a652aac tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1090.931191] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c36417a8-c12b-479e-bb47-c4dc2a652aac tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 60d18f14-536a-4b0f-912b-21f3f5a30d28] Deleted contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1090.931525] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c36417a8-c12b-479e-bb47-c4dc2a652aac tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 60d18f14-536a-4b0f-912b-21f3f5a30d28] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1090.940354] env[63028]: DEBUG oslo_concurrency.lockutils [req-38323e1f-1ef0-4952-833f-0caafcc4e7a9 req-72ab0ec8-80c3-4c42-b932-2a44deb91db0 service nova] Releasing lock "refresh_cache-a7ff444e-43bc-4925-9754-86ff30de6751" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1090.940794] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9fd4d01a-3dac-4bb3-843a-ce388315f026 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Acquired lock "refresh_cache-a7ff444e-43bc-4925-9754-86ff30de6751" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1090.940984] env[63028]: DEBUG nova.network.neutron [None req-9fd4d01a-3dac-4bb3-843a-ce388315f026 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: a7ff444e-43bc-4925-9754-86ff30de6751] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1091.001792] env[63028]: DEBUG oslo_vmware.api [None req-ed5ccc95-c538-41b6-beda-16d461eedf6f tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]525eb71a-890c-dbbb-a675-fb618e970644, 'name': SearchDatastore_Task, 'duration_secs': 0.01184} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1091.002284] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-38e2a77d-2204-43b8-8e62-38f80e3232e0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.017046] env[63028]: DEBUG oslo_vmware.api [None req-ed5ccc95-c538-41b6-beda-16d461eedf6f tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Waiting for the task: (returnval){ [ 1091.017046] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]524f12a2-9643-3377-b9fe-b945c52d02a8" [ 1091.017046] env[63028]: _type = "Task" [ 1091.017046] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.029236] env[63028]: DEBUG oslo_vmware.api [None req-807df1ad-a56e-4542-a89e-860ad319f30c tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] Task: {'id': task-2736272, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.867335} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1091.033436] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ee257483-2758-47c4-bbf1-8065e5a2447c tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Lock "56e6ade9-893b-4c85-b0b8-e9f7b12cbad6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.604s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1091.035456] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-807df1ad-a56e-4542-a89e-860ad319f30c tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] 2add1602-122e-41d7-af83-b71d8dab9288/2add1602-122e-41d7-af83-b71d8dab9288.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1091.035711] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-807df1ad-a56e-4542-a89e-860ad319f30c tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] [instance: 2add1602-122e-41d7-af83-b71d8dab9288] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1091.037044] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1165c2b9-ec5b-4f4e-b191-e234aab0457f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.049652] env[63028]: DEBUG oslo_vmware.api [None req-ed5ccc95-c538-41b6-beda-16d461eedf6f tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]524f12a2-9643-3377-b9fe-b945c52d02a8, 'name': SearchDatastore_Task, 'duration_secs': 0.015182} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1091.054760] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ed5ccc95-c538-41b6-beda-16d461eedf6f tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1091.055175] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed5ccc95-c538-41b6-beda-16d461eedf6f tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] 6865f832-d409-4b9b-8b6c-33b0bf07d2b2/6865f832-d409-4b9b-8b6c-33b0bf07d2b2.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1091.056238] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-96e9fcdf-01eb-41e3-917d-03d833135c36 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.063643] env[63028]: DEBUG oslo_vmware.api [None req-4e806310-69c4-4ee0-b7b9-cabe5d97cb28 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2736274, 'name': SuspendVM_Task, 'duration_secs': 1.015346} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1091.065923] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-4e806310-69c4-4ee0-b7b9-cabe5d97cb28 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: 1f8415cc-f544-4c89-9863-43d5ae9144e8] Suspended the VM {{(pid=63028) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1091.065923] env[63028]: DEBUG nova.compute.manager [None req-4e806310-69c4-4ee0-b7b9-cabe5d97cb28 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: 1f8415cc-f544-4c89-9863-43d5ae9144e8] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1091.066382] env[63028]: DEBUG oslo_vmware.api [None req-807df1ad-a56e-4542-a89e-860ad319f30c tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] Waiting for the task: (returnval){ [ 1091.066382] env[63028]: value = "task-2736279" [ 1091.066382] env[63028]: _type = "Task" [ 1091.066382] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.069677] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd03ca24-ab1c-412c-8b67-f6d259aadb5b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.078136] env[63028]: DEBUG oslo_vmware.api [None req-ed5ccc95-c538-41b6-beda-16d461eedf6f tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Waiting for the task: (returnval){ [ 1091.078136] env[63028]: value = "task-2736280" [ 1091.078136] env[63028]: _type = "Task" [ 1091.078136] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.085650] env[63028]: DEBUG oslo_vmware.api [None req-807df1ad-a56e-4542-a89e-860ad319f30c tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] Task: {'id': task-2736279, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.106331] env[63028]: DEBUG oslo_vmware.api [None req-ed5ccc95-c538-41b6-beda-16d461eedf6f tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': task-2736280, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.119445] env[63028]: DEBUG oslo_vmware.api [None req-5388c69a-74fc-4be8-a33f-b9fb8d66eea9 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2736278, 'name': PowerOffVM_Task, 'duration_secs': 0.30203} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1091.123080] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-5388c69a-74fc-4be8-a33f-b9fb8d66eea9 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: b3930760-1888-4f80-85d8-65120a25f275] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1091.123349] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-5388c69a-74fc-4be8-a33f-b9fb8d66eea9 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: b3930760-1888-4f80-85d8-65120a25f275] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1091.124960] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ca2cb88d-94a2-4a9c-9f6d-ecd1d31eae3a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.177665] env[63028]: DEBUG oslo_vmware.api [None req-18cab1b2-a93d-4707-a623-a0b75ca018c2 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2736275, 'name': PowerOnVM_Task, 'duration_secs': 0.87597} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1091.180497] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-18cab1b2-a93d-4707-a623-a0b75ca018c2 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: 899496ae-8463-42e0-a287-b141d956fa0a] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1091.180801] env[63028]: INFO nova.compute.manager [None req-18cab1b2-a93d-4707-a623-a0b75ca018c2 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: 899496ae-8463-42e0-a287-b141d956fa0a] Took 9.84 seconds to spawn the instance on the hypervisor. 
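[annotation] The repeated "Invoking VirtualMachine.<X>_Task ..." followed by "Task: {'id': task-..., 'name': <X>_Task} progress is N%" entries above are the oslo.vmware invoke-then-poll pattern. A minimal sketch of that pattern, assuming an already-established oslo_vmware.api.VMwareAPISession named `session` (as created at driver startup) and a hypothetical VM managed-object reference `vm_ref`:

```python
def power_on_and_wait(session, vm_ref):
    # Ask vCenter to start the operation; this call returns immediately with a
    # Task managed-object reference (logged as
    # "Invoking VirtualMachine.PowerOnVM_Task with opID=...").
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    # wait_for_task() polls the task until it reaches a terminal state, which
    # produces the "progress is N%" DEBUG lines and the final
    # "completed successfully" entry; it raises if the task ends in error.
    return session.wait_for_task(task)
```

This is an illustrative sketch of the pattern visible in the log, not a copy of Nova's vm_util code.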
[ 1091.180930] env[63028]: DEBUG nova.compute.manager [None req-18cab1b2-a93d-4707-a623-a0b75ca018c2 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: 899496ae-8463-42e0-a287-b141d956fa0a] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1091.182074] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56e5eb45-258b-4ce1-b653-351286760f80 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.202957] env[63028]: DEBUG nova.objects.instance [None req-0aeaf461-39d1-4229-b36e-ab90d73f7d15 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Lazy-loading 'flavor' on Instance uuid f804ec95-0b97-4960-844d-b678b97fc401 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1091.256564] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-5388c69a-74fc-4be8-a33f-b9fb8d66eea9 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: b3930760-1888-4f80-85d8-65120a25f275] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1091.256880] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-5388c69a-74fc-4be8-a33f-b9fb8d66eea9 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: b3930760-1888-4f80-85d8-65120a25f275] Deleting contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1091.257103] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-5388c69a-74fc-4be8-a33f-b9fb8d66eea9 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Deleting the datastore file [datastore2] b3930760-1888-4f80-85d8-65120a25f275 {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1091.257380] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a4db061b-daa2-4667-aab9-a3c8911992d4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.266912] env[63028]: DEBUG oslo_vmware.api [None req-5388c69a-74fc-4be8-a33f-b9fb8d66eea9 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Waiting for the task: (returnval){ [ 1091.266912] env[63028]: value = "task-2736282" [ 1091.266912] env[63028]: _type = "Task" [ 1091.266912] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.275540] env[63028]: DEBUG oslo_vmware.api [None req-5388c69a-74fc-4be8-a33f-b9fb8d66eea9 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2736282, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.318847] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2addddbe-5a70-4060-a502-01e4f7f2490e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.328175] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b55c17b-2def-44c8-996e-e50d7a22ffe4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.365356] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7882ddd-1731-4bf9-8373-6d34d863a5bb {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.374981] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f44c7717-743f-4c64-a1e6-2445ad0141f1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.394254] env[63028]: DEBUG nova.compute.provider_tree [None req-5e48a89d-56e9-4d31-a71b-8616dcfca951 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1091.503432] env[63028]: DEBUG nova.network.neutron [None req-9fd4d01a-3dac-4bb3-843a-ce388315f026 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: a7ff444e-43bc-4925-9754-86ff30de6751] Instance cache missing network info. {{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1091.586547] env[63028]: DEBUG oslo_vmware.api [None req-807df1ad-a56e-4542-a89e-860ad319f30c tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] Task: {'id': task-2736279, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072424} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1091.587243] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-807df1ad-a56e-4542-a89e-860ad319f30c tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] [instance: 2add1602-122e-41d7-af83-b71d8dab9288] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1091.588429] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d7e5b3f-8666-4a51-89ca-9330cedf79e3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.596451] env[63028]: DEBUG oslo_vmware.api [None req-ed5ccc95-c538-41b6-beda-16d461eedf6f tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': task-2736280, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.617780] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-807df1ad-a56e-4542-a89e-860ad319f30c tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] [instance: 2add1602-122e-41d7-af83-b71d8dab9288] Reconfiguring VM instance instance-0000006b to attach disk [datastore2] 2add1602-122e-41d7-af83-b71d8dab9288/2add1602-122e-41d7-af83-b71d8dab9288.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1091.621606] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a6d17fbd-dd3a-41bc-bc3e-6741d6f8f8b5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.643136] env[63028]: DEBUG oslo_vmware.api [None req-807df1ad-a56e-4542-a89e-860ad319f30c tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] Waiting for the task: (returnval){ [ 1091.643136] env[63028]: value = "task-2736283" [ 1091.643136] env[63028]: _type = "Task" [ 1091.643136] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.654796] env[63028]: DEBUG oslo_vmware.api [None req-807df1ad-a56e-4542-a89e-860ad319f30c tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] Task: {'id': task-2736283, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.702956] env[63028]: INFO nova.compute.manager [None req-18cab1b2-a93d-4707-a623-a0b75ca018c2 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: 899496ae-8463-42e0-a287-b141d956fa0a] Took 28.84 seconds to build instance. 
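[annotation] The paired 'Lock "..." acquired by "..." :: waited Xs' and '"released" by "..." :: held Ys' entries (for example the per-instance lock released in the next entry) are emitted by the oslo.concurrency lockutils wrapper. A hedged sketch of that usage, with a placeholder lock name and function body:

```python
from oslo_concurrency import lockutils

# The decorator's wrapper ("inner" in the log source references) logs how long
# the caller waited to acquire the named lock and how long it was held.
@lockutils.synchronized('f804ec95-0b97-4960-844d-b678b97fc401')
def do_attach_volume():
    # Work performed while the per-instance lock is held.
    pass
```

The lock name here mirrors the instance UUID seen in the log purely for illustration; Nova's own helpers add their own prefixes around the same lockutils primitives.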
[ 1091.708632] env[63028]: DEBUG oslo_concurrency.lockutils [None req-0aeaf461-39d1-4229-b36e-ab90d73f7d15 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Lock "f804ec95-0b97-4960-844d-b678b97fc401" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.292s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1091.726056] env[63028]: DEBUG nova.network.neutron [None req-9fd4d01a-3dac-4bb3-843a-ce388315f026 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: a7ff444e-43bc-4925-9754-86ff30de6751] Updating instance_info_cache with network_info: [{"id": "b7d0f379-6cd1-4efa-beb8-c2a2f3ef548c", "address": "fa:16:3e:e5:49:a9", "network": {"id": "e8109779-7eb7-4751-b6e5-c1e3b007cb9a", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1892394824-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "05118b378b5e4d838962db2378b381bc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5446413d-c3b0-4cd2-a962-62240db178ac", "external-id": "nsx-vlan-transportzone-528", "segmentation_id": 528, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb7d0f379-6c", "ovs_interfaceid": "b7d0f379-6cd1-4efa-beb8-c2a2f3ef548c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1091.778374] env[63028]: DEBUG oslo_vmware.api [None req-5388c69a-74fc-4be8-a33f-b9fb8d66eea9 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2736282, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.293323} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1091.778679] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-5388c69a-74fc-4be8-a33f-b9fb8d66eea9 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1091.778833] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-5388c69a-74fc-4be8-a33f-b9fb8d66eea9 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: b3930760-1888-4f80-85d8-65120a25f275] Deleted contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1091.779016] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-5388c69a-74fc-4be8-a33f-b9fb8d66eea9 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: b3930760-1888-4f80-85d8-65120a25f275] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1091.779197] env[63028]: INFO nova.compute.manager [None req-5388c69a-74fc-4be8-a33f-b9fb8d66eea9 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: b3930760-1888-4f80-85d8-65120a25f275] Took 1.19 seconds to destroy the instance on the hypervisor. [ 1091.779466] env[63028]: DEBUG oslo.service.loopingcall [None req-5388c69a-74fc-4be8-a33f-b9fb8d66eea9 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1091.779684] env[63028]: DEBUG nova.compute.manager [-] [instance: b3930760-1888-4f80-85d8-65120a25f275] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1091.779782] env[63028]: DEBUG nova.network.neutron [-] [instance: b3930760-1888-4f80-85d8-65120a25f275] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1091.898307] env[63028]: DEBUG nova.scheduler.client.report [None req-5e48a89d-56e9-4d31-a71b-8616dcfca951 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1091.975804] env[63028]: DEBUG nova.virt.hardware [None req-c36417a8-c12b-479e-bb47-c4dc2a652aac tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) 
and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1091.976102] env[63028]: DEBUG nova.virt.hardware [None req-c36417a8-c12b-479e-bb47-c4dc2a652aac tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1091.976856] env[63028]: DEBUG nova.virt.hardware [None req-c36417a8-c12b-479e-bb47-c4dc2a652aac tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1091.976856] env[63028]: DEBUG nova.virt.hardware [None req-c36417a8-c12b-479e-bb47-c4dc2a652aac tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1091.976856] env[63028]: DEBUG nova.virt.hardware [None req-c36417a8-c12b-479e-bb47-c4dc2a652aac tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1091.976856] env[63028]: DEBUG nova.virt.hardware [None req-c36417a8-c12b-479e-bb47-c4dc2a652aac tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1091.977152] env[63028]: DEBUG nova.virt.hardware [None req-c36417a8-c12b-479e-bb47-c4dc2a652aac tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1091.977901] env[63028]: DEBUG nova.virt.hardware [None req-c36417a8-c12b-479e-bb47-c4dc2a652aac tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1091.977901] env[63028]: DEBUG nova.virt.hardware [None req-c36417a8-c12b-479e-bb47-c4dc2a652aac tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1091.977901] env[63028]: DEBUG nova.virt.hardware [None req-c36417a8-c12b-479e-bb47-c4dc2a652aac tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1091.978016] env[63028]: DEBUG nova.virt.hardware [None 
req-c36417a8-c12b-479e-bb47-c4dc2a652aac tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1091.979546] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce63697a-f44b-4b59-9ecf-820333eab30c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.989848] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea0ad580-e9c7-4d59-83f6-7f9d62bdc7de {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.011591] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c36417a8-c12b-479e-bb47-c4dc2a652aac tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 60d18f14-536a-4b0f-912b-21f3f5a30d28] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d1:35:37', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69e1a5c1-4ae7-409b-8de7-d401684ef60d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2e1ed2b6-3ab4-47dc-9247-d7c30d3bc291', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1092.020316] env[63028]: DEBUG oslo.service.loopingcall [None req-c36417a8-c12b-479e-bb47-c4dc2a652aac tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1092.020643] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 60d18f14-536a-4b0f-912b-21f3f5a30d28] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1092.021139] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c931acee-3a7a-4e85-a5d2-5a6aae10dbfc {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.042131] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1092.042131] env[63028]: value = "task-2736284" [ 1092.042131] env[63028]: _type = "Task" [ 1092.042131] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1092.052272] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2736284, 'name': CreateVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.105608] env[63028]: DEBUG oslo_vmware.api [None req-ed5ccc95-c538-41b6-beda-16d461eedf6f tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': task-2736280, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.526943} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.105916] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed5ccc95-c538-41b6-beda-16d461eedf6f tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] 6865f832-d409-4b9b-8b6c-33b0bf07d2b2/6865f832-d409-4b9b-8b6c-33b0bf07d2b2.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1092.106145] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-ed5ccc95-c538-41b6-beda-16d461eedf6f tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: 6865f832-d409-4b9b-8b6c-33b0bf07d2b2] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1092.106465] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fd78d1c5-96ba-4c06-a77b-4ef2230dd3ae {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.116538] env[63028]: DEBUG oslo_vmware.api [None req-ed5ccc95-c538-41b6-beda-16d461eedf6f tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Waiting for the task: (returnval){ [ 1092.116538] env[63028]: value = "task-2736286" [ 1092.116538] env[63028]: _type = "Task" [ 1092.116538] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1092.127335] env[63028]: DEBUG oslo_vmware.api [None req-ed5ccc95-c538-41b6-beda-16d461eedf6f tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': task-2736286, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.154273] env[63028]: DEBUG oslo_vmware.api [None req-807df1ad-a56e-4542-a89e-860ad319f30c tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] Task: {'id': task-2736283, 'name': ReconfigVM_Task, 'duration_secs': 0.301747} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.154599] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-807df1ad-a56e-4542-a89e-860ad319f30c tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] [instance: 2add1602-122e-41d7-af83-b71d8dab9288] Reconfigured VM instance instance-0000006b to attach disk [datastore2] 2add1602-122e-41d7-af83-b71d8dab9288/2add1602-122e-41d7-af83-b71d8dab9288.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1092.156210] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2a86b675-5d45-41ba-86e3-e8ac85a568a2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.160113] env[63028]: DEBUG nova.compute.manager [req-62832bb3-4a8e-4c08-8469-46120a8e836a req-a23a2ead-184c-43db-97e0-108d0e937c6c service nova] [instance: b3930760-1888-4f80-85d8-65120a25f275] Received event network-vif-deleted-f5040918-0c62-4758-8f50-01303c2a0180 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1092.160113] env[63028]: INFO nova.compute.manager [req-62832bb3-4a8e-4c08-8469-46120a8e836a req-a23a2ead-184c-43db-97e0-108d0e937c6c service nova] [instance: b3930760-1888-4f80-85d8-65120a25f275] Neutron deleted interface f5040918-0c62-4758-8f50-01303c2a0180; detaching it from the instance and deleting it from the info cache [ 1092.160113] env[63028]: DEBUG nova.network.neutron [req-62832bb3-4a8e-4c08-8469-46120a8e836a req-a23a2ead-184c-43db-97e0-108d0e937c6c service nova] [instance: b3930760-1888-4f80-85d8-65120a25f275] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1092.166549] env[63028]: DEBUG oslo_vmware.api [None req-807df1ad-a56e-4542-a89e-860ad319f30c tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] Waiting for the task: (returnval){ [ 1092.166549] env[63028]: value = "task-2736287" [ 1092.166549] env[63028]: _type = "Task" [ 1092.166549] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1092.177232] env[63028]: DEBUG oslo_vmware.api [None req-807df1ad-a56e-4542-a89e-860ad319f30c tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] Task: {'id': task-2736287, 'name': Rename_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.205665] env[63028]: DEBUG oslo_concurrency.lockutils [None req-18cab1b2-a93d-4707-a623-a0b75ca018c2 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Lock "899496ae-8463-42e0-a287-b141d956fa0a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.347s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1092.229264] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9fd4d01a-3dac-4bb3-843a-ce388315f026 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Releasing lock "refresh_cache-a7ff444e-43bc-4925-9754-86ff30de6751" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1092.229609] env[63028]: DEBUG nova.compute.manager [None req-9fd4d01a-3dac-4bb3-843a-ce388315f026 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: a7ff444e-43bc-4925-9754-86ff30de6751] Instance network_info: |[{"id": "b7d0f379-6cd1-4efa-beb8-c2a2f3ef548c", "address": "fa:16:3e:e5:49:a9", "network": {"id": "e8109779-7eb7-4751-b6e5-c1e3b007cb9a", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1892394824-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "05118b378b5e4d838962db2378b381bc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5446413d-c3b0-4cd2-a962-62240db178ac", "external-id": "nsx-vlan-transportzone-528", "segmentation_id": 528, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb7d0f379-6c", "ovs_interfaceid": "b7d0f379-6cd1-4efa-beb8-c2a2f3ef548c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1092.230418] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-9fd4d01a-3dac-4bb3-843a-ce388315f026 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: a7ff444e-43bc-4925-9754-86ff30de6751] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e5:49:a9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5446413d-c3b0-4cd2-a962-62240db178ac', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b7d0f379-6cd1-4efa-beb8-c2a2f3ef548c', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1092.238386] env[63028]: DEBUG oslo.service.loopingcall [None req-9fd4d01a-3dac-4bb3-843a-ce388315f026 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1092.238624] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a7ff444e-43bc-4925-9754-86ff30de6751] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1092.238890] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-757ab100-e7e2-45e3-843b-202abe65f380 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.261643] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1092.261643] env[63028]: value = "task-2736288" [ 1092.261643] env[63028]: _type = "Task" [ 1092.261643] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1092.270074] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2736288, 'name': CreateVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.404622] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5e48a89d-56e9-4d31-a71b-8616dcfca951 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.923s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1092.408046] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.331s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1092.410858] env[63028]: INFO nova.compute.claims [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 3d91b47d-a5f7-4a10-aefb-1ad9c84c63e4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1092.432211] env[63028]: INFO nova.scheduler.client.report [None req-5e48a89d-56e9-4d31-a71b-8616dcfca951 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Deleted allocations for instance 85aafadb-81d6-4687-aed1-fbe829e5f95f [ 1092.492541] env[63028]: DEBUG nova.compute.manager [req-f4b333d6-4e11-4b64-b43c-fef1eeffd7f4 req-032fcb16-df0a-42d7-be5e-241a3f0d4de1 service nova] [instance: e048cadf-9dc1-4eb7-a825-422d0736231c] Received event network-changed-60891063-6c30-480a-8e2b-f3960496f2fd {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1092.492796] env[63028]: DEBUG nova.compute.manager [req-f4b333d6-4e11-4b64-b43c-fef1eeffd7f4 req-032fcb16-df0a-42d7-be5e-241a3f0d4de1 service nova] [instance: e048cadf-9dc1-4eb7-a825-422d0736231c] Refreshing instance network info cache due to event network-changed-60891063-6c30-480a-8e2b-f3960496f2fd. 
{{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1092.493143] env[63028]: DEBUG oslo_concurrency.lockutils [req-f4b333d6-4e11-4b64-b43c-fef1eeffd7f4 req-032fcb16-df0a-42d7-be5e-241a3f0d4de1 service nova] Acquiring lock "refresh_cache-e048cadf-9dc1-4eb7-a825-422d0736231c" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1092.493387] env[63028]: DEBUG oslo_concurrency.lockutils [req-f4b333d6-4e11-4b64-b43c-fef1eeffd7f4 req-032fcb16-df0a-42d7-be5e-241a3f0d4de1 service nova] Acquired lock "refresh_cache-e048cadf-9dc1-4eb7-a825-422d0736231c" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1092.493658] env[63028]: DEBUG nova.network.neutron [req-f4b333d6-4e11-4b64-b43c-fef1eeffd7f4 req-032fcb16-df0a-42d7-be5e-241a3f0d4de1 service nova] [instance: e048cadf-9dc1-4eb7-a825-422d0736231c] Refreshing network info cache for port 60891063-6c30-480a-8e2b-f3960496f2fd {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1092.555549] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2736284, 'name': CreateVM_Task, 'duration_secs': 0.41621} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.555721] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 60d18f14-536a-4b0f-912b-21f3f5a30d28] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1092.556476] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c36417a8-c12b-479e-bb47-c4dc2a652aac tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1092.557028] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c36417a8-c12b-479e-bb47-c4dc2a652aac tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1092.557028] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c36417a8-c12b-479e-bb47-c4dc2a652aac tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1092.557394] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8c6c767e-5fdb-4537-a984-ded75282ad7f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.564074] env[63028]: DEBUG oslo_vmware.api [None req-c36417a8-c12b-479e-bb47-c4dc2a652aac tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Waiting for the task: (returnval){ [ 1092.564074] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]525fbcb7-cb12-39ed-4b6d-92253cc3672e" [ 1092.564074] env[63028]: _type = "Task" [ 1092.564074] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1092.573766] env[63028]: DEBUG oslo_concurrency.lockutils [None req-40fe3359-7549-4835-a0aa-b278c4c16b6b tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Acquiring lock "1f8415cc-f544-4c89-9863-43d5ae9144e8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1092.573766] env[63028]: DEBUG oslo_concurrency.lockutils [None req-40fe3359-7549-4835-a0aa-b278c4c16b6b tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Lock "1f8415cc-f544-4c89-9863-43d5ae9144e8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1092.573766] env[63028]: DEBUG oslo_concurrency.lockutils [None req-40fe3359-7549-4835-a0aa-b278c4c16b6b tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Acquiring lock "1f8415cc-f544-4c89-9863-43d5ae9144e8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1092.573766] env[63028]: DEBUG oslo_concurrency.lockutils [None req-40fe3359-7549-4835-a0aa-b278c4c16b6b tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Lock "1f8415cc-f544-4c89-9863-43d5ae9144e8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1092.573766] env[63028]: DEBUG oslo_concurrency.lockutils [None req-40fe3359-7549-4835-a0aa-b278c4c16b6b tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Lock "1f8415cc-f544-4c89-9863-43d5ae9144e8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1092.582233] env[63028]: INFO nova.compute.manager [None req-40fe3359-7549-4835-a0aa-b278c4c16b6b tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: 1f8415cc-f544-4c89-9863-43d5ae9144e8] Terminating instance [ 1092.590380] env[63028]: DEBUG oslo_vmware.api [None req-c36417a8-c12b-479e-bb47-c4dc2a652aac tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]525fbcb7-cb12-39ed-4b6d-92253cc3672e, 'name': SearchDatastore_Task, 'duration_secs': 0.012096} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.590686] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c36417a8-c12b-479e-bb47-c4dc2a652aac tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1092.590916] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c36417a8-c12b-479e-bb47-c4dc2a652aac tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 60d18f14-536a-4b0f-912b-21f3f5a30d28] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1092.591161] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c36417a8-c12b-479e-bb47-c4dc2a652aac tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1092.591309] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c36417a8-c12b-479e-bb47-c4dc2a652aac tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1092.591491] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-c36417a8-c12b-479e-bb47-c4dc2a652aac tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1092.591775] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3647c7ed-94b2-46dd-9428-d0e36fddcd0a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.597383] env[63028]: DEBUG nova.network.neutron [-] [instance: b3930760-1888-4f80-85d8-65120a25f275] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1092.602395] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-c36417a8-c12b-479e-bb47-c4dc2a652aac tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1092.602589] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c36417a8-c12b-479e-bb47-c4dc2a652aac tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1092.603381] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-befe03ef-8b85-428a-84c6-62e4f014060e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.615084] env[63028]: DEBUG oslo_vmware.api [None req-c36417a8-c12b-479e-bb47-c4dc2a652aac tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Waiting for the task: (returnval){ [ 1092.615084] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5258a639-4351-b89b-6168-f81fabdfcfa6" [ 1092.615084] env[63028]: _type = "Task" [ 1092.615084] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1092.629104] env[63028]: DEBUG oslo_vmware.api [None req-c36417a8-c12b-479e-bb47-c4dc2a652aac tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5258a639-4351-b89b-6168-f81fabdfcfa6, 'name': SearchDatastore_Task, 'duration_secs': 0.010285} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.632696] env[63028]: DEBUG oslo_vmware.api [None req-ed5ccc95-c538-41b6-beda-16d461eedf6f tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': task-2736286, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.109292} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.633430] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dc4774f0-1aac-4a4d-bac8-0255905fd956 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.637109] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-ed5ccc95-c538-41b6-beda-16d461eedf6f tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: 6865f832-d409-4b9b-8b6c-33b0bf07d2b2] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1092.638159] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d64594cd-e6fc-490d-988e-069d71f16070 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.645224] env[63028]: DEBUG oslo_vmware.api [None req-c36417a8-c12b-479e-bb47-c4dc2a652aac tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Waiting for the task: (returnval){ [ 1092.645224] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5224c51d-ebbb-3c8b-897e-d6a9c1911aec" [ 1092.645224] env[63028]: _type = "Task" [ 1092.645224] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1092.667946] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-ed5ccc95-c538-41b6-beda-16d461eedf6f tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: 6865f832-d409-4b9b-8b6c-33b0bf07d2b2] Reconfiguring VM instance instance-0000006c to attach disk [datastore1] 6865f832-d409-4b9b-8b6c-33b0bf07d2b2/6865f832-d409-4b9b-8b6c-33b0bf07d2b2.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1092.671811] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ba774e57-b88a-411b-b4e0-228562a23908 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.687508] env[63028]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-be0a614e-2006-4a3e-b8b8-52306f7d3544 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.695975] env[63028]: DEBUG oslo_concurrency.lockutils [None req-e434ffd9-9803-475f-906f-37b8ad29e731 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Acquiring lock "f804ec95-0b97-4960-844d-b678b97fc401" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1092.696305] env[63028]: DEBUG oslo_concurrency.lockutils [None req-e434ffd9-9803-475f-906f-37b8ad29e731 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Lock "f804ec95-0b97-4960-844d-b678b97fc401" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1092.702586] env[63028]: DEBUG oslo_vmware.api [None req-c36417a8-c12b-479e-bb47-c4dc2a652aac tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5224c51d-ebbb-3c8b-897e-d6a9c1911aec, 'name': SearchDatastore_Task, 'duration_secs': 0.045312} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.707342] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c36417a8-c12b-479e-bb47-c4dc2a652aac tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1092.707611] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-c36417a8-c12b-479e-bb47-c4dc2a652aac tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] 60d18f14-536a-4b0f-912b-21f3f5a30d28/60d18f14-536a-4b0f-912b-21f3f5a30d28.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1092.707940] env[63028]: DEBUG oslo_vmware.api [None req-807df1ad-a56e-4542-a89e-860ad319f30c tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] Task: {'id': task-2736287, 'name': Rename_Task, 'duration_secs': 0.218537} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.708523] env[63028]: DEBUG oslo_vmware.api [None req-ed5ccc95-c538-41b6-beda-16d461eedf6f tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Waiting for the task: (returnval){ [ 1092.708523] env[63028]: value = "task-2736289" [ 1092.708523] env[63028]: _type = "Task" [ 1092.708523] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1092.709555] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-10a367f3-4a3d-48d8-9019-a62d75ee9046 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.711459] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-807df1ad-a56e-4542-a89e-860ad319f30c tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] [instance: 2add1602-122e-41d7-af83-b71d8dab9288] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1092.714339] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-279593ee-9040-44ac-9368-964ca96d3890 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.725462] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ec3529a2-66cd-4b30-8c4c-a246dfaecfc5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.740505] env[63028]: DEBUG oslo_vmware.api [None req-ed5ccc95-c538-41b6-beda-16d461eedf6f tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': task-2736289, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.742908] env[63028]: DEBUG oslo_vmware.api [None req-c36417a8-c12b-479e-bb47-c4dc2a652aac tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Waiting for the task: (returnval){ [ 1092.742908] env[63028]: value = "task-2736290" [ 1092.742908] env[63028]: _type = "Task" [ 1092.742908] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1092.743485] env[63028]: DEBUG oslo_vmware.api [None req-807df1ad-a56e-4542-a89e-860ad319f30c tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] Waiting for the task: (returnval){ [ 1092.743485] env[63028]: value = "task-2736291" [ 1092.743485] env[63028]: _type = "Task" [ 1092.743485] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1092.763554] env[63028]: DEBUG nova.compute.manager [req-62832bb3-4a8e-4c08-8469-46120a8e836a req-a23a2ead-184c-43db-97e0-108d0e937c6c service nova] [instance: b3930760-1888-4f80-85d8-65120a25f275] Detach interface failed, port_id=f5040918-0c62-4758-8f50-01303c2a0180, reason: Instance b3930760-1888-4f80-85d8-65120a25f275 could not be found. {{(pid=63028) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1092.774388] env[63028]: DEBUG oslo_vmware.api [None req-c36417a8-c12b-479e-bb47-c4dc2a652aac tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736290, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.774653] env[63028]: DEBUG oslo_vmware.api [None req-807df1ad-a56e-4542-a89e-860ad319f30c tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] Task: {'id': task-2736291, 'name': PowerOnVM_Task} progress is 33%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.780724] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2736288, 'name': CreateVM_Task} progress is 25%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.942578] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5e48a89d-56e9-4d31-a71b-8616dcfca951 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Lock "85aafadb-81d6-4687-aed1-fbe829e5f95f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 20.606s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1093.087428] env[63028]: DEBUG nova.compute.manager [None req-40fe3359-7549-4835-a0aa-b278c4c16b6b tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: 1f8415cc-f544-4c89-9863-43d5ae9144e8] Start destroying the instance on the hypervisor. 
{{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1093.087729] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-40fe3359-7549-4835-a0aa-b278c4c16b6b tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: 1f8415cc-f544-4c89-9863-43d5ae9144e8] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1093.088835] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e637c259-5b0a-4f53-bc1c-f5361c4b4a08 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.101415] env[63028]: INFO nova.compute.manager [-] [instance: b3930760-1888-4f80-85d8-65120a25f275] Took 1.32 seconds to deallocate network for instance. [ 1093.101781] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-40fe3359-7549-4835-a0aa-b278c4c16b6b tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: 1f8415cc-f544-4c89-9863-43d5ae9144e8] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1093.105212] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f5d2ff62-4b48-499e-9dbd-29d22edd5e5a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.201882] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-40fe3359-7549-4835-a0aa-b278c4c16b6b tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: 1f8415cc-f544-4c89-9863-43d5ae9144e8] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1093.201882] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-40fe3359-7549-4835-a0aa-b278c4c16b6b tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: 1f8415cc-f544-4c89-9863-43d5ae9144e8] Deleting contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1093.202333] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-40fe3359-7549-4835-a0aa-b278c4c16b6b tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Deleting the datastore file [datastore2] 1f8415cc-f544-4c89-9863-43d5ae9144e8 {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1093.205181] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fc7ede27-a26e-4378-bf4b-7a0327a14d93 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.208740] env[63028]: DEBUG nova.compute.utils [None req-e434ffd9-9803-475f-906f-37b8ad29e731 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1093.220267] env[63028]: DEBUG oslo_vmware.api [None req-40fe3359-7549-4835-a0aa-b278c4c16b6b tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Waiting for the task: (returnval){ [ 1093.220267] env[63028]: value = "task-2736293" [ 1093.220267] env[63028]: _type = "Task" [ 1093.220267] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1093.222982] env[63028]: DEBUG oslo_vmware.api [None req-ed5ccc95-c538-41b6-beda-16d461eedf6f tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': task-2736289, 'name': ReconfigVM_Task, 'duration_secs': 0.48028} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1093.228025] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-ed5ccc95-c538-41b6-beda-16d461eedf6f tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: 6865f832-d409-4b9b-8b6c-33b0bf07d2b2] Reconfigured VM instance instance-0000006c to attach disk [datastore1] 6865f832-d409-4b9b-8b6c-33b0bf07d2b2/6865f832-d409-4b9b-8b6c-33b0bf07d2b2.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1093.228997] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a8f766b6-0cd1-49f2-903a-ba0c10a41410 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.239289] env[63028]: DEBUG oslo_vmware.api [None req-40fe3359-7549-4835-a0aa-b278c4c16b6b tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2736293, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.241065] env[63028]: DEBUG oslo_vmware.api [None req-ed5ccc95-c538-41b6-beda-16d461eedf6f tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Waiting for the task: (returnval){ [ 1093.241065] env[63028]: value = "task-2736294" [ 1093.241065] env[63028]: _type = "Task" [ 1093.241065] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1093.252349] env[63028]: DEBUG nova.network.neutron [req-f4b333d6-4e11-4b64-b43c-fef1eeffd7f4 req-032fcb16-df0a-42d7-be5e-241a3f0d4de1 service nova] [instance: e048cadf-9dc1-4eb7-a825-422d0736231c] Updated VIF entry in instance network info cache for port 60891063-6c30-480a-8e2b-f3960496f2fd. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1093.252853] env[63028]: DEBUG nova.network.neutron [req-f4b333d6-4e11-4b64-b43c-fef1eeffd7f4 req-032fcb16-df0a-42d7-be5e-241a3f0d4de1 service nova] [instance: e048cadf-9dc1-4eb7-a825-422d0736231c] Updating instance_info_cache with network_info: [{"id": "60891063-6c30-480a-8e2b-f3960496f2fd", "address": "fa:16:3e:84:9a:c5", "network": {"id": "c2f1496c-e3fd-43db-a032-12cdacdb4e46", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1287620143-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "25a6457f62d149629c09589feb1a550c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap60891063-6c", "ovs_interfaceid": "60891063-6c30-480a-8e2b-f3960496f2fd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1093.259847] env[63028]: DEBUG oslo_vmware.api [None req-ed5ccc95-c538-41b6-beda-16d461eedf6f tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': task-2736294, 'name': Rename_Task} progress is 6%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.263632] env[63028]: DEBUG oslo_vmware.api [None req-c36417a8-c12b-479e-bb47-c4dc2a652aac tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736290, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.270169] env[63028]: DEBUG oslo_vmware.api [None req-807df1ad-a56e-4542-a89e-860ad319f30c tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] Task: {'id': task-2736291, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.278157] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2736288, 'name': CreateVM_Task, 'duration_secs': 0.667689} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1093.278361] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a7ff444e-43bc-4925-9754-86ff30de6751] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1093.279030] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9fd4d01a-3dac-4bb3-843a-ce388315f026 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1093.279204] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9fd4d01a-3dac-4bb3-843a-ce388315f026 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1093.279521] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9fd4d01a-3dac-4bb3-843a-ce388315f026 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1093.279781] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-563ad314-7518-4fb8-be04-64238d2b29c5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.285349] env[63028]: DEBUG oslo_vmware.api [None req-9fd4d01a-3dac-4bb3-843a-ce388315f026 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Waiting for the task: (returnval){ [ 1093.285349] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52363a25-2726-d735-39d1-710014d6de6b" [ 1093.285349] env[63028]: _type = "Task" [ 1093.285349] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1093.298225] env[63028]: DEBUG oslo_vmware.api [None req-9fd4d01a-3dac-4bb3-843a-ce388315f026 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52363a25-2726-d735-39d1-710014d6de6b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.612600] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5388c69a-74fc-4be8-a33f-b9fb8d66eea9 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1093.665668] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d3cd18a-1360-40c2-9ec9-3bd1956ce7e3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.678961] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb45aec1-0c8f-451a-9d78-6b4b0d69976d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.719757] env[63028]: DEBUG oslo_concurrency.lockutils [None req-e434ffd9-9803-475f-906f-37b8ad29e731 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Lock "f804ec95-0b97-4960-844d-b678b97fc401" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.023s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1093.721647] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02dcedef-8c42-4add-81e2-33439b33f35e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.738928] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aaac596d-8da7-4325-86c2-df58317627a4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.743229] env[63028]: DEBUG oslo_vmware.api [None req-40fe3359-7549-4835-a0aa-b278c4c16b6b tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2736293, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.206276} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1093.743532] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-40fe3359-7549-4835-a0aa-b278c4c16b6b tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1093.743749] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-40fe3359-7549-4835-a0aa-b278c4c16b6b tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: 1f8415cc-f544-4c89-9863-43d5ae9144e8] Deleted contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1093.743951] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-40fe3359-7549-4835-a0aa-b278c4c16b6b tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: 1f8415cc-f544-4c89-9863-43d5ae9144e8] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1093.744459] env[63028]: INFO nova.compute.manager [None req-40fe3359-7549-4835-a0aa-b278c4c16b6b tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: 1f8415cc-f544-4c89-9863-43d5ae9144e8] Took 0.66 seconds to destroy the instance on the hypervisor. [ 1093.744459] env[63028]: DEBUG oslo.service.loopingcall [None req-40fe3359-7549-4835-a0aa-b278c4c16b6b tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1093.748503] env[63028]: DEBUG nova.compute.manager [-] [instance: 1f8415cc-f544-4c89-9863-43d5ae9144e8] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1093.748852] env[63028]: DEBUG nova.network.neutron [-] [instance: 1f8415cc-f544-4c89-9863-43d5ae9144e8] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1093.764994] env[63028]: DEBUG oslo_concurrency.lockutils [req-f4b333d6-4e11-4b64-b43c-fef1eeffd7f4 req-032fcb16-df0a-42d7-be5e-241a3f0d4de1 service nova] Releasing lock "refresh_cache-e048cadf-9dc1-4eb7-a825-422d0736231c" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1093.768022] env[63028]: DEBUG nova.compute.provider_tree [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1093.771376] env[63028]: DEBUG oslo_vmware.api [None req-ed5ccc95-c538-41b6-beda-16d461eedf6f tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': task-2736294, 'name': Rename_Task, 'duration_secs': 0.196188} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1093.771910] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed5ccc95-c538-41b6-beda-16d461eedf6f tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: 6865f832-d409-4b9b-8b6c-33b0bf07d2b2] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1093.772221] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5cc32a28-f0e6-4889-9746-082d01a7af7a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.780668] env[63028]: DEBUG oslo_vmware.api [None req-c36417a8-c12b-479e-bb47-c4dc2a652aac tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736290, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.555693} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1093.780956] env[63028]: DEBUG oslo_vmware.api [None req-807df1ad-a56e-4542-a89e-860ad319f30c tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] Task: {'id': task-2736291, 'name': PowerOnVM_Task, 'duration_secs': 0.564676} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1093.782259] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-c36417a8-c12b-479e-bb47-c4dc2a652aac tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] 60d18f14-536a-4b0f-912b-21f3f5a30d28/60d18f14-536a-4b0f-912b-21f3f5a30d28.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1093.782536] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c36417a8-c12b-479e-bb47-c4dc2a652aac tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 60d18f14-536a-4b0f-912b-21f3f5a30d28] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1093.782840] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-807df1ad-a56e-4542-a89e-860ad319f30c tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] [instance: 2add1602-122e-41d7-af83-b71d8dab9288] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1093.783562] env[63028]: INFO nova.compute.manager [None req-807df1ad-a56e-4542-a89e-860ad319f30c tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] [instance: 2add1602-122e-41d7-af83-b71d8dab9288] Took 10.00 seconds to spawn the instance on the hypervisor. 
[ 1093.783562] env[63028]: DEBUG nova.compute.manager [None req-807df1ad-a56e-4542-a89e-860ad319f30c tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] [instance: 2add1602-122e-41d7-af83-b71d8dab9288] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1093.783925] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c025d2e8-48b5-4796-9607-b426293da695 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.787130] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8888065e-0db4-46ee-9f21-f9ec540d5a06 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.793022] env[63028]: DEBUG oslo_vmware.api [None req-ed5ccc95-c538-41b6-beda-16d461eedf6f tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Waiting for the task: (returnval){ [ 1093.793022] env[63028]: value = "task-2736295" [ 1093.793022] env[63028]: _type = "Task" [ 1093.793022] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1093.810492] env[63028]: DEBUG oslo_vmware.api [None req-c36417a8-c12b-479e-bb47-c4dc2a652aac tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Waiting for the task: (returnval){ [ 1093.810492] env[63028]: value = "task-2736296" [ 1093.810492] env[63028]: _type = "Task" [ 1093.810492] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1093.819026] env[63028]: DEBUG oslo_vmware.api [None req-9fd4d01a-3dac-4bb3-843a-ce388315f026 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52363a25-2726-d735-39d1-710014d6de6b, 'name': SearchDatastore_Task, 'duration_secs': 0.02163} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1093.821203] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9fd4d01a-3dac-4bb3-843a-ce388315f026 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1093.821626] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-9fd4d01a-3dac-4bb3-843a-ce388315f026 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: a7ff444e-43bc-4925-9754-86ff30de6751] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1093.821977] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9fd4d01a-3dac-4bb3-843a-ce388315f026 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1093.822219] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9fd4d01a-3dac-4bb3-843a-ce388315f026 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1093.822470] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-9fd4d01a-3dac-4bb3-843a-ce388315f026 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1093.827189] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8b43cf14-4be1-47b7-a9aa-dcfee07203e2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.829572] env[63028]: DEBUG oslo_vmware.api [None req-ed5ccc95-c538-41b6-beda-16d461eedf6f tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': task-2736295, 'name': PowerOnVM_Task} progress is 33%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.836249] env[63028]: DEBUG oslo_vmware.api [None req-c36417a8-c12b-479e-bb47-c4dc2a652aac tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736296, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.839707] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-9fd4d01a-3dac-4bb3-843a-ce388315f026 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1093.839902] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-9fd4d01a-3dac-4bb3-843a-ce388315f026 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1093.840684] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a164a402-5662-43aa-b08b-91e18e3fa9ee {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.848573] env[63028]: DEBUG oslo_vmware.api [None req-9fd4d01a-3dac-4bb3-843a-ce388315f026 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Waiting for the task: (returnval){ [ 1093.848573] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52ba82a7-91b2-0649-2e3f-4a86087585d7" [ 1093.848573] env[63028]: _type = "Task" [ 1093.848573] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1093.861229] env[63028]: DEBUG oslo_vmware.api [None req-9fd4d01a-3dac-4bb3-843a-ce388315f026 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52ba82a7-91b2-0649-2e3f-4a86087585d7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.280274] env[63028]: DEBUG nova.scheduler.client.report [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1094.305614] env[63028]: DEBUG nova.compute.manager [req-270ca5cc-c5c2-4f50-8fde-87e1f13f53e5 req-3a262e14-08cf-4fbc-88f2-4ebd06d7175e service nova] [instance: 1f8415cc-f544-4c89-9863-43d5ae9144e8] Received event network-vif-deleted-b2a35d65-e481-4d0a-8dc1-3b3b8dec509b {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1094.306289] env[63028]: INFO nova.compute.manager [req-270ca5cc-c5c2-4f50-8fde-87e1f13f53e5 req-3a262e14-08cf-4fbc-88f2-4ebd06d7175e service nova] [instance: 1f8415cc-f544-4c89-9863-43d5ae9144e8] Neutron deleted interface b2a35d65-e481-4d0a-8dc1-3b3b8dec509b; detaching it from the instance and deleting it from the info cache [ 1094.306289] env[63028]: DEBUG nova.network.neutron [req-270ca5cc-c5c2-4f50-8fde-87e1f13f53e5 req-3a262e14-08cf-4fbc-88f2-4ebd06d7175e service nova] [instance: 1f8415cc-f544-4c89-9863-43d5ae9144e8] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1094.312107] env[63028]: DEBUG oslo_vmware.api [None req-ed5ccc95-c538-41b6-beda-16d461eedf6f tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': task-2736295, 'name': PowerOnVM_Task} progress is 74%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.326105] env[63028]: INFO nova.compute.manager [None req-807df1ad-a56e-4542-a89e-860ad319f30c tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] [instance: 2add1602-122e-41d7-af83-b71d8dab9288] Took 30.53 seconds to build instance. [ 1094.334224] env[63028]: DEBUG oslo_vmware.api [None req-c36417a8-c12b-479e-bb47-c4dc2a652aac tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736296, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.153681} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1094.334953] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c36417a8-c12b-479e-bb47-c4dc2a652aac tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 60d18f14-536a-4b0f-912b-21f3f5a30d28] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1094.335586] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d0dc24f-d6d0-484a-b479-877b6a9f69b5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.367126] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-c36417a8-c12b-479e-bb47-c4dc2a652aac tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 60d18f14-536a-4b0f-912b-21f3f5a30d28] Reconfiguring VM instance instance-00000063 to attach disk [datastore1] 60d18f14-536a-4b0f-912b-21f3f5a30d28/60d18f14-536a-4b0f-912b-21f3f5a30d28.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1094.370627] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-493ac8ed-f914-4b48-8d22-5924695d54a4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.394999] env[63028]: DEBUG oslo_vmware.api [None req-9fd4d01a-3dac-4bb3-843a-ce388315f026 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52ba82a7-91b2-0649-2e3f-4a86087585d7, 'name': SearchDatastore_Task, 'duration_secs': 0.019148} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1094.398415] env[63028]: DEBUG oslo_vmware.api [None req-c36417a8-c12b-479e-bb47-c4dc2a652aac tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Waiting for the task: (returnval){ [ 1094.398415] env[63028]: value = "task-2736298" [ 1094.398415] env[63028]: _type = "Task" [ 1094.398415] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1094.398711] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8390f346-c0dd-4108-9dcf-385e1a036b8a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.408939] env[63028]: DEBUG oslo_vmware.api [None req-9fd4d01a-3dac-4bb3-843a-ce388315f026 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Waiting for the task: (returnval){ [ 1094.408939] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52d6a1b4-919c-29e5-35ea-60c8d528bf40" [ 1094.408939] env[63028]: _type = "Task" [ 1094.408939] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1094.416109] env[63028]: DEBUG oslo_vmware.api [None req-c36417a8-c12b-479e-bb47-c4dc2a652aac tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736298, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.424916] env[63028]: DEBUG oslo_vmware.api [None req-9fd4d01a-3dac-4bb3-843a-ce388315f026 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52d6a1b4-919c-29e5-35ea-60c8d528bf40, 'name': SearchDatastore_Task, 'duration_secs': 0.011228} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1094.424916] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9fd4d01a-3dac-4bb3-843a-ce388315f026 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1094.424916] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-9fd4d01a-3dac-4bb3-843a-ce388315f026 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] a7ff444e-43bc-4925-9754-86ff30de6751/a7ff444e-43bc-4925-9754-86ff30de6751.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1094.424916] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ced0cc51-7eb8-42a7-96b9-797040ff4197 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.433070] env[63028]: DEBUG oslo_vmware.api [None req-9fd4d01a-3dac-4bb3-843a-ce388315f026 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Waiting for the task: (returnval){ [ 1094.433070] env[63028]: value = "task-2736299" [ 1094.433070] env[63028]: _type = "Task" [ 1094.433070] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1094.441548] env[63028]: DEBUG oslo_vmware.api [None req-9fd4d01a-3dac-4bb3-843a-ce388315f026 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2736299, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.527732] env[63028]: DEBUG nova.compute.manager [req-f708af62-830b-4e17-9d33-648ad762fa47 req-d96d064c-c18b-4daa-b5d8-855f607e0d98 service nova] [instance: 899496ae-8463-42e0-a287-b141d956fa0a] Received event network-changed-197b3459-f9f1-4fe3-a9ad-169350b4d637 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1094.530354] env[63028]: DEBUG nova.compute.manager [req-f708af62-830b-4e17-9d33-648ad762fa47 req-d96d064c-c18b-4daa-b5d8-855f607e0d98 service nova] [instance: 899496ae-8463-42e0-a287-b141d956fa0a] Refreshing instance network info cache due to event network-changed-197b3459-f9f1-4fe3-a9ad-169350b4d637. 
{{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1094.530354] env[63028]: DEBUG oslo_concurrency.lockutils [req-f708af62-830b-4e17-9d33-648ad762fa47 req-d96d064c-c18b-4daa-b5d8-855f607e0d98 service nova] Acquiring lock "refresh_cache-899496ae-8463-42e0-a287-b141d956fa0a" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1094.530354] env[63028]: DEBUG oslo_concurrency.lockutils [req-f708af62-830b-4e17-9d33-648ad762fa47 req-d96d064c-c18b-4daa-b5d8-855f607e0d98 service nova] Acquired lock "refresh_cache-899496ae-8463-42e0-a287-b141d956fa0a" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1094.530354] env[63028]: DEBUG nova.network.neutron [req-f708af62-830b-4e17-9d33-648ad762fa47 req-d96d064c-c18b-4daa-b5d8-855f607e0d98 service nova] [instance: 899496ae-8463-42e0-a287-b141d956fa0a] Refreshing network info cache for port 197b3459-f9f1-4fe3-a9ad-169350b4d637 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1094.545781] env[63028]: DEBUG nova.network.neutron [-] [instance: 1f8415cc-f544-4c89-9863-43d5ae9144e8] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1094.785045] env[63028]: DEBUG oslo_concurrency.lockutils [None req-e434ffd9-9803-475f-906f-37b8ad29e731 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Acquiring lock "f804ec95-0b97-4960-844d-b678b97fc401" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1094.785045] env[63028]: DEBUG oslo_concurrency.lockutils [None req-e434ffd9-9803-475f-906f-37b8ad29e731 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Lock "f804ec95-0b97-4960-844d-b678b97fc401" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1094.785516] env[63028]: INFO nova.compute.manager [None req-e434ffd9-9803-475f-906f-37b8ad29e731 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] [instance: f804ec95-0b97-4960-844d-b678b97fc401] Attaching volume 8a20dc5f-0351-4017-9a2d-4311f8616190 to /dev/sdc [ 1094.790045] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.382s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1094.790634] env[63028]: DEBUG nova.compute.manager [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 3d91b47d-a5f7-4a10-aefb-1ad9c84c63e4] Start building networks asynchronously for instance. 
{{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1094.798539] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.701s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1094.801693] env[63028]: INFO nova.compute.claims [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 719e014f-0544-4832-81ae-26b028b17be0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1094.819256] env[63028]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f11f5b98-3888-4695-b14e-2a242f9e7bfe {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.835746] env[63028]: DEBUG oslo_concurrency.lockutils [None req-807df1ad-a56e-4542-a89e-860ad319f30c tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] Lock "2add1602-122e-41d7-af83-b71d8dab9288" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.048s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1094.836973] env[63028]: DEBUG oslo_vmware.api [None req-ed5ccc95-c538-41b6-beda-16d461eedf6f tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': task-2736295, 'name': PowerOnVM_Task, 'duration_secs': 0.939523} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1094.838218] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed5ccc95-c538-41b6-beda-16d461eedf6f tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: 6865f832-d409-4b9b-8b6c-33b0bf07d2b2] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1094.838576] env[63028]: INFO nova.compute.manager [None req-ed5ccc95-c538-41b6-beda-16d461eedf6f tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: 6865f832-d409-4b9b-8b6c-33b0bf07d2b2] Took 8.67 seconds to spawn the instance on the hypervisor. 
[ 1094.839160] env[63028]: DEBUG nova.compute.manager [None req-ed5ccc95-c538-41b6-beda-16d461eedf6f tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: 6865f832-d409-4b9b-8b6c-33b0bf07d2b2] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1094.840206] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a070b41-96ef-49e8-bfb5-bdeb5f83ca7b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.849725] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90f3a8f9-2157-4ecc-9e0c-81d666d9243b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.871097] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06447adc-08e2-4842-9a9d-b5d2d7e71d2a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.908013] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f61cdaf1-bc56-4fb9-bf85-fd4879cce3c3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.913073] env[63028]: DEBUG nova.compute.manager [req-270ca5cc-c5c2-4f50-8fde-87e1f13f53e5 req-3a262e14-08cf-4fbc-88f2-4ebd06d7175e service nova] [instance: 1f8415cc-f544-4c89-9863-43d5ae9144e8] Detach interface failed, port_id=b2a35d65-e481-4d0a-8dc1-3b3b8dec509b, reason: Instance 1f8415cc-f544-4c89-9863-43d5ae9144e8 could not be found. {{(pid=63028) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1094.930019] env[63028]: DEBUG oslo_vmware.api [None req-c36417a8-c12b-479e-bb47-c4dc2a652aac tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736298, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.935670] env[63028]: DEBUG nova.virt.block_device [None req-e434ffd9-9803-475f-906f-37b8ad29e731 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] [instance: f804ec95-0b97-4960-844d-b678b97fc401] Updating existing volume attachment record: cd318316-8ca2-4d57-8373-a26e66a7b530 {{(pid=63028) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1094.951166] env[63028]: DEBUG oslo_vmware.api [None req-9fd4d01a-3dac-4bb3-843a-ce388315f026 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2736299, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.460804} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1094.951423] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-9fd4d01a-3dac-4bb3-843a-ce388315f026 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] a7ff444e-43bc-4925-9754-86ff30de6751/a7ff444e-43bc-4925-9754-86ff30de6751.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1094.951655] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-9fd4d01a-3dac-4bb3-843a-ce388315f026 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: a7ff444e-43bc-4925-9754-86ff30de6751] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1094.952075] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9c07c2e0-711b-4495-b144-143ae3b2f7f1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.961971] env[63028]: DEBUG oslo_vmware.api [None req-9fd4d01a-3dac-4bb3-843a-ce388315f026 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Waiting for the task: (returnval){ [ 1094.961971] env[63028]: value = "task-2736300" [ 1094.961971] env[63028]: _type = "Task" [ 1094.961971] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1094.971889] env[63028]: DEBUG oslo_vmware.api [None req-9fd4d01a-3dac-4bb3-843a-ce388315f026 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2736300, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.051416] env[63028]: INFO nova.compute.manager [-] [instance: 1f8415cc-f544-4c89-9863-43d5ae9144e8] Took 1.30 seconds to deallocate network for instance. [ 1095.297053] env[63028]: DEBUG nova.compute.utils [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1095.301752] env[63028]: DEBUG nova.compute.manager [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 3d91b47d-a5f7-4a10-aefb-1ad9c84c63e4] Allocating IP information in the background. 
{{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1095.301752] env[63028]: DEBUG nova.network.neutron [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 3d91b47d-a5f7-4a10-aefb-1ad9c84c63e4] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1095.392906] env[63028]: DEBUG nova.policy [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2169c5f761b3452bb04fdf14cf6f1ff5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c83df00f440248ca9e84394ce6365144', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 1095.397440] env[63028]: INFO nova.compute.manager [None req-ed5ccc95-c538-41b6-beda-16d461eedf6f tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: 6865f832-d409-4b9b-8b6c-33b0bf07d2b2] Took 26.44 seconds to build instance. [ 1095.431516] env[63028]: DEBUG oslo_vmware.api [None req-c36417a8-c12b-479e-bb47-c4dc2a652aac tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736298, 'name': ReconfigVM_Task, 'duration_secs': 0.539939} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1095.436210] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-c36417a8-c12b-479e-bb47-c4dc2a652aac tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 60d18f14-536a-4b0f-912b-21f3f5a30d28] Reconfigured VM instance instance-00000063 to attach disk [datastore1] 60d18f14-536a-4b0f-912b-21f3f5a30d28/60d18f14-536a-4b0f-912b-21f3f5a30d28.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1095.437673] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9d22391b-ae11-4071-a463-cfe239133b7e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.470030] env[63028]: DEBUG oslo_vmware.api [None req-c36417a8-c12b-479e-bb47-c4dc2a652aac tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Waiting for the task: (returnval){ [ 1095.470030] env[63028]: value = "task-2736302" [ 1095.470030] env[63028]: _type = "Task" [ 1095.470030] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1095.477763] env[63028]: DEBUG oslo_vmware.api [None req-9fd4d01a-3dac-4bb3-843a-ce388315f026 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2736300, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.121424} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1095.482019] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-9fd4d01a-3dac-4bb3-843a-ce388315f026 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: a7ff444e-43bc-4925-9754-86ff30de6751] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1095.482019] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5975a19b-6f56-4234-bc09-8784398d6b2b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.489560] env[63028]: DEBUG oslo_vmware.api [None req-c36417a8-c12b-479e-bb47-c4dc2a652aac tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736302, 'name': Rename_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.517507] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-9fd4d01a-3dac-4bb3-843a-ce388315f026 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: a7ff444e-43bc-4925-9754-86ff30de6751] Reconfiguring VM instance instance-0000006d to attach disk [datastore2] a7ff444e-43bc-4925-9754-86ff30de6751/a7ff444e-43bc-4925-9754-86ff30de6751.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1095.517507] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7743e7dd-47de-4e6d-8bb6-7ed6cb3cd5a1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.539676] env[63028]: DEBUG oslo_vmware.api [None req-9fd4d01a-3dac-4bb3-843a-ce388315f026 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Waiting for the task: (returnval){ [ 1095.539676] env[63028]: value = "task-2736303" [ 1095.539676] env[63028]: _type = "Task" [ 1095.539676] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1095.543936] env[63028]: DEBUG nova.network.neutron [req-f708af62-830b-4e17-9d33-648ad762fa47 req-d96d064c-c18b-4daa-b5d8-855f607e0d98 service nova] [instance: 899496ae-8463-42e0-a287-b141d956fa0a] Updated VIF entry in instance network info cache for port 197b3459-f9f1-4fe3-a9ad-169350b4d637. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1095.544376] env[63028]: DEBUG nova.network.neutron [req-f708af62-830b-4e17-9d33-648ad762fa47 req-d96d064c-c18b-4daa-b5d8-855f607e0d98 service nova] [instance: 899496ae-8463-42e0-a287-b141d956fa0a] Updating instance_info_cache with network_info: [{"id": "197b3459-f9f1-4fe3-a9ad-169350b4d637", "address": "fa:16:3e:cb:8a:7c", "network": {"id": "c2f1496c-e3fd-43db-a032-12cdacdb4e46", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1287620143-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "25a6457f62d149629c09589feb1a550c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap197b3459-f9", "ovs_interfaceid": "197b3459-f9f1-4fe3-a9ad-169350b4d637", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1095.551321] env[63028]: DEBUG oslo_vmware.api [None req-9fd4d01a-3dac-4bb3-843a-ce388315f026 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2736303, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.557245] env[63028]: DEBUG oslo_concurrency.lockutils [None req-40fe3359-7549-4835-a0aa-b278c4c16b6b tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1095.622957] env[63028]: DEBUG oslo_concurrency.lockutils [None req-af908094-999d-4595-820a-08496dcca3a3 tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] Acquiring lock "2add1602-122e-41d7-af83-b71d8dab9288" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1095.623241] env[63028]: DEBUG oslo_concurrency.lockutils [None req-af908094-999d-4595-820a-08496dcca3a3 tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] Lock "2add1602-122e-41d7-af83-b71d8dab9288" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1095.623449] env[63028]: DEBUG oslo_concurrency.lockutils [None req-af908094-999d-4595-820a-08496dcca3a3 tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] Acquiring lock "2add1602-122e-41d7-af83-b71d8dab9288-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1095.623686] env[63028]: DEBUG oslo_concurrency.lockutils [None req-af908094-999d-4595-820a-08496dcca3a3 tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] Lock "2add1602-122e-41d7-af83-b71d8dab9288-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1095.623899] env[63028]: DEBUG oslo_concurrency.lockutils [None req-af908094-999d-4595-820a-08496dcca3a3 tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] Lock "2add1602-122e-41d7-af83-b71d8dab9288-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1095.628704] env[63028]: INFO nova.compute.manager [None req-af908094-999d-4595-820a-08496dcca3a3 tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] [instance: 2add1602-122e-41d7-af83-b71d8dab9288] Terminating instance [ 1095.802487] env[63028]: DEBUG nova.compute.manager [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 3d91b47d-a5f7-4a10-aefb-1ad9c84c63e4] Start building block device mappings for instance. 
{{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1095.870130] env[63028]: DEBUG nova.network.neutron [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 3d91b47d-a5f7-4a10-aefb-1ad9c84c63e4] Successfully created port: 4f055c6d-021a-4083-bc67-1c9a8e24f55e {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1095.899296] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ed5ccc95-c538-41b6-beda-16d461eedf6f tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Lock "6865f832-d409-4b9b-8b6c-33b0bf07d2b2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.978s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1095.981212] env[63028]: DEBUG oslo_vmware.api [None req-c36417a8-c12b-479e-bb47-c4dc2a652aac tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736302, 'name': Rename_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.052021] env[63028]: DEBUG oslo_concurrency.lockutils [req-f708af62-830b-4e17-9d33-648ad762fa47 req-d96d064c-c18b-4daa-b5d8-855f607e0d98 service nova] Releasing lock "refresh_cache-899496ae-8463-42e0-a287-b141d956fa0a" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1096.059530] env[63028]: DEBUG oslo_vmware.api [None req-9fd4d01a-3dac-4bb3-843a-ce388315f026 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2736303, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.134458] env[63028]: DEBUG nova.compute.manager [None req-af908094-999d-4595-820a-08496dcca3a3 tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] [instance: 2add1602-122e-41d7-af83-b71d8dab9288] Start destroying the instance on the hypervisor. 
{{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1096.134744] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-af908094-999d-4595-820a-08496dcca3a3 tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] [instance: 2add1602-122e-41d7-af83-b71d8dab9288] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1096.135878] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73b21e8a-8486-4a3f-9531-1bbb45ea4557 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.144476] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-af908094-999d-4595-820a-08496dcca3a3 tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] [instance: 2add1602-122e-41d7-af83-b71d8dab9288] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1096.144902] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-65ad4c1f-a7ab-4aee-889e-6f5e27d25bd2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.147946] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b47cde60-d72a-4849-90d3-a11d7b6eb7f1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.156702] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26d625f0-6b29-41c1-9bd2-1146af591716 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.160375] env[63028]: DEBUG oslo_vmware.api [None req-af908094-999d-4595-820a-08496dcca3a3 tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] Waiting for the task: (returnval){ [ 1096.160375] env[63028]: value = "task-2736304" [ 1096.160375] env[63028]: _type = "Task" [ 1096.160375] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1096.195482] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4784a1b9-118b-4f86-b970-21d8a2d0f6b8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.203616] env[63028]: DEBUG oslo_vmware.api [None req-af908094-999d-4595-820a-08496dcca3a3 tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] Task: {'id': task-2736304, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.209793] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9fc8daa-acf2-4b06-86a1-9e915ea579a1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.232506] env[63028]: DEBUG nova.compute.provider_tree [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1096.482986] env[63028]: DEBUG oslo_vmware.api [None req-c36417a8-c12b-479e-bb47-c4dc2a652aac tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736302, 'name': Rename_Task, 'duration_secs': 0.58916} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1096.482986] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-c36417a8-c12b-479e-bb47-c4dc2a652aac tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 60d18f14-536a-4b0f-912b-21f3f5a30d28] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1096.482986] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8da639d0-bfbc-4fab-9f98-4b033c6c2fbe {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.490338] env[63028]: DEBUG oslo_vmware.api [None req-c36417a8-c12b-479e-bb47-c4dc2a652aac tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Waiting for the task: (returnval){ [ 1096.490338] env[63028]: value = "task-2736305" [ 1096.490338] env[63028]: _type = "Task" [ 1096.490338] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1096.501777] env[63028]: DEBUG oslo_vmware.api [None req-c36417a8-c12b-479e-bb47-c4dc2a652aac tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736305, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.550817] env[63028]: DEBUG oslo_vmware.api [None req-9fd4d01a-3dac-4bb3-843a-ce388315f026 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2736303, 'name': ReconfigVM_Task, 'duration_secs': 0.696806} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1096.552074] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-9fd4d01a-3dac-4bb3-843a-ce388315f026 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: a7ff444e-43bc-4925-9754-86ff30de6751] Reconfigured VM instance instance-0000006d to attach disk [datastore2] a7ff444e-43bc-4925-9754-86ff30de6751/a7ff444e-43bc-4925-9754-86ff30de6751.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1096.552735] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2bfee52f-e8d9-4751-9892-d93569ef18bd {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.560926] env[63028]: DEBUG oslo_vmware.api [None req-9fd4d01a-3dac-4bb3-843a-ce388315f026 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Waiting for the task: (returnval){ [ 1096.560926] env[63028]: value = "task-2736306" [ 1096.560926] env[63028]: _type = "Task" [ 1096.560926] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1096.570102] env[63028]: DEBUG oslo_vmware.api [None req-9fd4d01a-3dac-4bb3-843a-ce388315f026 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2736306, 'name': Rename_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.573078] env[63028]: DEBUG nova.compute.manager [req-08563fdd-7fa8-438c-bf8e-82d4d7b24c77 req-dd10df65-6640-416a-ae2a-d113d72973c8 service nova] [instance: 899496ae-8463-42e0-a287-b141d956fa0a] Received event network-changed-197b3459-f9f1-4fe3-a9ad-169350b4d637 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1096.573279] env[63028]: DEBUG nova.compute.manager [req-08563fdd-7fa8-438c-bf8e-82d4d7b24c77 req-dd10df65-6640-416a-ae2a-d113d72973c8 service nova] [instance: 899496ae-8463-42e0-a287-b141d956fa0a] Refreshing instance network info cache due to event network-changed-197b3459-f9f1-4fe3-a9ad-169350b4d637. 
{{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1096.573504] env[63028]: DEBUG oslo_concurrency.lockutils [req-08563fdd-7fa8-438c-bf8e-82d4d7b24c77 req-dd10df65-6640-416a-ae2a-d113d72973c8 service nova] Acquiring lock "refresh_cache-899496ae-8463-42e0-a287-b141d956fa0a" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1096.573642] env[63028]: DEBUG oslo_concurrency.lockutils [req-08563fdd-7fa8-438c-bf8e-82d4d7b24c77 req-dd10df65-6640-416a-ae2a-d113d72973c8 service nova] Acquired lock "refresh_cache-899496ae-8463-42e0-a287-b141d956fa0a" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1096.573851] env[63028]: DEBUG nova.network.neutron [req-08563fdd-7fa8-438c-bf8e-82d4d7b24c77 req-dd10df65-6640-416a-ae2a-d113d72973c8 service nova] [instance: 899496ae-8463-42e0-a287-b141d956fa0a] Refreshing network info cache for port 197b3459-f9f1-4fe3-a9ad-169350b4d637 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1096.673596] env[63028]: DEBUG oslo_vmware.api [None req-af908094-999d-4595-820a-08496dcca3a3 tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] Task: {'id': task-2736304, 'name': PowerOffVM_Task, 'duration_secs': 0.194981} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1096.673596] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-af908094-999d-4595-820a-08496dcca3a3 tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] [instance: 2add1602-122e-41d7-af83-b71d8dab9288] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1096.673596] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-af908094-999d-4595-820a-08496dcca3a3 tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] [instance: 2add1602-122e-41d7-af83-b71d8dab9288] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1096.673596] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c6367d9a-438d-44e4-8667-334d26c999e4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.738182] env[63028]: DEBUG nova.scheduler.client.report [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1096.747278] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-af908094-999d-4595-820a-08496dcca3a3 tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] [instance: 2add1602-122e-41d7-af83-b71d8dab9288] Unregistered the VM {{(pid=63028) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1096.747502] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-af908094-999d-4595-820a-08496dcca3a3 tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] [instance: 2add1602-122e-41d7-af83-b71d8dab9288] Deleting contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1096.747681] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-af908094-999d-4595-820a-08496dcca3a3 tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] Deleting the datastore file [datastore2] 2add1602-122e-41d7-af83-b71d8dab9288 {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1096.747964] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5a292f1d-c156-4451-895f-74e77e4b5627 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.758821] env[63028]: DEBUG oslo_vmware.api [None req-af908094-999d-4595-820a-08496dcca3a3 tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] Waiting for the task: (returnval){ [ 1096.758821] env[63028]: value = "task-2736308" [ 1096.758821] env[63028]: _type = "Task" [ 1096.758821] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1096.772556] env[63028]: DEBUG oslo_vmware.api [None req-af908094-999d-4595-820a-08496dcca3a3 tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] Task: {'id': task-2736308, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.814235] env[63028]: DEBUG nova.compute.manager [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 3d91b47d-a5f7-4a10-aefb-1ad9c84c63e4] Start spawning the instance on the hypervisor. 
{{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1096.852790] env[63028]: DEBUG nova.virt.hardware [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1096.853052] env[63028]: DEBUG nova.virt.hardware [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1096.853245] env[63028]: DEBUG nova.virt.hardware [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1096.853523] env[63028]: DEBUG nova.virt.hardware [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1096.853676] env[63028]: DEBUG nova.virt.hardware [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1096.853826] env[63028]: DEBUG nova.virt.hardware [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1096.854053] env[63028]: DEBUG nova.virt.hardware [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1096.854222] env[63028]: DEBUG nova.virt.hardware [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1096.854451] env[63028]: DEBUG nova.virt.hardware [None 
req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1096.854624] env[63028]: DEBUG nova.virt.hardware [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1096.854869] env[63028]: DEBUG nova.virt.hardware [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1096.855953] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c7069bb-8d22-4b70-89ba-d38eb0a6b442 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.864700] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-740c0d51-2039-44b4-a4f9-afccbd278562 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.001388] env[63028]: DEBUG oslo_vmware.api [None req-c36417a8-c12b-479e-bb47-c4dc2a652aac tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736305, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.070796] env[63028]: DEBUG oslo_vmware.api [None req-9fd4d01a-3dac-4bb3-843a-ce388315f026 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2736306, 'name': Rename_Task, 'duration_secs': 0.166838} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1097.071094] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-9fd4d01a-3dac-4bb3-843a-ce388315f026 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: a7ff444e-43bc-4925-9754-86ff30de6751] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1097.071409] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8d67202e-f908-4311-b0f9-f87ba800ebb5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.078757] env[63028]: DEBUG oslo_vmware.api [None req-9fd4d01a-3dac-4bb3-843a-ce388315f026 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Waiting for the task: (returnval){ [ 1097.078757] env[63028]: value = "task-2736309" [ 1097.078757] env[63028]: _type = "Task" [ 1097.078757] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1097.086894] env[63028]: DEBUG oslo_vmware.api [None req-9fd4d01a-3dac-4bb3-843a-ce388315f026 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2736309, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.127640] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4449cd42-ad53-4e8c-abd8-2aebc5abfa82 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Acquiring lock "e5767896-8203-4b18-826f-dcb2fe02268e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1097.127640] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4449cd42-ad53-4e8c-abd8-2aebc5abfa82 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Lock "e5767896-8203-4b18-826f-dcb2fe02268e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1097.244146] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.446s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1097.245093] env[63028]: DEBUG nova.compute.manager [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 719e014f-0544-4832-81ae-26b028b17be0] Start building networks asynchronously for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1097.248558] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8591df8e-c073-4f58-b4a9-6a93be3007ed tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 14.261s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1097.273128] env[63028]: DEBUG oslo_vmware.api [None req-af908094-999d-4595-820a-08496dcca3a3 tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] Task: {'id': task-2736308, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.135146} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1097.273457] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-af908094-999d-4595-820a-08496dcca3a3 tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1097.273657] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-af908094-999d-4595-820a-08496dcca3a3 tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] [instance: 2add1602-122e-41d7-af83-b71d8dab9288] Deleted contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1097.273836] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-af908094-999d-4595-820a-08496dcca3a3 tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] [instance: 2add1602-122e-41d7-af83-b71d8dab9288] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1097.274011] env[63028]: INFO nova.compute.manager [None req-af908094-999d-4595-820a-08496dcca3a3 tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] [instance: 2add1602-122e-41d7-af83-b71d8dab9288] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1097.274264] env[63028]: DEBUG oslo.service.loopingcall [None req-af908094-999d-4595-820a-08496dcca3a3 tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1097.274837] env[63028]: DEBUG nova.compute.manager [-] [instance: 2add1602-122e-41d7-af83-b71d8dab9288] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1097.274837] env[63028]: DEBUG nova.network.neutron [-] [instance: 2add1602-122e-41d7-af83-b71d8dab9288] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1097.500314] env[63028]: DEBUG oslo_vmware.api [None req-c36417a8-c12b-479e-bb47-c4dc2a652aac tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736305, 'name': PowerOnVM_Task, 'duration_secs': 0.578383} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1097.500659] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-c36417a8-c12b-479e-bb47-c4dc2a652aac tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 60d18f14-536a-4b0f-912b-21f3f5a30d28] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1097.500809] env[63028]: DEBUG nova.compute.manager [None req-c36417a8-c12b-479e-bb47-c4dc2a652aac tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 60d18f14-536a-4b0f-912b-21f3f5a30d28] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1097.501672] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54228051-ec54-40f2-b736-1f4fbef6be1a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.589612] env[63028]: DEBUG oslo_vmware.api [None req-9fd4d01a-3dac-4bb3-843a-ce388315f026 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2736309, 'name': PowerOnVM_Task, 'duration_secs': 0.499892} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1097.589898] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-9fd4d01a-3dac-4bb3-843a-ce388315f026 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: a7ff444e-43bc-4925-9754-86ff30de6751] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1097.590571] env[63028]: INFO nova.compute.manager [None req-9fd4d01a-3dac-4bb3-843a-ce388315f026 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: a7ff444e-43bc-4925-9754-86ff30de6751] Took 9.00 seconds to spawn the instance on the hypervisor. [ 1097.590818] env[63028]: DEBUG nova.compute.manager [None req-9fd4d01a-3dac-4bb3-843a-ce388315f026 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: a7ff444e-43bc-4925-9754-86ff30de6751] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1097.591637] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5987bb1b-f5bc-497b-8a20-7ae3245125c6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.633237] env[63028]: DEBUG nova.compute.manager [None req-4449cd42-ad53-4e8c-abd8-2aebc5abfa82 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Starting instance... 
{{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1097.751881] env[63028]: DEBUG nova.compute.utils [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1097.753413] env[63028]: DEBUG nova.objects.instance [None req-8591df8e-c073-4f58-b4a9-6a93be3007ed tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Lazy-loading 'migration_context' on Instance uuid d41a1eae-bb89-4222-9466-d86af891c654 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1097.758860] env[63028]: DEBUG nova.compute.manager [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 719e014f-0544-4832-81ae-26b028b17be0] Allocating IP information in the background. {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1097.759047] env[63028]: DEBUG nova.network.neutron [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 719e014f-0544-4832-81ae-26b028b17be0] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1097.762313] env[63028]: DEBUG nova.network.neutron [req-08563fdd-7fa8-438c-bf8e-82d4d7b24c77 req-dd10df65-6640-416a-ae2a-d113d72973c8 service nova] [instance: 899496ae-8463-42e0-a287-b141d956fa0a] Updated VIF entry in instance network info cache for port 197b3459-f9f1-4fe3-a9ad-169350b4d637. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1097.762656] env[63028]: DEBUG nova.network.neutron [req-08563fdd-7fa8-438c-bf8e-82d4d7b24c77 req-dd10df65-6640-416a-ae2a-d113d72973c8 service nova] [instance: 899496ae-8463-42e0-a287-b141d956fa0a] Updating instance_info_cache with network_info: [{"id": "197b3459-f9f1-4fe3-a9ad-169350b4d637", "address": "fa:16:3e:cb:8a:7c", "network": {"id": "c2f1496c-e3fd-43db-a032-12cdacdb4e46", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1287620143-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "25a6457f62d149629c09589feb1a550c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap197b3459-f9", "ovs_interfaceid": "197b3459-f9f1-4fe3-a9ad-169350b4d637", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1097.825018] env[63028]: DEBUG nova.compute.manager [req-dc8e31f7-953c-4b3a-aad6-6d44c04e1152 req-6132d904-adc1-4612-ba41-55d6001af9c1 service nova] [instance: 3d91b47d-a5f7-4a10-aefb-1ad9c84c63e4] Received event network-vif-plugged-4f055c6d-021a-4083-bc67-1c9a8e24f55e {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1097.825300] env[63028]: DEBUG oslo_concurrency.lockutils [req-dc8e31f7-953c-4b3a-aad6-6d44c04e1152 req-6132d904-adc1-4612-ba41-55d6001af9c1 service nova] Acquiring lock "3d91b47d-a5f7-4a10-aefb-1ad9c84c63e4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1097.825663] env[63028]: DEBUG oslo_concurrency.lockutils [req-dc8e31f7-953c-4b3a-aad6-6d44c04e1152 req-6132d904-adc1-4612-ba41-55d6001af9c1 service nova] Lock "3d91b47d-a5f7-4a10-aefb-1ad9c84c63e4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1097.825901] env[63028]: DEBUG oslo_concurrency.lockutils [req-dc8e31f7-953c-4b3a-aad6-6d44c04e1152 req-6132d904-adc1-4612-ba41-55d6001af9c1 service nova] Lock "3d91b47d-a5f7-4a10-aefb-1ad9c84c63e4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1097.826287] env[63028]: DEBUG nova.compute.manager [req-dc8e31f7-953c-4b3a-aad6-6d44c04e1152 req-6132d904-adc1-4612-ba41-55d6001af9c1 service nova] [instance: 3d91b47d-a5f7-4a10-aefb-1ad9c84c63e4] No waiting events found dispatching network-vif-plugged-4f055c6d-021a-4083-bc67-1c9a8e24f55e {{(pid=63028) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:322}} [ 1097.826510] env[63028]: WARNING nova.compute.manager [req-dc8e31f7-953c-4b3a-aad6-6d44c04e1152 req-6132d904-adc1-4612-ba41-55d6001af9c1 service nova] [instance: 3d91b47d-a5f7-4a10-aefb-1ad9c84c63e4] Received unexpected event network-vif-plugged-4f055c6d-021a-4083-bc67-1c9a8e24f55e for instance with vm_state building and task_state spawning. [ 1097.884651] env[63028]: DEBUG nova.policy [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2169c5f761b3452bb04fdf14cf6f1ff5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c83df00f440248ca9e84394ce6365144', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 1098.019688] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c36417a8-c12b-479e-bb47-c4dc2a652aac tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1098.111953] env[63028]: INFO nova.compute.manager [None req-9fd4d01a-3dac-4bb3-843a-ce388315f026 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: a7ff444e-43bc-4925-9754-86ff30de6751] Took 25.14 seconds to build instance. 
[ 1098.156127] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4449cd42-ad53-4e8c-abd8-2aebc5abfa82 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1098.189217] env[63028]: DEBUG nova.compute.manager [req-67ebcb59-e15e-4a11-a2e8-ced2a39a954a req-dd9f9174-f14e-4f07-9996-4b2e2e839e00 service nova] [instance: 2add1602-122e-41d7-af83-b71d8dab9288] Received event network-vif-deleted-34bcf965-61f1-430c-a4dd-fab458782af7 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1098.189454] env[63028]: INFO nova.compute.manager [req-67ebcb59-e15e-4a11-a2e8-ced2a39a954a req-dd9f9174-f14e-4f07-9996-4b2e2e839e00 service nova] [instance: 2add1602-122e-41d7-af83-b71d8dab9288] Neutron deleted interface 34bcf965-61f1-430c-a4dd-fab458782af7; detaching it from the instance and deleting it from the info cache [ 1098.189665] env[63028]: DEBUG nova.network.neutron [req-67ebcb59-e15e-4a11-a2e8-ced2a39a954a req-dd9f9174-f14e-4f07-9996-4b2e2e839e00 service nova] [instance: 2add1602-122e-41d7-af83-b71d8dab9288] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1098.248903] env[63028]: DEBUG nova.network.neutron [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 719e014f-0544-4832-81ae-26b028b17be0] Successfully created port: 7807adb2-232e-40cb-b8af-cb7c31fdfc78 {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1098.260193] env[63028]: DEBUG nova.compute.manager [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 719e014f-0544-4832-81ae-26b028b17be0] Start building block device mappings for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1098.270807] env[63028]: DEBUG oslo_concurrency.lockutils [req-08563fdd-7fa8-438c-bf8e-82d4d7b24c77 req-dd10df65-6640-416a-ae2a-d113d72973c8 service nova] Releasing lock "refresh_cache-899496ae-8463-42e0-a287-b141d956fa0a" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1098.271064] env[63028]: DEBUG nova.compute.manager [req-08563fdd-7fa8-438c-bf8e-82d4d7b24c77 req-dd10df65-6640-416a-ae2a-d113d72973c8 service nova] [instance: e048cadf-9dc1-4eb7-a825-422d0736231c] Received event network-changed-60891063-6c30-480a-8e2b-f3960496f2fd {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1098.271314] env[63028]: DEBUG nova.compute.manager [req-08563fdd-7fa8-438c-bf8e-82d4d7b24c77 req-dd10df65-6640-416a-ae2a-d113d72973c8 service nova] [instance: e048cadf-9dc1-4eb7-a825-422d0736231c] Refreshing instance network info cache due to event network-changed-60891063-6c30-480a-8e2b-f3960496f2fd. 
{{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1098.271424] env[63028]: DEBUG oslo_concurrency.lockutils [req-08563fdd-7fa8-438c-bf8e-82d4d7b24c77 req-dd10df65-6640-416a-ae2a-d113d72973c8 service nova] Acquiring lock "refresh_cache-e048cadf-9dc1-4eb7-a825-422d0736231c" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1098.271560] env[63028]: DEBUG oslo_concurrency.lockutils [req-08563fdd-7fa8-438c-bf8e-82d4d7b24c77 req-dd10df65-6640-416a-ae2a-d113d72973c8 service nova] Acquired lock "refresh_cache-e048cadf-9dc1-4eb7-a825-422d0736231c" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1098.271710] env[63028]: DEBUG nova.network.neutron [req-08563fdd-7fa8-438c-bf8e-82d4d7b24c77 req-dd10df65-6640-416a-ae2a-d113d72973c8 service nova] [instance: e048cadf-9dc1-4eb7-a825-422d0736231c] Refreshing network info cache for port 60891063-6c30-480a-8e2b-f3960496f2fd {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1098.439761] env[63028]: DEBUG nova.network.neutron [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 3d91b47d-a5f7-4a10-aefb-1ad9c84c63e4] Successfully updated port: 4f055c6d-021a-4083-bc67-1c9a8e24f55e {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1098.546013] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78ddacf8-6555-4c6b-bd74-4383a33bbb07 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.553852] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71df9154-a85b-42fc-81f6-0aebd9fdaa47 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.583769] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7bd363b-75fc-41ce-9e38-11ea3c8f0c50 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.586653] env[63028]: DEBUG nova.network.neutron [-] [instance: 2add1602-122e-41d7-af83-b71d8dab9288] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1098.592801] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27ad8945-74c2-4bc3-8438-8f9e00886875 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.607531] env[63028]: DEBUG nova.compute.provider_tree [None req-8591df8e-c073-4f58-b4a9-6a93be3007ed tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1098.613857] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9fd4d01a-3dac-4bb3-843a-ce388315f026 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Lock "a7ff444e-43bc-4925-9754-86ff30de6751" "released" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.651s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1098.692489] env[63028]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f0d70bea-7bab-47a6-ae67-0b5fd5d96ad7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.704190] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fd45098-1e62-4eb4-93db-a0f963ca6335 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.738797] env[63028]: DEBUG nova.compute.manager [req-67ebcb59-e15e-4a11-a2e8-ced2a39a954a req-dd9f9174-f14e-4f07-9996-4b2e2e839e00 service nova] [instance: 2add1602-122e-41d7-af83-b71d8dab9288] Detach interface failed, port_id=34bcf965-61f1-430c-a4dd-fab458782af7, reason: Instance 2add1602-122e-41d7-af83-b71d8dab9288 could not be found. {{(pid=63028) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1098.841990] env[63028]: DEBUG oslo_concurrency.lockutils [None req-99cedbf3-0a36-40f6-8b01-7499b21e23d7 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Acquiring lock "092c7673-97fb-4085-852c-04a7c19a73e7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1098.842296] env[63028]: DEBUG oslo_concurrency.lockutils [None req-99cedbf3-0a36-40f6-8b01-7499b21e23d7 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Lock "092c7673-97fb-4085-852c-04a7c19a73e7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1098.943831] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Acquiring lock "refresh_cache-3d91b47d-a5f7-4a10-aefb-1ad9c84c63e4" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1098.943831] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Acquired lock "refresh_cache-3d91b47d-a5f7-4a10-aefb-1ad9c84c63e4" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1098.943831] env[63028]: DEBUG nova.network.neutron [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 3d91b47d-a5f7-4a10-aefb-1ad9c84c63e4] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1099.089606] env[63028]: INFO nova.compute.manager [-] [instance: 2add1602-122e-41d7-af83-b71d8dab9288] Took 1.81 seconds to deallocate network for instance. 
[ 1099.095800] env[63028]: DEBUG nova.network.neutron [req-08563fdd-7fa8-438c-bf8e-82d4d7b24c77 req-dd10df65-6640-416a-ae2a-d113d72973c8 service nova] [instance: e048cadf-9dc1-4eb7-a825-422d0736231c] Updated VIF entry in instance network info cache for port 60891063-6c30-480a-8e2b-f3960496f2fd. {{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1099.096072] env[63028]: DEBUG nova.network.neutron [req-08563fdd-7fa8-438c-bf8e-82d4d7b24c77 req-dd10df65-6640-416a-ae2a-d113d72973c8 service nova] [instance: e048cadf-9dc1-4eb7-a825-422d0736231c] Updating instance_info_cache with network_info: [{"id": "60891063-6c30-480a-8e2b-f3960496f2fd", "address": "fa:16:3e:84:9a:c5", "network": {"id": "c2f1496c-e3fd-43db-a032-12cdacdb4e46", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1287620143-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.172", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "25a6457f62d149629c09589feb1a550c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap60891063-6c", "ovs_interfaceid": "60891063-6c30-480a-8e2b-f3960496f2fd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1099.111274] env[63028]: DEBUG nova.scheduler.client.report [None req-8591df8e-c073-4f58-b4a9-6a93be3007ed tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1099.285184] env[63028]: DEBUG nova.compute.manager [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 719e014f-0544-4832-81ae-26b028b17be0] Start spawning the instance on the hypervisor. 
{{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1099.312588] env[63028]: DEBUG nova.virt.hardware [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1099.312588] env[63028]: DEBUG nova.virt.hardware [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1099.312588] env[63028]: DEBUG nova.virt.hardware [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1099.312588] env[63028]: DEBUG nova.virt.hardware [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1099.312809] env[63028]: DEBUG nova.virt.hardware [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1099.312809] env[63028]: DEBUG nova.virt.hardware [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1099.313103] env[63028]: DEBUG nova.virt.hardware [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1099.313319] env[63028]: DEBUG nova.virt.hardware [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1099.313530] env[63028]: DEBUG nova.virt.hardware [None 
req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1099.313729] env[63028]: DEBUG nova.virt.hardware [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1099.313939] env[63028]: DEBUG nova.virt.hardware [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1099.314886] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ad8b308-35f0-4a35-a033-031e483ac25e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.324100] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66ac4714-2a9c-4327-a904-e5e2c01bad39 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.344956] env[63028]: DEBUG nova.compute.manager [None req-99cedbf3-0a36-40f6-8b01-7499b21e23d7 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 092c7673-97fb-4085-852c-04a7c19a73e7] Starting instance... {{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1099.475568] env[63028]: DEBUG nova.network.neutron [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 3d91b47d-a5f7-4a10-aefb-1ad9c84c63e4] Instance cache missing network info. {{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1099.486540] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-e434ffd9-9803-475f-906f-37b8ad29e731 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] [instance: f804ec95-0b97-4960-844d-b678b97fc401] Volume attach. 
Driver type: vmdk {{(pid=63028) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1099.486826] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-e434ffd9-9803-475f-906f-37b8ad29e731 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] [instance: f804ec95-0b97-4960-844d-b678b97fc401] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-550875', 'volume_id': '8a20dc5f-0351-4017-9a2d-4311f8616190', 'name': 'volume-8a20dc5f-0351-4017-9a2d-4311f8616190', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'f804ec95-0b97-4960-844d-b678b97fc401', 'attached_at': '', 'detached_at': '', 'volume_id': '8a20dc5f-0351-4017-9a2d-4311f8616190', 'serial': '8a20dc5f-0351-4017-9a2d-4311f8616190'} {{(pid=63028) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1099.487712] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8e9f502-c6f4-4bff-ac7e-c5653a8b75e7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.504237] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9c34d4b-0baa-4cc4-a481-9abf07af519a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.531888] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-e434ffd9-9803-475f-906f-37b8ad29e731 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] [instance: f804ec95-0b97-4960-844d-b678b97fc401] Reconfiguring VM instance instance-00000062 to attach disk [datastore1] volume-8a20dc5f-0351-4017-9a2d-4311f8616190/volume-8a20dc5f-0351-4017-9a2d-4311f8616190.vmdk or device None with type thin {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1099.534455] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f415c2f2-0dc3-4e81-b791-bde31b57c518 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.553140] env[63028]: DEBUG oslo_vmware.api [None req-e434ffd9-9803-475f-906f-37b8ad29e731 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Waiting for the task: (returnval){ [ 1099.553140] env[63028]: value = "task-2736311" [ 1099.553140] env[63028]: _type = "Task" [ 1099.553140] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1099.563434] env[63028]: DEBUG oslo_vmware.api [None req-e434ffd9-9803-475f-906f-37b8ad29e731 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Task: {'id': task-2736311, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.598697] env[63028]: DEBUG oslo_concurrency.lockutils [None req-af908094-999d-4595-820a-08496dcca3a3 tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1099.599210] env[63028]: DEBUG oslo_concurrency.lockutils [req-08563fdd-7fa8-438c-bf8e-82d4d7b24c77 req-dd10df65-6640-416a-ae2a-d113d72973c8 service nova] Releasing lock "refresh_cache-e048cadf-9dc1-4eb7-a825-422d0736231c" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1099.599465] env[63028]: DEBUG nova.compute.manager [req-08563fdd-7fa8-438c-bf8e-82d4d7b24c77 req-dd10df65-6640-416a-ae2a-d113d72973c8 service nova] [instance: 6865f832-d409-4b9b-8b6c-33b0bf07d2b2] Received event network-changed-4c30382c-2b94-4990-b3d6-533480eb847b {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1099.599636] env[63028]: DEBUG nova.compute.manager [req-08563fdd-7fa8-438c-bf8e-82d4d7b24c77 req-dd10df65-6640-416a-ae2a-d113d72973c8 service nova] [instance: 6865f832-d409-4b9b-8b6c-33b0bf07d2b2] Refreshing instance network info cache due to event network-changed-4c30382c-2b94-4990-b3d6-533480eb847b. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1099.599852] env[63028]: DEBUG oslo_concurrency.lockutils [req-08563fdd-7fa8-438c-bf8e-82d4d7b24c77 req-dd10df65-6640-416a-ae2a-d113d72973c8 service nova] Acquiring lock "refresh_cache-6865f832-d409-4b9b-8b6c-33b0bf07d2b2" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1099.599994] env[63028]: DEBUG oslo_concurrency.lockutils [req-08563fdd-7fa8-438c-bf8e-82d4d7b24c77 req-dd10df65-6640-416a-ae2a-d113d72973c8 service nova] Acquired lock "refresh_cache-6865f832-d409-4b9b-8b6c-33b0bf07d2b2" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1099.600186] env[63028]: DEBUG nova.network.neutron [req-08563fdd-7fa8-438c-bf8e-82d4d7b24c77 req-dd10df65-6640-416a-ae2a-d113d72973c8 service nova] [instance: 6865f832-d409-4b9b-8b6c-33b0bf07d2b2] Refreshing network info cache for port 4c30382c-2b94-4990-b3d6-533480eb847b {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1099.761952] env[63028]: DEBUG nova.network.neutron [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 3d91b47d-a5f7-4a10-aefb-1ad9c84c63e4] Updating instance_info_cache with network_info: [{"id": "4f055c6d-021a-4083-bc67-1c9a8e24f55e", "address": "fa:16:3e:90:a3:13", "network": {"id": "15d7a776-341f-4ba6-b8c6-f0cd9f0688c1", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1631909054-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c83df00f440248ca9e84394ce6365144", "mtu": 8950, 
"physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4fb94adb-cc41-4c16-9830-a3205dbd2bf5", "external-id": "nsx-vlan-transportzone-100", "segmentation_id": 100, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4f055c6d-02", "ovs_interfaceid": "4f055c6d-021a-4083-bc67-1c9a8e24f55e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1099.875298] env[63028]: DEBUG oslo_concurrency.lockutils [None req-99cedbf3-0a36-40f6-8b01-7499b21e23d7 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1099.879988] env[63028]: DEBUG oslo_concurrency.lockutils [None req-37c28f8e-9d2f-4a53-bb11-705a4a3a6321 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Acquiring lock "a7ff444e-43bc-4925-9754-86ff30de6751" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1099.880338] env[63028]: DEBUG oslo_concurrency.lockutils [None req-37c28f8e-9d2f-4a53-bb11-705a4a3a6321 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Lock "a7ff444e-43bc-4925-9754-86ff30de6751" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1099.880615] env[63028]: DEBUG oslo_concurrency.lockutils [None req-37c28f8e-9d2f-4a53-bb11-705a4a3a6321 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Acquiring lock "a7ff444e-43bc-4925-9754-86ff30de6751-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1099.881249] env[63028]: DEBUG oslo_concurrency.lockutils [None req-37c28f8e-9d2f-4a53-bb11-705a4a3a6321 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Lock "a7ff444e-43bc-4925-9754-86ff30de6751-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1099.881249] env[63028]: DEBUG oslo_concurrency.lockutils [None req-37c28f8e-9d2f-4a53-bb11-705a4a3a6321 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Lock "a7ff444e-43bc-4925-9754-86ff30de6751-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1099.885898] env[63028]: INFO nova.compute.manager [None req-37c28f8e-9d2f-4a53-bb11-705a4a3a6321 tempest-ServerDiskConfigTestJSON-1072642600 
tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: a7ff444e-43bc-4925-9754-86ff30de6751] Terminating instance [ 1099.904437] env[63028]: DEBUG nova.network.neutron [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 719e014f-0544-4832-81ae-26b028b17be0] Successfully updated port: 7807adb2-232e-40cb-b8af-cb7c31fdfc78 {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1100.063273] env[63028]: DEBUG oslo_vmware.api [None req-e434ffd9-9803-475f-906f-37b8ad29e731 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Task: {'id': task-2736311, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.122147] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8591df8e-c073-4f58-b4a9-6a93be3007ed tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 2.874s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1100.128035] env[63028]: DEBUG oslo_concurrency.lockutils [None req-6daeb339-3b8e-4790-b9aa-2df593716251 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.758s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1100.128283] env[63028]: DEBUG nova.objects.instance [None req-6daeb339-3b8e-4790-b9aa-2df593716251 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Lazy-loading 'resources' on Instance uuid f0ca0d73-d428-4b8c-acac-a80b7b7dd793 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1100.227807] env[63028]: DEBUG nova.compute.manager [req-1f56e2aa-760d-448c-b2fc-6a744737dc7c req-b23696bf-4d50-4cde-a7d8-5c7ba300b097 service nova] [instance: 3d91b47d-a5f7-4a10-aefb-1ad9c84c63e4] Received event network-changed-4f055c6d-021a-4083-bc67-1c9a8e24f55e {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1100.228117] env[63028]: DEBUG nova.compute.manager [req-1f56e2aa-760d-448c-b2fc-6a744737dc7c req-b23696bf-4d50-4cde-a7d8-5c7ba300b097 service nova] [instance: 3d91b47d-a5f7-4a10-aefb-1ad9c84c63e4] Refreshing instance network info cache due to event network-changed-4f055c6d-021a-4083-bc67-1c9a8e24f55e. 
{{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1100.228287] env[63028]: DEBUG oslo_concurrency.lockutils [req-1f56e2aa-760d-448c-b2fc-6a744737dc7c req-b23696bf-4d50-4cde-a7d8-5c7ba300b097 service nova] Acquiring lock "refresh_cache-3d91b47d-a5f7-4a10-aefb-1ad9c84c63e4" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1100.264928] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Releasing lock "refresh_cache-3d91b47d-a5f7-4a10-aefb-1ad9c84c63e4" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1100.265126] env[63028]: DEBUG nova.compute.manager [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 3d91b47d-a5f7-4a10-aefb-1ad9c84c63e4] Instance network_info: |[{"id": "4f055c6d-021a-4083-bc67-1c9a8e24f55e", "address": "fa:16:3e:90:a3:13", "network": {"id": "15d7a776-341f-4ba6-b8c6-f0cd9f0688c1", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1631909054-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c83df00f440248ca9e84394ce6365144", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4fb94adb-cc41-4c16-9830-a3205dbd2bf5", "external-id": "nsx-vlan-transportzone-100", "segmentation_id": 100, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4f055c6d-02", "ovs_interfaceid": "4f055c6d-021a-4083-bc67-1c9a8e24f55e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1100.265597] env[63028]: DEBUG oslo_concurrency.lockutils [req-1f56e2aa-760d-448c-b2fc-6a744737dc7c req-b23696bf-4d50-4cde-a7d8-5c7ba300b097 service nova] Acquired lock "refresh_cache-3d91b47d-a5f7-4a10-aefb-1ad9c84c63e4" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1100.265721] env[63028]: DEBUG nova.network.neutron [req-1f56e2aa-760d-448c-b2fc-6a744737dc7c req-b23696bf-4d50-4cde-a7d8-5c7ba300b097 service nova] [instance: 3d91b47d-a5f7-4a10-aefb-1ad9c84c63e4] Refreshing network info cache for port 4f055c6d-021a-4083-bc67-1c9a8e24f55e {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1100.266999] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 3d91b47d-a5f7-4a10-aefb-1ad9c84c63e4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:90:a3:13', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4fb94adb-cc41-4c16-9830-a3205dbd2bf5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'4f055c6d-021a-4083-bc67-1c9a8e24f55e', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1100.274491] env[63028]: DEBUG oslo.service.loopingcall [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1100.278287] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3d91b47d-a5f7-4a10-aefb-1ad9c84c63e4] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1100.278566] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4e33bf9f-9382-4835-a738-e5a93f58c479 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.300236] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1100.300236] env[63028]: value = "task-2736312" [ 1100.300236] env[63028]: _type = "Task" [ 1100.300236] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1100.309518] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2736312, 'name': CreateVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.389882] env[63028]: DEBUG nova.compute.manager [None req-37c28f8e-9d2f-4a53-bb11-705a4a3a6321 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: a7ff444e-43bc-4925-9754-86ff30de6751] Start destroying the instance on the hypervisor. {{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1100.390196] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-37c28f8e-9d2f-4a53-bb11-705a4a3a6321 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: a7ff444e-43bc-4925-9754-86ff30de6751] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1100.391192] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6bb293c-7d4f-4614-8b0e-5ca3e02ad9c0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.399667] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-37c28f8e-9d2f-4a53-bb11-705a4a3a6321 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: a7ff444e-43bc-4925-9754-86ff30de6751] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1100.400594] env[63028]: DEBUG nova.network.neutron [req-08563fdd-7fa8-438c-bf8e-82d4d7b24c77 req-dd10df65-6640-416a-ae2a-d113d72973c8 service nova] [instance: 6865f832-d409-4b9b-8b6c-33b0bf07d2b2] Updated VIF entry in instance network info cache for port 4c30382c-2b94-4990-b3d6-533480eb847b. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1100.400962] env[63028]: DEBUG nova.network.neutron [req-08563fdd-7fa8-438c-bf8e-82d4d7b24c77 req-dd10df65-6640-416a-ae2a-d113d72973c8 service nova] [instance: 6865f832-d409-4b9b-8b6c-33b0bf07d2b2] Updating instance_info_cache with network_info: [{"id": "4c30382c-2b94-4990-b3d6-533480eb847b", "address": "fa:16:3e:64:a0:ec", "network": {"id": "063de6d9-43e6-4adb-88dc-d4fe17058488", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-659483097-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.213", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68de7445caeb4381b9e68c685ccb5e0b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b356db78-99c7-4464-822c-fc7e193f7878", "external-id": "nsx-vlan-transportzone-231", "segmentation_id": 231, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4c30382c-2b", "ovs_interfaceid": "4c30382c-2b94-4990-b3d6-533480eb847b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1100.402216] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1277f005-f36a-4d52-8a60-ebe3c2b215fd {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.406717] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Acquiring lock "refresh_cache-719e014f-0544-4832-81ae-26b028b17be0" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1100.406992] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Acquired lock "refresh_cache-719e014f-0544-4832-81ae-26b028b17be0" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1100.407131] env[63028]: DEBUG nova.network.neutron [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 719e014f-0544-4832-81ae-26b028b17be0] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1100.409176] env[63028]: DEBUG oslo_vmware.api [None req-37c28f8e-9d2f-4a53-bb11-705a4a3a6321 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Waiting for the task: (returnval){ [ 1100.409176] env[63028]: value = "task-2736313" [ 1100.409176] env[63028]: _type = "Task" [ 1100.409176] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1100.418325] env[63028]: DEBUG oslo_vmware.api [None req-37c28f8e-9d2f-4a53-bb11-705a4a3a6321 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2736313, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.566873] env[63028]: DEBUG oslo_vmware.api [None req-e434ffd9-9803-475f-906f-37b8ad29e731 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Task: {'id': task-2736311, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.811771] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2736312, 'name': CreateVM_Task} progress is 99%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.905020] env[63028]: DEBUG oslo_concurrency.lockutils [req-08563fdd-7fa8-438c-bf8e-82d4d7b24c77 req-dd10df65-6640-416a-ae2a-d113d72973c8 service nova] Releasing lock "refresh_cache-6865f832-d409-4b9b-8b6c-33b0bf07d2b2" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1100.908320] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7a1f237-ed0f-4ab5-824f-f709981e63a0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.920197] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a687efd0-14ed-40b9-943c-74fa3eed24aa {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.929239] env[63028]: DEBUG oslo_vmware.api [None req-37c28f8e-9d2f-4a53-bb11-705a4a3a6321 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2736313, 'name': PowerOffVM_Task, 'duration_secs': 0.491963} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1100.929854] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-37c28f8e-9d2f-4a53-bb11-705a4a3a6321 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: a7ff444e-43bc-4925-9754-86ff30de6751] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1100.930410] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-37c28f8e-9d2f-4a53-bb11-705a4a3a6321 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: a7ff444e-43bc-4925-9754-86ff30de6751] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1100.930613] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2f5ff594-40d8-426d-89fd-57c3a5b2c144 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.960220] env[63028]: DEBUG nova.network.neutron [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 719e014f-0544-4832-81ae-26b028b17be0] Instance cache missing network info. {{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1100.965094] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54b7800d-00fa-42e4-baa0-bc0a384c5731 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.972332] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3cfa3be-10bb-4cf5-847d-25ccd26b65d0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.985448] env[63028]: DEBUG nova.compute.provider_tree [None req-6daeb339-3b8e-4790-b9aa-2df593716251 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1101.046838] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-37c28f8e-9d2f-4a53-bb11-705a4a3a6321 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: a7ff444e-43bc-4925-9754-86ff30de6751] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1101.047131] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-37c28f8e-9d2f-4a53-bb11-705a4a3a6321 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: a7ff444e-43bc-4925-9754-86ff30de6751] Deleting contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1101.047269] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-37c28f8e-9d2f-4a53-bb11-705a4a3a6321 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Deleting the datastore file [datastore2] a7ff444e-43bc-4925-9754-86ff30de6751 {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1101.047527] 
env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d6e0e94f-b99f-4c8f-b9b5-1f65a35a429c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.054328] env[63028]: DEBUG oslo_vmware.api [None req-37c28f8e-9d2f-4a53-bb11-705a4a3a6321 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Waiting for the task: (returnval){ [ 1101.054328] env[63028]: value = "task-2736315" [ 1101.054328] env[63028]: _type = "Task" [ 1101.054328] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1101.068405] env[63028]: DEBUG oslo_concurrency.lockutils [None req-384368a9-37e0-42cd-bf00-b60b6ed58e75 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Acquiring lock "interface-e048cadf-9dc1-4eb7-a825-422d0736231c-dfcd47cc-53f0-4202-ba7f-b439ff3a8fe7" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1101.068641] env[63028]: DEBUG oslo_concurrency.lockutils [None req-384368a9-37e0-42cd-bf00-b60b6ed58e75 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Lock "interface-e048cadf-9dc1-4eb7-a825-422d0736231c-dfcd47cc-53f0-4202-ba7f-b439ff3a8fe7" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1101.069077] env[63028]: DEBUG nova.objects.instance [None req-384368a9-37e0-42cd-bf00-b60b6ed58e75 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Lazy-loading 'flavor' on Instance uuid e048cadf-9dc1-4eb7-a825-422d0736231c {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1101.070178] env[63028]: DEBUG oslo_vmware.api [None req-37c28f8e-9d2f-4a53-bb11-705a4a3a6321 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2736315, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.073110] env[63028]: DEBUG oslo_vmware.api [None req-e434ffd9-9803-475f-906f-37b8ad29e731 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Task: {'id': task-2736311, 'name': ReconfigVM_Task, 'duration_secs': 1.188944} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1101.073586] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-e434ffd9-9803-475f-906f-37b8ad29e731 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] [instance: f804ec95-0b97-4960-844d-b678b97fc401] Reconfigured VM instance instance-00000062 to attach disk [datastore1] volume-8a20dc5f-0351-4017-9a2d-4311f8616190/volume-8a20dc5f-0351-4017-9a2d-4311f8616190.vmdk or device None with type thin {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1101.078449] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1c33658b-8574-4f24-8246-ed6dd4f56bb0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.097518] env[63028]: DEBUG oslo_vmware.api [None req-e434ffd9-9803-475f-906f-37b8ad29e731 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Waiting for the task: (returnval){ [ 1101.097518] env[63028]: value = "task-2736316" [ 1101.097518] env[63028]: _type = "Task" [ 1101.097518] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1101.106420] env[63028]: DEBUG oslo_vmware.api [None req-e434ffd9-9803-475f-906f-37b8ad29e731 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Task: {'id': task-2736316, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.145306] env[63028]: DEBUG nova.network.neutron [req-1f56e2aa-760d-448c-b2fc-6a744737dc7c req-b23696bf-4d50-4cde-a7d8-5c7ba300b097 service nova] [instance: 3d91b47d-a5f7-4a10-aefb-1ad9c84c63e4] Updated VIF entry in instance network info cache for port 4f055c6d-021a-4083-bc67-1c9a8e24f55e. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1101.145700] env[63028]: DEBUG nova.network.neutron [req-1f56e2aa-760d-448c-b2fc-6a744737dc7c req-b23696bf-4d50-4cde-a7d8-5c7ba300b097 service nova] [instance: 3d91b47d-a5f7-4a10-aefb-1ad9c84c63e4] Updating instance_info_cache with network_info: [{"id": "4f055c6d-021a-4083-bc67-1c9a8e24f55e", "address": "fa:16:3e:90:a3:13", "network": {"id": "15d7a776-341f-4ba6-b8c6-f0cd9f0688c1", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1631909054-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c83df00f440248ca9e84394ce6365144", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4fb94adb-cc41-4c16-9830-a3205dbd2bf5", "external-id": "nsx-vlan-transportzone-100", "segmentation_id": 100, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4f055c6d-02", "ovs_interfaceid": "4f055c6d-021a-4083-bc67-1c9a8e24f55e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1101.187442] env[63028]: DEBUG nova.network.neutron [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 719e014f-0544-4832-81ae-26b028b17be0] Updating instance_info_cache with network_info: [{"id": "7807adb2-232e-40cb-b8af-cb7c31fdfc78", "address": "fa:16:3e:ff:95:7a", "network": {"id": "15d7a776-341f-4ba6-b8c6-f0cd9f0688c1", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1631909054-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c83df00f440248ca9e84394ce6365144", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4fb94adb-cc41-4c16-9830-a3205dbd2bf5", "external-id": "nsx-vlan-transportzone-100", "segmentation_id": 100, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7807adb2-23", "ovs_interfaceid": "7807adb2-232e-40cb-b8af-cb7c31fdfc78", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1101.310602] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2736312, 'name': CreateVM_Task, 'duration_secs': 0.572532} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1101.310871] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3d91b47d-a5f7-4a10-aefb-1ad9c84c63e4] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1101.311427] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1101.311596] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1101.311925] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1101.312180] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ebd13104-6557-4b8c-a915-27a281f89a64 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.316457] env[63028]: DEBUG oslo_vmware.api [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Waiting for the task: (returnval){ [ 1101.316457] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5291fc3e-8c60-2218-98bf-0ffe0ce3d738" [ 1101.316457] env[63028]: _type = "Task" [ 1101.316457] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1101.325583] env[63028]: DEBUG oslo_vmware.api [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5291fc3e-8c60-2218-98bf-0ffe0ce3d738, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.489392] env[63028]: DEBUG nova.scheduler.client.report [None req-6daeb339-3b8e-4790-b9aa-2df593716251 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1101.564111] env[63028]: DEBUG oslo_vmware.api [None req-37c28f8e-9d2f-4a53-bb11-705a4a3a6321 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Task: {'id': task-2736315, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.188253} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1101.564442] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-37c28f8e-9d2f-4a53-bb11-705a4a3a6321 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1101.564687] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-37c28f8e-9d2f-4a53-bb11-705a4a3a6321 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: a7ff444e-43bc-4925-9754-86ff30de6751] Deleted contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1101.564928] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-37c28f8e-9d2f-4a53-bb11-705a4a3a6321 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: a7ff444e-43bc-4925-9754-86ff30de6751] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1101.565182] env[63028]: INFO nova.compute.manager [None req-37c28f8e-9d2f-4a53-bb11-705a4a3a6321 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [instance: a7ff444e-43bc-4925-9754-86ff30de6751] Took 1.18 seconds to destroy the instance on the hypervisor. [ 1101.565469] env[63028]: DEBUG oslo.service.loopingcall [None req-37c28f8e-9d2f-4a53-bb11-705a4a3a6321 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1101.565693] env[63028]: DEBUG nova.compute.manager [-] [instance: a7ff444e-43bc-4925-9754-86ff30de6751] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1101.565818] env[63028]: DEBUG nova.network.neutron [-] [instance: a7ff444e-43bc-4925-9754-86ff30de6751] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1101.607078] env[63028]: DEBUG oslo_vmware.api [None req-e434ffd9-9803-475f-906f-37b8ad29e731 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Task: {'id': task-2736316, 'name': ReconfigVM_Task, 'duration_secs': 0.152311} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1101.607394] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-e434ffd9-9803-475f-906f-37b8ad29e731 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] [instance: f804ec95-0b97-4960-844d-b678b97fc401] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-550875', 'volume_id': '8a20dc5f-0351-4017-9a2d-4311f8616190', 'name': 'volume-8a20dc5f-0351-4017-9a2d-4311f8616190', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'f804ec95-0b97-4960-844d-b678b97fc401', 'attached_at': '', 'detached_at': '', 'volume_id': '8a20dc5f-0351-4017-9a2d-4311f8616190', 'serial': '8a20dc5f-0351-4017-9a2d-4311f8616190'} {{(pid=63028) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1101.650193] env[63028]: DEBUG oslo_concurrency.lockutils [req-1f56e2aa-760d-448c-b2fc-6a744737dc7c req-b23696bf-4d50-4cde-a7d8-5c7ba300b097 service nova] Releasing lock "refresh_cache-3d91b47d-a5f7-4a10-aefb-1ad9c84c63e4" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1101.650363] env[63028]: DEBUG nova.compute.manager [req-1f56e2aa-760d-448c-b2fc-6a744737dc7c req-b23696bf-4d50-4cde-a7d8-5c7ba300b097 service nova] [instance: 719e014f-0544-4832-81ae-26b028b17be0] Received event network-vif-plugged-7807adb2-232e-40cb-b8af-cb7c31fdfc78 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1101.650564] env[63028]: DEBUG oslo_concurrency.lockutils [req-1f56e2aa-760d-448c-b2fc-6a744737dc7c req-b23696bf-4d50-4cde-a7d8-5c7ba300b097 service nova] Acquiring lock "719e014f-0544-4832-81ae-26b028b17be0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1101.650768] env[63028]: DEBUG oslo_concurrency.lockutils [req-1f56e2aa-760d-448c-b2fc-6a744737dc7c req-b23696bf-4d50-4cde-a7d8-5c7ba300b097 service nova] Lock "719e014f-0544-4832-81ae-26b028b17be0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1101.650939] env[63028]: DEBUG oslo_concurrency.lockutils [req-1f56e2aa-760d-448c-b2fc-6a744737dc7c req-b23696bf-4d50-4cde-a7d8-5c7ba300b097 service nova] Lock "719e014f-0544-4832-81ae-26b028b17be0-events" "released" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1101.651119] env[63028]: DEBUG nova.compute.manager [req-1f56e2aa-760d-448c-b2fc-6a744737dc7c req-b23696bf-4d50-4cde-a7d8-5c7ba300b097 service nova] [instance: 719e014f-0544-4832-81ae-26b028b17be0] No waiting events found dispatching network-vif-plugged-7807adb2-232e-40cb-b8af-cb7c31fdfc78 {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1101.651295] env[63028]: WARNING nova.compute.manager [req-1f56e2aa-760d-448c-b2fc-6a744737dc7c req-b23696bf-4d50-4cde-a7d8-5c7ba300b097 service nova] [instance: 719e014f-0544-4832-81ae-26b028b17be0] Received unexpected event network-vif-plugged-7807adb2-232e-40cb-b8af-cb7c31fdfc78 for instance with vm_state building and task_state spawning. [ 1101.651451] env[63028]: DEBUG nova.compute.manager [req-1f56e2aa-760d-448c-b2fc-6a744737dc7c req-b23696bf-4d50-4cde-a7d8-5c7ba300b097 service nova] [instance: 719e014f-0544-4832-81ae-26b028b17be0] Received event network-changed-7807adb2-232e-40cb-b8af-cb7c31fdfc78 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1101.651604] env[63028]: DEBUG nova.compute.manager [req-1f56e2aa-760d-448c-b2fc-6a744737dc7c req-b23696bf-4d50-4cde-a7d8-5c7ba300b097 service nova] [instance: 719e014f-0544-4832-81ae-26b028b17be0] Refreshing instance network info cache due to event network-changed-7807adb2-232e-40cb-b8af-cb7c31fdfc78. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1101.651768] env[63028]: DEBUG oslo_concurrency.lockutils [req-1f56e2aa-760d-448c-b2fc-6a744737dc7c req-b23696bf-4d50-4cde-a7d8-5c7ba300b097 service nova] Acquiring lock "refresh_cache-719e014f-0544-4832-81ae-26b028b17be0" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1101.670408] env[63028]: INFO nova.compute.manager [None req-8591df8e-c073-4f58-b4a9-6a93be3007ed tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Swapping old allocation on dict_keys(['399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2']) held by migration 29632012-2d70-44ac-b011-da63d2c5ae9c for instance [ 1101.689738] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Releasing lock "refresh_cache-719e014f-0544-4832-81ae-26b028b17be0" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1101.690056] env[63028]: DEBUG nova.compute.manager [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 719e014f-0544-4832-81ae-26b028b17be0] Instance network_info: |[{"id": "7807adb2-232e-40cb-b8af-cb7c31fdfc78", "address": "fa:16:3e:ff:95:7a", "network": {"id": "15d7a776-341f-4ba6-b8c6-f0cd9f0688c1", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1631909054-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, 
"dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c83df00f440248ca9e84394ce6365144", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4fb94adb-cc41-4c16-9830-a3205dbd2bf5", "external-id": "nsx-vlan-transportzone-100", "segmentation_id": 100, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7807adb2-23", "ovs_interfaceid": "7807adb2-232e-40cb-b8af-cb7c31fdfc78", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1101.690351] env[63028]: DEBUG oslo_concurrency.lockutils [req-1f56e2aa-760d-448c-b2fc-6a744737dc7c req-b23696bf-4d50-4cde-a7d8-5c7ba300b097 service nova] Acquired lock "refresh_cache-719e014f-0544-4832-81ae-26b028b17be0" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1101.690528] env[63028]: DEBUG nova.network.neutron [req-1f56e2aa-760d-448c-b2fc-6a744737dc7c req-b23696bf-4d50-4cde-a7d8-5c7ba300b097 service nova] [instance: 719e014f-0544-4832-81ae-26b028b17be0] Refreshing network info cache for port 7807adb2-232e-40cb-b8af-cb7c31fdfc78 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1101.691640] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 719e014f-0544-4832-81ae-26b028b17be0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ff:95:7a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4fb94adb-cc41-4c16-9830-a3205dbd2bf5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7807adb2-232e-40cb-b8af-cb7c31fdfc78', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1101.699211] env[63028]: DEBUG oslo.service.loopingcall [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1101.701370] env[63028]: DEBUG nova.scheduler.client.report [None req-8591df8e-c073-4f58-b4a9-6a93be3007ed tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Overwriting current allocation {'allocations': {'399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 149}}, 'project_id': 'b4dcaef840f940bda057d0371cdc5adb', 'user_id': '8b3c3b29bb4d4c23a09527bcda019773', 'consumer_generation': 1} on consumer d41a1eae-bb89-4222-9466-d86af891c654 {{(pid=63028) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2036}} [ 1101.705305] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 719e014f-0544-4832-81ae-26b028b17be0] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1101.706134] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-36b18ee8-5c90-4ad7-9a19-7b27b012a141 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.725806] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1101.725806] env[63028]: value = "task-2736317" [ 1101.725806] env[63028]: _type = "Task" [ 1101.725806] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1101.734222] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2736317, 'name': CreateVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.736404] env[63028]: DEBUG nova.objects.instance [None req-384368a9-37e0-42cd-bf00-b60b6ed58e75 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Lazy-loading 'pci_requests' on Instance uuid e048cadf-9dc1-4eb7-a825-422d0736231c {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1101.827496] env[63028]: DEBUG oslo_vmware.api [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5291fc3e-8c60-2218-98bf-0ffe0ce3d738, 'name': SearchDatastore_Task, 'duration_secs': 0.009186} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1101.827782] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1101.828058] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 3d91b47d-a5f7-4a10-aefb-1ad9c84c63e4] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1101.828345] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1101.828541] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1101.828797] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1101.829058] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9d0f7fd3-c02f-49b6-9b4c-c1090019e7c8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.836762] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1101.836931] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1101.837616] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cc93d10c-a0c9-4b45-874e-0d2d94a122cc {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.842356] env[63028]: DEBUG oslo_vmware.api [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Waiting for the task: (returnval){ [ 1101.842356] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52917a3e-a19c-c7db-74f6-2c3e2ba51d57" [ 1101.842356] env[63028]: _type = "Task" [ 1101.842356] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1101.850119] env[63028]: DEBUG oslo_vmware.api [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52917a3e-a19c-c7db-74f6-2c3e2ba51d57, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.864095] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8591df8e-c073-4f58-b4a9-6a93be3007ed tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Acquiring lock "refresh_cache-d41a1eae-bb89-4222-9466-d86af891c654" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1101.864363] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8591df8e-c073-4f58-b4a9-6a93be3007ed tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Acquired lock "refresh_cache-d41a1eae-bb89-4222-9466-d86af891c654" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1101.864571] env[63028]: DEBUG nova.network.neutron [None req-8591df8e-c073-4f58-b4a9-6a93be3007ed tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1101.995155] env[63028]: DEBUG oslo_concurrency.lockutils [None req-6daeb339-3b8e-4790-b9aa-2df593716251 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.867s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1101.997766] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5388c69a-74fc-4be8-a33f-b9fb8d66eea9 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.385s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1101.998027] env[63028]: DEBUG nova.objects.instance [None req-5388c69a-74fc-4be8-a33f-b9fb8d66eea9 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Lazy-loading 'resources' on Instance uuid 
b3930760-1888-4f80-85d8-65120a25f275 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1102.026331] env[63028]: INFO nova.scheduler.client.report [None req-6daeb339-3b8e-4790-b9aa-2df593716251 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Deleted allocations for instance f0ca0d73-d428-4b8c-acac-a80b7b7dd793 [ 1102.133567] env[63028]: DEBUG nova.network.neutron [req-1f56e2aa-760d-448c-b2fc-6a744737dc7c req-b23696bf-4d50-4cde-a7d8-5c7ba300b097 service nova] [instance: 719e014f-0544-4832-81ae-26b028b17be0] Updated VIF entry in instance network info cache for port 7807adb2-232e-40cb-b8af-cb7c31fdfc78. {{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1102.133661] env[63028]: DEBUG nova.network.neutron [req-1f56e2aa-760d-448c-b2fc-6a744737dc7c req-b23696bf-4d50-4cde-a7d8-5c7ba300b097 service nova] [instance: 719e014f-0544-4832-81ae-26b028b17be0] Updating instance_info_cache with network_info: [{"id": "7807adb2-232e-40cb-b8af-cb7c31fdfc78", "address": "fa:16:3e:ff:95:7a", "network": {"id": "15d7a776-341f-4ba6-b8c6-f0cd9f0688c1", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1631909054-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c83df00f440248ca9e84394ce6365144", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4fb94adb-cc41-4c16-9830-a3205dbd2bf5", "external-id": "nsx-vlan-transportzone-100", "segmentation_id": 100, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7807adb2-23", "ovs_interfaceid": "7807adb2-232e-40cb-b8af-cb7c31fdfc78", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1102.236112] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2736317, 'name': CreateVM_Task, 'duration_secs': 0.362923} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1102.236324] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 719e014f-0544-4832-81ae-26b028b17be0] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1102.237049] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1102.237228] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1102.237543] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1102.237794] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ebcbb7c8-5a95-4d19-aa27-03667208789f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.239665] env[63028]: DEBUG nova.objects.base [None req-384368a9-37e0-42cd-bf00-b60b6ed58e75 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=63028) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1102.239860] env[63028]: DEBUG nova.network.neutron [None req-384368a9-37e0-42cd-bf00-b60b6ed58e75 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: e048cadf-9dc1-4eb7-a825-422d0736231c] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1102.245082] env[63028]: DEBUG oslo_vmware.api [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Waiting for the task: (returnval){ [ 1102.245082] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]521b4ae3-df77-b365-2b37-2579b2d66b5a" [ 1102.245082] env[63028]: _type = "Task" [ 1102.245082] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.255733] env[63028]: DEBUG oslo_vmware.api [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]521b4ae3-df77-b365-2b37-2579b2d66b5a, 'name': SearchDatastore_Task, 'duration_secs': 0.008589} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1102.255994] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1102.256352] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 719e014f-0544-4832-81ae-26b028b17be0] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1102.256475] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1102.312806] env[63028]: DEBUG nova.policy [None req-384368a9-37e0-42cd-bf00-b60b6ed58e75 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1b48f3f2a85945379bdb33bf153bde9c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '25a6457f62d149629c09589feb1a550c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 1102.325421] env[63028]: DEBUG nova.compute.manager [req-2cc21592-b067-4599-b7a7-c616f6e525ec req-1630884d-c699-4f49-9ba7-370ac4694471 service nova] [instance: a7ff444e-43bc-4925-9754-86ff30de6751] Received event network-vif-deleted-b7d0f379-6cd1-4efa-beb8-c2a2f3ef548c {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1102.325656] env[63028]: INFO nova.compute.manager [req-2cc21592-b067-4599-b7a7-c616f6e525ec req-1630884d-c699-4f49-9ba7-370ac4694471 service nova] [instance: a7ff444e-43bc-4925-9754-86ff30de6751] Neutron deleted interface b7d0f379-6cd1-4efa-beb8-c2a2f3ef548c; detaching it from the instance and deleting it from the info cache [ 1102.325842] env[63028]: DEBUG nova.network.neutron [req-2cc21592-b067-4599-b7a7-c616f6e525ec req-1630884d-c699-4f49-9ba7-370ac4694471 service nova] [instance: a7ff444e-43bc-4925-9754-86ff30de6751] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1102.353403] env[63028]: DEBUG oslo_vmware.api [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52917a3e-a19c-c7db-74f6-2c3e2ba51d57, 'name': SearchDatastore_Task, 'duration_secs': 0.012467} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1102.354282] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-16459331-ec3e-4923-a3d4-20715f8c5fef {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.359012] env[63028]: DEBUG oslo_vmware.api [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Waiting for the task: (returnval){ [ 1102.359012] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]523aa848-3fd7-2cb7-c816-2e7792de44cb" [ 1102.359012] env[63028]: _type = "Task" [ 1102.359012] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.368437] env[63028]: DEBUG oslo_vmware.api [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]523aa848-3fd7-2cb7-c816-2e7792de44cb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.530072] env[63028]: DEBUG nova.network.neutron [-] [instance: a7ff444e-43bc-4925-9754-86ff30de6751] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1102.538630] env[63028]: DEBUG oslo_concurrency.lockutils [None req-6daeb339-3b8e-4790-b9aa-2df593716251 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Lock "f0ca0d73-d428-4b8c-acac-a80b7b7dd793" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.696s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1102.638081] env[63028]: DEBUG oslo_concurrency.lockutils [req-1f56e2aa-760d-448c-b2fc-6a744737dc7c req-b23696bf-4d50-4cde-a7d8-5c7ba300b097 service nova] Releasing lock "refresh_cache-719e014f-0544-4832-81ae-26b028b17be0" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1102.646345] env[63028]: DEBUG nova.objects.instance [None req-e434ffd9-9803-475f-906f-37b8ad29e731 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Lazy-loading 'flavor' on Instance uuid f804ec95-0b97-4960-844d-b678b97fc401 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1102.753756] env[63028]: DEBUG nova.network.neutron [None req-8591df8e-c073-4f58-b4a9-6a93be3007ed tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Updating instance_info_cache with network_info: [{"id": "c5f1d585-d624-4525-a5b2-132b18bf9378", "address": "fa:16:3e:93:da:98", "network": {"id": "49670a42-3caa-4492-8b32-f16f3fe77f4b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1158317332-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.229", "type": 
"floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b4dcaef840f940bda057d0371cdc5adb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4df917f7-847a-4c0e-b0e3-69a52e4a1554", "external-id": "cl2-zone-457", "segmentation_id": 457, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc5f1d585-d6", "ovs_interfaceid": "c5f1d585-d624-4525-a5b2-132b18bf9378", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1102.795523] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d36181e6-b6e6-4a21-b58f-896f7888b7f5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.803644] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77fd1aa7-468a-462a-be27-8404b2684e4e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.834993] env[63028]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2ef3fff9-afb8-4591-b09a-653fee354579 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.837886] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92e31f33-c1f3-49e2-be70-8b839e996419 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.848759] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47ddd281-6270-4180-9b5c-91e8ced8e514 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.854827] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3de538bb-1310-4683-b2bc-7d6f17353a2e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.875480] env[63028]: DEBUG nova.compute.provider_tree [None req-5388c69a-74fc-4be8-a33f-b9fb8d66eea9 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1102.893767] env[63028]: DEBUG oslo_vmware.api [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]523aa848-3fd7-2cb7-c816-2e7792de44cb, 'name': SearchDatastore_Task, 'duration_secs': 0.008747} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1102.894311] env[63028]: DEBUG nova.compute.manager [req-2cc21592-b067-4599-b7a7-c616f6e525ec req-1630884d-c699-4f49-9ba7-370ac4694471 service nova] [instance: a7ff444e-43bc-4925-9754-86ff30de6751] Detach interface failed, port_id=b7d0f379-6cd1-4efa-beb8-c2a2f3ef548c, reason: Instance a7ff444e-43bc-4925-9754-86ff30de6751 could not be found. {{(pid=63028) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1102.895532] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1102.895685] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] 3d91b47d-a5f7-4a10-aefb-1ad9c84c63e4/3d91b47d-a5f7-4a10-aefb-1ad9c84c63e4.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1102.895974] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1102.896181] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1102.896392] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1c27e563-caf7-480c-b7ee-dfac9231d4f3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.899772] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e82b24d0-d71e-4484-8f42-7f2956e3fc63 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.906691] env[63028]: DEBUG oslo_vmware.api [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Waiting for the task: (returnval){ [ 1102.906691] env[63028]: value = "task-2736318" [ 1102.906691] env[63028]: _type = "Task" [ 1102.906691] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.911099] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1102.911284] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1102.912579] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a9b5b4d4-6f80-4c17-84d7-b1d9a7183816 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.918125] env[63028]: DEBUG oslo_vmware.api [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Task: {'id': task-2736318, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.922011] env[63028]: DEBUG oslo_vmware.api [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Waiting for the task: (returnval){ [ 1102.922011] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5237e475-8a74-f56c-ea2f-b2c955fd5137" [ 1102.922011] env[63028]: _type = "Task" [ 1102.922011] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.930147] env[63028]: DEBUG oslo_vmware.api [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5237e475-8a74-f56c-ea2f-b2c955fd5137, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.039049] env[63028]: INFO nova.compute.manager [-] [instance: a7ff444e-43bc-4925-9754-86ff30de6751] Took 1.47 seconds to deallocate network for instance. 
[ 1103.154788] env[63028]: DEBUG oslo_concurrency.lockutils [None req-e434ffd9-9803-475f-906f-37b8ad29e731 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Lock "f804ec95-0b97-4960-844d-b678b97fc401" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.370s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1103.256936] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8591df8e-c073-4f58-b4a9-6a93be3007ed tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Releasing lock "refresh_cache-d41a1eae-bb89-4222-9466-d86af891c654" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1103.257969] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20c4914d-0f34-49d6-8ea4-26f9198c8ff2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.265762] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a4c5205-dad2-4ae7-a1ba-c2b9ce4c8900 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.378722] env[63028]: DEBUG nova.scheduler.client.report [None req-5388c69a-74fc-4be8-a33f-b9fb8d66eea9 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1103.406029] env[63028]: DEBUG oslo_concurrency.lockutils [None req-e262f660-6898-49b7-8aa6-8054f91bdcae tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Acquiring lock "f804ec95-0b97-4960-844d-b678b97fc401" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1103.406365] env[63028]: DEBUG oslo_concurrency.lockutils [None req-e262f660-6898-49b7-8aa6-8054f91bdcae tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Lock "f804ec95-0b97-4960-844d-b678b97fc401" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1103.417332] env[63028]: DEBUG oslo_vmware.api [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Task: {'id': task-2736318, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.500886} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.418081] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] 3d91b47d-a5f7-4a10-aefb-1ad9c84c63e4/3d91b47d-a5f7-4a10-aefb-1ad9c84c63e4.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1103.418481] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 3d91b47d-a5f7-4a10-aefb-1ad9c84c63e4] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1103.418565] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3a72f119-e412-4fc5-8952-d1e106b8d968 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.427581] env[63028]: DEBUG oslo_vmware.api [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Waiting for the task: (returnval){ [ 1103.427581] env[63028]: value = "task-2736319" [ 1103.427581] env[63028]: _type = "Task" [ 1103.427581] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1103.434829] env[63028]: DEBUG oslo_vmware.api [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5237e475-8a74-f56c-ea2f-b2c955fd5137, 'name': SearchDatastore_Task, 'duration_secs': 0.00925} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.435957] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fe061ab2-90cc-4bef-9bdf-dbf763304ae6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.441134] env[63028]: DEBUG oslo_vmware.api [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Task: {'id': task-2736319, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.444105] env[63028]: DEBUG oslo_vmware.api [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Waiting for the task: (returnval){ [ 1103.444105] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52aa8680-c350-91d6-9c24-458f35f38a03" [ 1103.444105] env[63028]: _type = "Task" [ 1103.444105] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1103.452334] env[63028]: DEBUG oslo_vmware.api [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52aa8680-c350-91d6-9c24-458f35f38a03, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.545882] env[63028]: DEBUG oslo_concurrency.lockutils [None req-37c28f8e-9d2f-4a53-bb11-705a4a3a6321 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1103.677586] env[63028]: DEBUG oslo_concurrency.lockutils [None req-3666919c-7162-48c7-88b9-57d0a5663d42 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Acquiring lock "1d008794-3c1a-46c6-b4eb-3d5441efdb22" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1103.677871] env[63028]: DEBUG oslo_concurrency.lockutils [None req-3666919c-7162-48c7-88b9-57d0a5663d42 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Lock "1d008794-3c1a-46c6-b4eb-3d5441efdb22" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1103.678193] env[63028]: DEBUG oslo_concurrency.lockutils [None req-3666919c-7162-48c7-88b9-57d0a5663d42 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Acquiring lock "1d008794-3c1a-46c6-b4eb-3d5441efdb22-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1103.678401] env[63028]: DEBUG oslo_concurrency.lockutils [None req-3666919c-7162-48c7-88b9-57d0a5663d42 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Lock "1d008794-3c1a-46c6-b4eb-3d5441efdb22-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1103.678582] env[63028]: DEBUG oslo_concurrency.lockutils [None req-3666919c-7162-48c7-88b9-57d0a5663d42 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Lock "1d008794-3c1a-46c6-b4eb-3d5441efdb22-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1103.680644] env[63028]: INFO nova.compute.manager [None req-3666919c-7162-48c7-88b9-57d0a5663d42 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 
1d008794-3c1a-46c6-b4eb-3d5441efdb22] Terminating instance [ 1103.883910] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5388c69a-74fc-4be8-a33f-b9fb8d66eea9 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.886s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1103.886306] env[63028]: DEBUG oslo_concurrency.lockutils [None req-40fe3359-7549-4835-a0aa-b278c4c16b6b tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.329s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1103.886526] env[63028]: DEBUG nova.objects.instance [None req-40fe3359-7549-4835-a0aa-b278c4c16b6b tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Lazy-loading 'resources' on Instance uuid 1f8415cc-f544-4c89-9863-43d5ae9144e8 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1103.907691] env[63028]: INFO nova.scheduler.client.report [None req-5388c69a-74fc-4be8-a33f-b9fb8d66eea9 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Deleted allocations for instance b3930760-1888-4f80-85d8-65120a25f275 [ 1103.912481] env[63028]: INFO nova.compute.manager [None req-e262f660-6898-49b7-8aa6-8054f91bdcae tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] [instance: f804ec95-0b97-4960-844d-b678b97fc401] Detaching volume 79c32ea7-4ed7-4f3a-9a4d-e052462647fc [ 1103.941822] env[63028]: DEBUG oslo_vmware.api [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Task: {'id': task-2736319, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064554} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.941822] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 3d91b47d-a5f7-4a10-aefb-1ad9c84c63e4] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1103.942274] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c0a3386-b8a5-49f3-bf6e-02eed8fcdf14 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.966050] env[63028]: DEBUG oslo_vmware.api [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52aa8680-c350-91d6-9c24-458f35f38a03, 'name': SearchDatastore_Task, 'duration_secs': 0.010395} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.976094] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 3d91b47d-a5f7-4a10-aefb-1ad9c84c63e4] Reconfiguring VM instance instance-0000006e to attach disk [datastore1] 3d91b47d-a5f7-4a10-aefb-1ad9c84c63e4/3d91b47d-a5f7-4a10-aefb-1ad9c84c63e4.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1103.977452] env[63028]: INFO nova.virt.block_device [None req-e262f660-6898-49b7-8aa6-8054f91bdcae tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] [instance: f804ec95-0b97-4960-844d-b678b97fc401] Attempting to driver detach volume 79c32ea7-4ed7-4f3a-9a4d-e052462647fc from mountpoint /dev/sdb [ 1103.977569] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-e262f660-6898-49b7-8aa6-8054f91bdcae tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] [instance: f804ec95-0b97-4960-844d-b678b97fc401] Volume detach. Driver type: vmdk {{(pid=63028) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1103.978280] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-e262f660-6898-49b7-8aa6-8054f91bdcae tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] [instance: f804ec95-0b97-4960-844d-b678b97fc401] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-550867', 'volume_id': '79c32ea7-4ed7-4f3a-9a4d-e052462647fc', 'name': 'volume-79c32ea7-4ed7-4f3a-9a4d-e052462647fc', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'f804ec95-0b97-4960-844d-b678b97fc401', 'attached_at': '', 'detached_at': '', 'volume_id': '79c32ea7-4ed7-4f3a-9a4d-e052462647fc', 'serial': '79c32ea7-4ed7-4f3a-9a4d-e052462647fc'} {{(pid=63028) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1103.978801] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1103.979268] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] 719e014f-0544-4832-81ae-26b028b17be0/719e014f-0544-4832-81ae-26b028b17be0.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1103.979573] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bd17e897-6557-4186-9424-76cc3dc0c49b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.994161] env[63028]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-167cd15e-56e9-462a-a7b9-83ff050a4c76 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.996954] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4e66ad53-ecbd-4f2d-97a5-e4809630f785 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.024855] env[63028]: DEBUG nova.network.neutron [None req-384368a9-37e0-42cd-bf00-b60b6ed58e75 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: e048cadf-9dc1-4eb7-a825-422d0736231c] Successfully updated port: dfcd47cc-53f0-4202-ba7f-b439ff3a8fe7 {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1104.027082] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5030caa-3461-49c1-9de2-107aa92f48df {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.030233] env[63028]: DEBUG oslo_vmware.api [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Waiting for the task: (returnval){ [ 1104.030233] env[63028]: value = "task-2736320" [ 1104.030233] env[63028]: _type = "Task" [ 1104.030233] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1104.032124] env[63028]: DEBUG oslo_vmware.api [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Waiting for the task: (returnval){ [ 1104.032124] env[63028]: value = "task-2736321" [ 1104.032124] env[63028]: _type = "Task" [ 1104.032124] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1104.043910] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0cba39d-5056-4257-90b4-81c6c1647abe {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.051900] env[63028]: DEBUG oslo_vmware.api [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Task: {'id': task-2736321, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.052157] env[63028]: DEBUG oslo_vmware.api [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Task: {'id': task-2736320, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.073417] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3eef0fd8-2e31-4d09-a1a8-dd55f0714d81 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.088395] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-e262f660-6898-49b7-8aa6-8054f91bdcae tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] The volume has not been displaced from its original location: [datastore1] volume-79c32ea7-4ed7-4f3a-9a4d-e052462647fc/volume-79c32ea7-4ed7-4f3a-9a4d-e052462647fc.vmdk. No consolidation needed. {{(pid=63028) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1104.093562] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-e262f660-6898-49b7-8aa6-8054f91bdcae tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] [instance: f804ec95-0b97-4960-844d-b678b97fc401] Reconfiguring VM instance instance-00000062 to detach disk 2001 {{(pid=63028) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1104.093886] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-60c294ca-ab4a-421d-99f3-bbf91aa7cb72 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.111373] env[63028]: DEBUG oslo_vmware.api [None req-e262f660-6898-49b7-8aa6-8054f91bdcae tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Waiting for the task: (returnval){ [ 1104.111373] env[63028]: value = "task-2736322" [ 1104.111373] env[63028]: _type = "Task" [ 1104.111373] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1104.122554] env[63028]: DEBUG oslo_vmware.api [None req-e262f660-6898-49b7-8aa6-8054f91bdcae tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Task: {'id': task-2736322, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.184121] env[63028]: DEBUG nova.compute.manager [None req-3666919c-7162-48c7-88b9-57d0a5663d42 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 1d008794-3c1a-46c6-b4eb-3d5441efdb22] Start destroying the instance on the hypervisor. 
{{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1104.184326] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-3666919c-7162-48c7-88b9-57d0a5663d42 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 1d008794-3c1a-46c6-b4eb-3d5441efdb22] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1104.185167] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63f54757-5c5a-4145-9df5-342c27fe3236 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.194598] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-3666919c-7162-48c7-88b9-57d0a5663d42 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 1d008794-3c1a-46c6-b4eb-3d5441efdb22] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1104.194897] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-331e9c3b-df11-4e6a-893f-66f899f63c42 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.202976] env[63028]: DEBUG oslo_vmware.api [None req-3666919c-7162-48c7-88b9-57d0a5663d42 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Waiting for the task: (returnval){ [ 1104.202976] env[63028]: value = "task-2736323" [ 1104.202976] env[63028]: _type = "Task" [ 1104.202976] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1104.212522] env[63028]: DEBUG oslo_vmware.api [None req-3666919c-7162-48c7-88b9-57d0a5663d42 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2736323, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.353398] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-8591df8e-c073-4f58-b4a9-6a93be3007ed tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1104.353744] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d34ab438-d1f2-4630-b840-4a70450ca39f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.358886] env[63028]: DEBUG nova.compute.manager [req-29dd686d-75b4-443e-87e9-60cbc134e57a req-5c00112c-976a-48b3-89e8-9e82c88fda23 service nova] [instance: e048cadf-9dc1-4eb7-a825-422d0736231c] Received event network-vif-plugged-dfcd47cc-53f0-4202-ba7f-b439ff3a8fe7 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1104.359122] env[63028]: DEBUG oslo_concurrency.lockutils [req-29dd686d-75b4-443e-87e9-60cbc134e57a req-5c00112c-976a-48b3-89e8-9e82c88fda23 service nova] Acquiring lock "e048cadf-9dc1-4eb7-a825-422d0736231c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1104.359388] env[63028]: DEBUG oslo_concurrency.lockutils [req-29dd686d-75b4-443e-87e9-60cbc134e57a req-5c00112c-976a-48b3-89e8-9e82c88fda23 service nova] Lock "e048cadf-9dc1-4eb7-a825-422d0736231c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1104.359499] env[63028]: DEBUG oslo_concurrency.lockutils [req-29dd686d-75b4-443e-87e9-60cbc134e57a req-5c00112c-976a-48b3-89e8-9e82c88fda23 service nova] Lock "e048cadf-9dc1-4eb7-a825-422d0736231c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1104.359671] env[63028]: DEBUG nova.compute.manager [req-29dd686d-75b4-443e-87e9-60cbc134e57a req-5c00112c-976a-48b3-89e8-9e82c88fda23 service nova] [instance: e048cadf-9dc1-4eb7-a825-422d0736231c] No waiting events found dispatching network-vif-plugged-dfcd47cc-53f0-4202-ba7f-b439ff3a8fe7 {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1104.359919] env[63028]: WARNING nova.compute.manager [req-29dd686d-75b4-443e-87e9-60cbc134e57a req-5c00112c-976a-48b3-89e8-9e82c88fda23 service nova] [instance: e048cadf-9dc1-4eb7-a825-422d0736231c] Received unexpected event network-vif-plugged-dfcd47cc-53f0-4202-ba7f-b439ff3a8fe7 for instance with vm_state active and task_state None. 
[ 1104.360102] env[63028]: DEBUG nova.compute.manager [req-29dd686d-75b4-443e-87e9-60cbc134e57a req-5c00112c-976a-48b3-89e8-9e82c88fda23 service nova] [instance: e048cadf-9dc1-4eb7-a825-422d0736231c] Received event network-changed-dfcd47cc-53f0-4202-ba7f-b439ff3a8fe7 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1104.360261] env[63028]: DEBUG nova.compute.manager [req-29dd686d-75b4-443e-87e9-60cbc134e57a req-5c00112c-976a-48b3-89e8-9e82c88fda23 service nova] [instance: e048cadf-9dc1-4eb7-a825-422d0736231c] Refreshing instance network info cache due to event network-changed-dfcd47cc-53f0-4202-ba7f-b439ff3a8fe7. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1104.360447] env[63028]: DEBUG oslo_concurrency.lockutils [req-29dd686d-75b4-443e-87e9-60cbc134e57a req-5c00112c-976a-48b3-89e8-9e82c88fda23 service nova] Acquiring lock "refresh_cache-e048cadf-9dc1-4eb7-a825-422d0736231c" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1104.360584] env[63028]: DEBUG oslo_concurrency.lockutils [req-29dd686d-75b4-443e-87e9-60cbc134e57a req-5c00112c-976a-48b3-89e8-9e82c88fda23 service nova] Acquired lock "refresh_cache-e048cadf-9dc1-4eb7-a825-422d0736231c" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1104.360739] env[63028]: DEBUG nova.network.neutron [req-29dd686d-75b4-443e-87e9-60cbc134e57a req-5c00112c-976a-48b3-89e8-9e82c88fda23 service nova] [instance: e048cadf-9dc1-4eb7-a825-422d0736231c] Refreshing network info cache for port dfcd47cc-53f0-4202-ba7f-b439ff3a8fe7 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1104.364917] env[63028]: DEBUG oslo_vmware.api [None req-8591df8e-c073-4f58-b4a9-6a93be3007ed tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Waiting for the task: (returnval){ [ 1104.364917] env[63028]: value = "task-2736324" [ 1104.364917] env[63028]: _type = "Task" [ 1104.364917] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1104.378221] env[63028]: DEBUG oslo_vmware.api [None req-8591df8e-c073-4f58-b4a9-6a93be3007ed tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2736324, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.418528] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5388c69a-74fc-4be8-a33f-b9fb8d66eea9 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Lock "b3930760-1888-4f80-85d8-65120a25f275" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.340s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1104.532210] env[63028]: DEBUG oslo_concurrency.lockutils [None req-384368a9-37e0-42cd-bf00-b60b6ed58e75 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Acquiring lock "refresh_cache-e048cadf-9dc1-4eb7-a825-422d0736231c" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1104.549822] env[63028]: DEBUG oslo_vmware.api [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Task: {'id': task-2736321, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.537164} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1104.550145] env[63028]: DEBUG oslo_vmware.api [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Task: {'id': task-2736320, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.550311] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] 719e014f-0544-4832-81ae-26b028b17be0/719e014f-0544-4832-81ae-26b028b17be0.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1104.550527] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 719e014f-0544-4832-81ae-26b028b17be0] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1104.550788] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c36593c4-75c2-4b64-aa26-6b6faf7e5229 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.557949] env[63028]: DEBUG oslo_vmware.api [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Waiting for the task: (returnval){ [ 1104.557949] env[63028]: value = "task-2736325" [ 1104.557949] env[63028]: _type = "Task" [ 1104.557949] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1104.569231] env[63028]: DEBUG oslo_vmware.api [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Task: {'id': task-2736325, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.622425] env[63028]: DEBUG oslo_vmware.api [None req-e262f660-6898-49b7-8aa6-8054f91bdcae tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Task: {'id': task-2736322, 'name': ReconfigVM_Task, 'duration_secs': 0.467207} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1104.622708] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-e262f660-6898-49b7-8aa6-8054f91bdcae tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] [instance: f804ec95-0b97-4960-844d-b678b97fc401] Reconfigured VM instance instance-00000062 to detach disk 2001 {{(pid=63028) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1104.630410] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9587c2d1-fead-4d08-bcab-4cbdc615003c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.647964] env[63028]: DEBUG oslo_vmware.api [None req-e262f660-6898-49b7-8aa6-8054f91bdcae tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Waiting for the task: (returnval){ [ 1104.647964] env[63028]: value = "task-2736326" [ 1104.647964] env[63028]: _type = "Task" [ 1104.647964] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1104.658155] env[63028]: DEBUG oslo_vmware.api [None req-e262f660-6898-49b7-8aa6-8054f91bdcae tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Task: {'id': task-2736326, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.678152] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4be55d7d-6788-4db3-9405-5d9dc5514567 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.689570] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4609cd35-8f87-4199-bec8-1e7aef65173e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.727945] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1415d8f4-e053-4592-92d2-168b0a26e812 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.735670] env[63028]: DEBUG oslo_vmware.api [None req-3666919c-7162-48c7-88b9-57d0a5663d42 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2736323, 'name': PowerOffVM_Task, 'duration_secs': 0.317628} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1104.737965] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-3666919c-7162-48c7-88b9-57d0a5663d42 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 1d008794-3c1a-46c6-b4eb-3d5441efdb22] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1104.738167] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-3666919c-7162-48c7-88b9-57d0a5663d42 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 1d008794-3c1a-46c6-b4eb-3d5441efdb22] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1104.738469] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-119a6773-bd8c-4cfd-b741-72b55b3e14e2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.741062] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7211614-936e-4b60-b357-a558527e1f03 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.757334] env[63028]: DEBUG nova.compute.provider_tree [None req-40fe3359-7549-4835-a0aa-b278c4c16b6b tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1104.808634] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-3666919c-7162-48c7-88b9-57d0a5663d42 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 1d008794-3c1a-46c6-b4eb-3d5441efdb22] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1104.808878] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-3666919c-7162-48c7-88b9-57d0a5663d42 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 1d008794-3c1a-46c6-b4eb-3d5441efdb22] Deleting contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1104.809076] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-3666919c-7162-48c7-88b9-57d0a5663d42 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Deleting the datastore file [datastore1] 1d008794-3c1a-46c6-b4eb-3d5441efdb22 {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1104.809377] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c95e2058-c9d0-4858-a0a6-b0c34bf1d513 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.816645] env[63028]: DEBUG oslo_vmware.api [None req-3666919c-7162-48c7-88b9-57d0a5663d42 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Waiting for the task: (returnval){ [ 1104.816645] env[63028]: value = "task-2736328" [ 1104.816645] env[63028]: _type = "Task" [ 1104.816645] env[63028]: } to 
complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1104.825310] env[63028]: DEBUG oslo_vmware.api [None req-3666919c-7162-48c7-88b9-57d0a5663d42 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2736328, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.873947] env[63028]: DEBUG oslo_vmware.api [None req-8591df8e-c073-4f58-b4a9-6a93be3007ed tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2736324, 'name': PowerOffVM_Task, 'duration_secs': 0.384672} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1104.874224] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-8591df8e-c073-4f58-b4a9-6a93be3007ed tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1104.874982] env[63028]: DEBUG nova.virt.hardware [None req-8591df8e-c073-4f58-b4a9-6a93be3007ed tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1104.875233] env[63028]: DEBUG nova.virt.hardware [None req-8591df8e-c073-4f58-b4a9-6a93be3007ed tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1104.875420] env[63028]: DEBUG nova.virt.hardware [None req-8591df8e-c073-4f58-b4a9-6a93be3007ed tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1104.875628] env[63028]: DEBUG nova.virt.hardware [None req-8591df8e-c073-4f58-b4a9-6a93be3007ed tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1104.875831] env[63028]: DEBUG nova.virt.hardware [None req-8591df8e-c073-4f58-b4a9-6a93be3007ed tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1104.876065] env[63028]: DEBUG nova.virt.hardware [None req-8591df8e-c073-4f58-b4a9-6a93be3007ed tempest-ServerActionsTestOtherB-816222261 
tempest-ServerActionsTestOtherB-816222261-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1104.876298] env[63028]: DEBUG nova.virt.hardware [None req-8591df8e-c073-4f58-b4a9-6a93be3007ed tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1104.876464] env[63028]: DEBUG nova.virt.hardware [None req-8591df8e-c073-4f58-b4a9-6a93be3007ed tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1104.876677] env[63028]: DEBUG nova.virt.hardware [None req-8591df8e-c073-4f58-b4a9-6a93be3007ed tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1104.876853] env[63028]: DEBUG nova.virt.hardware [None req-8591df8e-c073-4f58-b4a9-6a93be3007ed tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1104.877037] env[63028]: DEBUG nova.virt.hardware [None req-8591df8e-c073-4f58-b4a9-6a93be3007ed tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1104.881937] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ba8e3417-2284-4aac-856d-6e9d65ee5ea1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.897510] env[63028]: DEBUG oslo_vmware.api [None req-8591df8e-c073-4f58-b4a9-6a93be3007ed tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Waiting for the task: (returnval){ [ 1104.897510] env[63028]: value = "task-2736329" [ 1104.897510] env[63028]: _type = "Task" [ 1104.897510] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1104.905625] env[63028]: DEBUG oslo_vmware.api [None req-8591df8e-c073-4f58-b4a9-6a93be3007ed tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2736329, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.045565] env[63028]: DEBUG oslo_vmware.api [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Task: {'id': task-2736320, 'name': ReconfigVM_Task, 'duration_secs': 0.731676} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1105.045982] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 3d91b47d-a5f7-4a10-aefb-1ad9c84c63e4] Reconfigured VM instance instance-0000006e to attach disk [datastore1] 3d91b47d-a5f7-4a10-aefb-1ad9c84c63e4/3d91b47d-a5f7-4a10-aefb-1ad9c84c63e4.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1105.046758] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-617932d1-24e5-4885-b1c2-cedc27e3a16f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.052553] env[63028]: DEBUG oslo_vmware.api [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Waiting for the task: (returnval){ [ 1105.052553] env[63028]: value = "task-2736330" [ 1105.052553] env[63028]: _type = "Task" [ 1105.052553] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.066502] env[63028]: DEBUG oslo_vmware.api [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Task: {'id': task-2736330, 'name': Rename_Task} progress is 6%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.071529] env[63028]: DEBUG oslo_vmware.api [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Task: {'id': task-2736325, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070334} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1105.071773] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 719e014f-0544-4832-81ae-26b028b17be0] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1105.072586] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cb8681a-e466-4627-bd1d-7dbe755c8651 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.094247] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 719e014f-0544-4832-81ae-26b028b17be0] Reconfiguring VM instance instance-0000006f to attach disk [datastore1] 719e014f-0544-4832-81ae-26b028b17be0/719e014f-0544-4832-81ae-26b028b17be0.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1105.097043] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f2bd20f1-4638-4aa3-af0a-5356422958e3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.118689] env[63028]: DEBUG oslo_vmware.api [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Waiting for the task: (returnval){ [ 1105.118689] env[63028]: value = "task-2736331" [ 1105.118689] env[63028]: _type = "Task" [ 1105.118689] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.128676] env[63028]: DEBUG oslo_vmware.api [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Task: {'id': task-2736331, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.145770] env[63028]: DEBUG nova.network.neutron [req-29dd686d-75b4-443e-87e9-60cbc134e57a req-5c00112c-976a-48b3-89e8-9e82c88fda23 service nova] [instance: e048cadf-9dc1-4eb7-a825-422d0736231c] Added VIF to instance network info cache for port dfcd47cc-53f0-4202-ba7f-b439ff3a8fe7. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3546}} [ 1105.146348] env[63028]: DEBUG nova.network.neutron [req-29dd686d-75b4-443e-87e9-60cbc134e57a req-5c00112c-976a-48b3-89e8-9e82c88fda23 service nova] [instance: e048cadf-9dc1-4eb7-a825-422d0736231c] Updating instance_info_cache with network_info: [{"id": "60891063-6c30-480a-8e2b-f3960496f2fd", "address": "fa:16:3e:84:9a:c5", "network": {"id": "c2f1496c-e3fd-43db-a032-12cdacdb4e46", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1287620143-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.172", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "25a6457f62d149629c09589feb1a550c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap60891063-6c", "ovs_interfaceid": "60891063-6c30-480a-8e2b-f3960496f2fd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "dfcd47cc-53f0-4202-ba7f-b439ff3a8fe7", "address": "fa:16:3e:b4:a8:3d", "network": {"id": "c2f1496c-e3fd-43db-a032-12cdacdb4e46", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1287620143-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "25a6457f62d149629c09589feb1a550c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdfcd47cc-53", "ovs_interfaceid": "dfcd47cc-53f0-4202-ba7f-b439ff3a8fe7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1105.156804] env[63028]: DEBUG oslo_vmware.api [None req-e262f660-6898-49b7-8aa6-8054f91bdcae tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Task: {'id': task-2736326, 'name': ReconfigVM_Task, 'duration_secs': 0.202306} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1105.157125] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-e262f660-6898-49b7-8aa6-8054f91bdcae tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] [instance: f804ec95-0b97-4960-844d-b678b97fc401] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-550867', 'volume_id': '79c32ea7-4ed7-4f3a-9a4d-e052462647fc', 'name': 'volume-79c32ea7-4ed7-4f3a-9a4d-e052462647fc', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'f804ec95-0b97-4960-844d-b678b97fc401', 'attached_at': '', 'detached_at': '', 'volume_id': '79c32ea7-4ed7-4f3a-9a4d-e052462647fc', 'serial': '79c32ea7-4ed7-4f3a-9a4d-e052462647fc'} {{(pid=63028) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1105.260509] env[63028]: DEBUG nova.scheduler.client.report [None req-40fe3359-7549-4835-a0aa-b278c4c16b6b tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1105.327909] env[63028]: DEBUG oslo_vmware.api [None req-3666919c-7162-48c7-88b9-57d0a5663d42 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Task: {'id': task-2736328, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.268669} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1105.328199] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-3666919c-7162-48c7-88b9-57d0a5663d42 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1105.328385] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-3666919c-7162-48c7-88b9-57d0a5663d42 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 1d008794-3c1a-46c6-b4eb-3d5441efdb22] Deleted contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1105.328567] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-3666919c-7162-48c7-88b9-57d0a5663d42 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 1d008794-3c1a-46c6-b4eb-3d5441efdb22] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1105.328744] env[63028]: INFO nova.compute.manager [None req-3666919c-7162-48c7-88b9-57d0a5663d42 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] [instance: 1d008794-3c1a-46c6-b4eb-3d5441efdb22] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1105.328994] env[63028]: DEBUG oslo.service.loopingcall [None req-3666919c-7162-48c7-88b9-57d0a5663d42 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1105.329202] env[63028]: DEBUG nova.compute.manager [-] [instance: 1d008794-3c1a-46c6-b4eb-3d5441efdb22] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1105.329297] env[63028]: DEBUG nova.network.neutron [-] [instance: 1d008794-3c1a-46c6-b4eb-3d5441efdb22] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1105.407812] env[63028]: DEBUG oslo_vmware.api [None req-8591df8e-c073-4f58-b4a9-6a93be3007ed tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2736329, 'name': ReconfigVM_Task, 'duration_secs': 0.255949} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1105.408619] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dccfb73-c84e-4381-8823-505ad00026c8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.429068] env[63028]: DEBUG nova.virt.hardware [None req-8591df8e-c073-4f58-b4a9-6a93be3007ed tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1105.429303] env[63028]: DEBUG nova.virt.hardware [None req-8591df8e-c073-4f58-b4a9-6a93be3007ed tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1105.429462] env[63028]: DEBUG nova.virt.hardware [None req-8591df8e-c073-4f58-b4a9-6a93be3007ed tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1105.429639] env[63028]: DEBUG nova.virt.hardware [None req-8591df8e-c073-4f58-b4a9-6a93be3007ed tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1105.429780] env[63028]: DEBUG nova.virt.hardware [None req-8591df8e-c073-4f58-b4a9-6a93be3007ed tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1105.429924] env[63028]: DEBUG nova.virt.hardware [None req-8591df8e-c073-4f58-b4a9-6a93be3007ed tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1105.430135] env[63028]: DEBUG nova.virt.hardware [None req-8591df8e-c073-4f58-b4a9-6a93be3007ed tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1105.430324] env[63028]: DEBUG nova.virt.hardware [None req-8591df8e-c073-4f58-b4a9-6a93be3007ed tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1105.430593] env[63028]: DEBUG nova.virt.hardware [None req-8591df8e-c073-4f58-b4a9-6a93be3007ed tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1105.430861] env[63028]: DEBUG nova.virt.hardware [None req-8591df8e-c073-4f58-b4a9-6a93be3007ed tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1105.431150] env[63028]: DEBUG nova.virt.hardware [None req-8591df8e-c073-4f58-b4a9-6a93be3007ed tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1105.432061] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fb6531c9-a8b9-4c12-80bf-989489bd61b6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.437701] env[63028]: DEBUG oslo_vmware.api [None req-8591df8e-c073-4f58-b4a9-6a93be3007ed tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Waiting for the task: (returnval){ [ 1105.437701] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5282f0ee-1a9b-7cc3-9361-b9e4180a4f96" [ 1105.437701] env[63028]: _type = "Task" [ 1105.437701] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.445902] env[63028]: DEBUG oslo_vmware.api [None req-8591df8e-c073-4f58-b4a9-6a93be3007ed tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5282f0ee-1a9b-7cc3-9361-b9e4180a4f96, 'name': SearchDatastore_Task, 'duration_secs': 0.00645} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1105.450937] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-8591df8e-c073-4f58-b4a9-6a93be3007ed tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Reconfiguring VM instance instance-0000005a to detach disk 2000 {{(pid=63028) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1105.451197] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-75555dba-52ed-4ccc-9d8b-2a8b0795ba02 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.468287] env[63028]: DEBUG oslo_vmware.api [None req-8591df8e-c073-4f58-b4a9-6a93be3007ed tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Waiting for the task: (returnval){ [ 1105.468287] env[63028]: value = "task-2736332" [ 1105.468287] env[63028]: _type = "Task" [ 1105.468287] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.475871] env[63028]: DEBUG oslo_vmware.api [None req-8591df8e-c073-4f58-b4a9-6a93be3007ed tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2736332, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.562245] env[63028]: DEBUG oslo_vmware.api [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Task: {'id': task-2736330, 'name': Rename_Task, 'duration_secs': 0.15938} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1105.562770] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 3d91b47d-a5f7-4a10-aefb-1ad9c84c63e4] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1105.563037] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c03b02c5-c429-433a-99f0-f212ef9e8f94 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.570170] env[63028]: DEBUG oslo_vmware.api [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Waiting for the task: (returnval){ [ 1105.570170] env[63028]: value = "task-2736333" [ 1105.570170] env[63028]: _type = "Task" [ 1105.570170] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.578018] env[63028]: DEBUG oslo_vmware.api [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Task: {'id': task-2736333, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.628275] env[63028]: DEBUG oslo_vmware.api [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Task: {'id': task-2736331, 'name': ReconfigVM_Task, 'duration_secs': 0.295441} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1105.628600] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 719e014f-0544-4832-81ae-26b028b17be0] Reconfigured VM instance instance-0000006f to attach disk [datastore1] 719e014f-0544-4832-81ae-26b028b17be0/719e014f-0544-4832-81ae-26b028b17be0.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1105.629178] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-325a0623-6ff2-492c-a3f7-e2c13c722de3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.635829] env[63028]: DEBUG oslo_vmware.api [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Waiting for the task: (returnval){ [ 1105.635829] env[63028]: value = "task-2736334" [ 1105.635829] env[63028]: _type = "Task" [ 1105.635829] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.644095] env[63028]: DEBUG oslo_vmware.api [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Task: {'id': task-2736334, 'name': Rename_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.648832] env[63028]: DEBUG oslo_concurrency.lockutils [req-29dd686d-75b4-443e-87e9-60cbc134e57a req-5c00112c-976a-48b3-89e8-9e82c88fda23 service nova] Releasing lock "refresh_cache-e048cadf-9dc1-4eb7-a825-422d0736231c" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1105.649267] env[63028]: DEBUG oslo_concurrency.lockutils [None req-384368a9-37e0-42cd-bf00-b60b6ed58e75 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Acquired lock "refresh_cache-e048cadf-9dc1-4eb7-a825-422d0736231c" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1105.649493] env[63028]: DEBUG nova.network.neutron [None req-384368a9-37e0-42cd-bf00-b60b6ed58e75 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: e048cadf-9dc1-4eb7-a825-422d0736231c] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1105.657896] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1105.658139] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1105.658406] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task 
ComputeManager._poll_rescued_instances {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1105.658615] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1105.659128] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1105.659324] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1105.659561] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1105.659747] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63028) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}} [ 1105.659940] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1105.707665] env[63028]: DEBUG nova.objects.instance [None req-e262f660-6898-49b7-8aa6-8054f91bdcae tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Lazy-loading 'flavor' on Instance uuid f804ec95-0b97-4960-844d-b678b97fc401 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1105.743192] env[63028]: DEBUG oslo_concurrency.lockutils [None req-adaba13b-512a-4572-b6cc-ff868e6f461c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Acquiring lock "629a3b6f-a74b-4193-bcf4-fc67a1752d5b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1105.743515] env[63028]: DEBUG oslo_concurrency.lockutils [None req-adaba13b-512a-4572-b6cc-ff868e6f461c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Lock "629a3b6f-a74b-4193-bcf4-fc67a1752d5b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1105.765174] env[63028]: DEBUG oslo_concurrency.lockutils [None req-40fe3359-7549-4835-a0aa-b278c4c16b6b tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.879s {{(pid=63028) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1105.767495] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c36417a8-c12b-479e-bb47-c4dc2a652aac tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 7.748s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1105.767685] env[63028]: DEBUG nova.objects.instance [None req-c36417a8-c12b-479e-bb47-c4dc2a652aac tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 60d18f14-536a-4b0f-912b-21f3f5a30d28] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63028) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1105.785033] env[63028]: INFO nova.scheduler.client.report [None req-40fe3359-7549-4835-a0aa-b278c4c16b6b tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Deleted allocations for instance 1f8415cc-f544-4c89-9863-43d5ae9144e8 [ 1105.977870] env[63028]: DEBUG oslo_vmware.api [None req-8591df8e-c073-4f58-b4a9-6a93be3007ed tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2736332, 'name': ReconfigVM_Task, 'duration_secs': 0.502446} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1105.978559] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-8591df8e-c073-4f58-b4a9-6a93be3007ed tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Reconfigured VM instance instance-0000005a to detach disk 2000 {{(pid=63028) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1105.979195] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d777e3e8-1dca-4638-8355-22d313f89516 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.002761] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-8591df8e-c073-4f58-b4a9-6a93be3007ed tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Reconfiguring VM instance instance-0000005a to attach disk [datastore1] d41a1eae-bb89-4222-9466-d86af891c654/d41a1eae-bb89-4222-9466-d86af891c654.vmdk or device None with type thin {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1106.003494] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b1d92973-297b-499a-8dec-0b45e598f952 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.020902] env[63028]: DEBUG oslo_vmware.api [None req-8591df8e-c073-4f58-b4a9-6a93be3007ed tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Waiting for the task: (returnval){ [ 1106.020902] env[63028]: value = "task-2736335" [ 1106.020902] env[63028]: _type = "Task" [ 1106.020902] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1106.028302] env[63028]: DEBUG oslo_vmware.api [None req-8591df8e-c073-4f58-b4a9-6a93be3007ed tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2736335, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.064837] env[63028]: DEBUG nova.network.neutron [-] [instance: 1d008794-3c1a-46c6-b4eb-3d5441efdb22] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1106.080292] env[63028]: DEBUG oslo_vmware.api [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Task: {'id': task-2736333, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.146607] env[63028]: DEBUG oslo_vmware.api [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Task: {'id': task-2736334, 'name': Rename_Task, 'duration_secs': 0.173473} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1106.147445] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 719e014f-0544-4832-81ae-26b028b17be0] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1106.147551] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f2ff0bcf-b4bc-4959-ae82-2ca5db77daa5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.158950] env[63028]: DEBUG oslo_vmware.api [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Waiting for the task: (returnval){ [ 1106.158950] env[63028]: value = "task-2736336" [ 1106.158950] env[63028]: _type = "Task" [ 1106.158950] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1106.163779] env[63028]: DEBUG oslo_concurrency.lockutils [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1106.169226] env[63028]: DEBUG oslo_vmware.api [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Task: {'id': task-2736336, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.197110] env[63028]: WARNING nova.network.neutron [None req-384368a9-37e0-42cd-bf00-b60b6ed58e75 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: e048cadf-9dc1-4eb7-a825-422d0736231c] c2f1496c-e3fd-43db-a032-12cdacdb4e46 already exists in list: networks containing: ['c2f1496c-e3fd-43db-a032-12cdacdb4e46']. ignoring it [ 1106.197396] env[63028]: WARNING nova.network.neutron [None req-384368a9-37e0-42cd-bf00-b60b6ed58e75 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: e048cadf-9dc1-4eb7-a825-422d0736231c] c2f1496c-e3fd-43db-a032-12cdacdb4e46 already exists in list: networks containing: ['c2f1496c-e3fd-43db-a032-12cdacdb4e46']. ignoring it [ 1106.197622] env[63028]: WARNING nova.network.neutron [None req-384368a9-37e0-42cd-bf00-b60b6ed58e75 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: e048cadf-9dc1-4eb7-a825-422d0736231c] dfcd47cc-53f0-4202-ba7f-b439ff3a8fe7 already exists in list: port_ids containing: ['dfcd47cc-53f0-4202-ba7f-b439ff3a8fe7']. ignoring it [ 1106.246394] env[63028]: DEBUG nova.compute.manager [None req-adaba13b-512a-4572-b6cc-ff868e6f461c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 629a3b6f-a74b-4193-bcf4-fc67a1752d5b] Starting instance... {{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1106.293972] env[63028]: DEBUG oslo_concurrency.lockutils [None req-40fe3359-7549-4835-a0aa-b278c4c16b6b tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Lock "1f8415cc-f544-4c89-9863-43d5ae9144e8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.721s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1106.537572] env[63028]: DEBUG oslo_vmware.api [None req-8591df8e-c073-4f58-b4a9-6a93be3007ed tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2736335, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.566813] env[63028]: INFO nova.compute.manager [-] [instance: 1d008794-3c1a-46c6-b4eb-3d5441efdb22] Took 1.24 seconds to deallocate network for instance. 
[ 1106.572712] env[63028]: DEBUG nova.network.neutron [None req-384368a9-37e0-42cd-bf00-b60b6ed58e75 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: e048cadf-9dc1-4eb7-a825-422d0736231c] Updating instance_info_cache with network_info: [{"id": "60891063-6c30-480a-8e2b-f3960496f2fd", "address": "fa:16:3e:84:9a:c5", "network": {"id": "c2f1496c-e3fd-43db-a032-12cdacdb4e46", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1287620143-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.172", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "25a6457f62d149629c09589feb1a550c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap60891063-6c", "ovs_interfaceid": "60891063-6c30-480a-8e2b-f3960496f2fd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "dfcd47cc-53f0-4202-ba7f-b439ff3a8fe7", "address": "fa:16:3e:b4:a8:3d", "network": {"id": "c2f1496c-e3fd-43db-a032-12cdacdb4e46", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1287620143-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "25a6457f62d149629c09589feb1a550c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdfcd47cc-53", "ovs_interfaceid": "dfcd47cc-53f0-4202-ba7f-b439ff3a8fe7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1106.584073] env[63028]: DEBUG oslo_vmware.api [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Task: {'id': task-2736333, 'name': PowerOnVM_Task, 'duration_secs': 0.597212} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1106.584362] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 3d91b47d-a5f7-4a10-aefb-1ad9c84c63e4] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1106.584571] env[63028]: INFO nova.compute.manager [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 3d91b47d-a5f7-4a10-aefb-1ad9c84c63e4] Took 9.77 seconds to spawn the instance on the hypervisor. [ 1106.584752] env[63028]: DEBUG nova.compute.manager [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 3d91b47d-a5f7-4a10-aefb-1ad9c84c63e4] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1106.585919] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00ed463d-a243-4408-bbf8-ea412dbb59b6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.597973] env[63028]: DEBUG nova.compute.manager [req-d81dffbe-4571-4e22-94fb-3558d9d14385 req-e3f4f26b-77d6-4b58-b465-3bf68144bff5 service nova] [instance: 1d008794-3c1a-46c6-b4eb-3d5441efdb22] Received event network-vif-deleted-4da14eb6-411a-4cdd-afe0-bd34e474882f {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1106.668792] env[63028]: DEBUG oslo_vmware.api [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Task: {'id': task-2736336, 'name': PowerOnVM_Task, 'duration_secs': 0.46111} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1106.669084] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 719e014f-0544-4832-81ae-26b028b17be0] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1106.669295] env[63028]: INFO nova.compute.manager [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 719e014f-0544-4832-81ae-26b028b17be0] Took 7.38 seconds to spawn the instance on the hypervisor. 
[ 1106.669506] env[63028]: DEBUG nova.compute.manager [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 719e014f-0544-4832-81ae-26b028b17be0] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1106.670269] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e020a1ac-e27f-44a1-8965-b2c55ecde3e4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.716389] env[63028]: DEBUG oslo_concurrency.lockutils [None req-e262f660-6898-49b7-8aa6-8054f91bdcae tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Lock "f804ec95-0b97-4960-844d-b678b97fc401" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.310s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1106.760513] env[63028]: DEBUG oslo_concurrency.lockutils [None req-fc139c83-1d5b-4040-b4d1-de7c352945ba tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Acquiring lock "f804ec95-0b97-4960-844d-b678b97fc401" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1106.760513] env[63028]: DEBUG oslo_concurrency.lockutils [None req-fc139c83-1d5b-4040-b4d1-de7c352945ba tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Lock "f804ec95-0b97-4960-844d-b678b97fc401" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1106.768425] env[63028]: DEBUG oslo_concurrency.lockutils [None req-adaba13b-512a-4572-b6cc-ff868e6f461c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1106.778204] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c36417a8-c12b-479e-bb47-c4dc2a652aac tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.011s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1106.779386] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4449cd42-ad53-4e8c-abd8-2aebc5abfa82 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.623s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1106.780854] env[63028]: INFO nova.compute.claims [None req-4449cd42-ad53-4e8c-abd8-2aebc5abfa82 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 
1107.032547] env[63028]: DEBUG oslo_vmware.api [None req-8591df8e-c073-4f58-b4a9-6a93be3007ed tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2736335, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.079229] env[63028]: DEBUG oslo_concurrency.lockutils [None req-3666919c-7162-48c7-88b9-57d0a5663d42 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1107.079746] env[63028]: DEBUG oslo_concurrency.lockutils [None req-384368a9-37e0-42cd-bf00-b60b6ed58e75 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Releasing lock "refresh_cache-e048cadf-9dc1-4eb7-a825-422d0736231c" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1107.080367] env[63028]: DEBUG oslo_concurrency.lockutils [None req-384368a9-37e0-42cd-bf00-b60b6ed58e75 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Acquiring lock "e048cadf-9dc1-4eb7-a825-422d0736231c" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1107.080527] env[63028]: DEBUG oslo_concurrency.lockutils [None req-384368a9-37e0-42cd-bf00-b60b6ed58e75 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Acquired lock "e048cadf-9dc1-4eb7-a825-422d0736231c" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1107.081364] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f511116-bf27-498a-8126-bc18e7cc6867 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.104805] env[63028]: DEBUG nova.virt.hardware [None req-384368a9-37e0-42cd-bf00-b60b6ed58e75 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1107.104805] env[63028]: DEBUG nova.virt.hardware [None req-384368a9-37e0-42cd-bf00-b60b6ed58e75 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1107.105048] env[63028]: DEBUG nova.virt.hardware [None req-384368a9-37e0-42cd-bf00-b60b6ed58e75 tempest-AttachInterfacesTestJSON-1120072222 
tempest-AttachInterfacesTestJSON-1120072222-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1107.105084] env[63028]: DEBUG nova.virt.hardware [None req-384368a9-37e0-42cd-bf00-b60b6ed58e75 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1107.105220] env[63028]: DEBUG nova.virt.hardware [None req-384368a9-37e0-42cd-bf00-b60b6ed58e75 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1107.105402] env[63028]: DEBUG nova.virt.hardware [None req-384368a9-37e0-42cd-bf00-b60b6ed58e75 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1107.105606] env[63028]: DEBUG nova.virt.hardware [None req-384368a9-37e0-42cd-bf00-b60b6ed58e75 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1107.105764] env[63028]: DEBUG nova.virt.hardware [None req-384368a9-37e0-42cd-bf00-b60b6ed58e75 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1107.105925] env[63028]: DEBUG nova.virt.hardware [None req-384368a9-37e0-42cd-bf00-b60b6ed58e75 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1107.106098] env[63028]: DEBUG nova.virt.hardware [None req-384368a9-37e0-42cd-bf00-b60b6ed58e75 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1107.106274] env[63028]: DEBUG nova.virt.hardware [None req-384368a9-37e0-42cd-bf00-b60b6ed58e75 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1107.112463] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-384368a9-37e0-42cd-bf00-b60b6ed58e75 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: e048cadf-9dc1-4eb7-a825-422d0736231c] Reconfiguring VM to attach interface {{(pid=63028) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1107.114472] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-07d98f03-abfd-4ee9-972c-1131e3a36f11 {{(pid=63028) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.126956] env[63028]: INFO nova.compute.manager [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 3d91b47d-a5f7-4a10-aefb-1ad9c84c63e4] Took 28.08 seconds to build instance. [ 1107.134324] env[63028]: DEBUG oslo_vmware.api [None req-384368a9-37e0-42cd-bf00-b60b6ed58e75 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Waiting for the task: (returnval){ [ 1107.134324] env[63028]: value = "task-2736337" [ 1107.134324] env[63028]: _type = "Task" [ 1107.134324] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1107.144253] env[63028]: DEBUG oslo_vmware.api [None req-384368a9-37e0-42cd-bf00-b60b6ed58e75 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2736337, 'name': ReconfigVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.187828] env[63028]: INFO nova.compute.manager [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 719e014f-0544-4832-81ae-26b028b17be0] Took 28.11 seconds to build instance. [ 1107.263674] env[63028]: INFO nova.compute.manager [None req-fc139c83-1d5b-4040-b4d1-de7c352945ba tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] [instance: f804ec95-0b97-4960-844d-b678b97fc401] Detaching volume 8a20dc5f-0351-4017-9a2d-4311f8616190 [ 1107.303931] env[63028]: INFO nova.virt.block_device [None req-fc139c83-1d5b-4040-b4d1-de7c352945ba tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] [instance: f804ec95-0b97-4960-844d-b678b97fc401] Attempting to driver detach volume 8a20dc5f-0351-4017-9a2d-4311f8616190 from mountpoint /dev/sdc [ 1107.304185] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-fc139c83-1d5b-4040-b4d1-de7c352945ba tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] [instance: f804ec95-0b97-4960-844d-b678b97fc401] Volume detach. 
Driver type: vmdk {{(pid=63028) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1107.304443] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-fc139c83-1d5b-4040-b4d1-de7c352945ba tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] [instance: f804ec95-0b97-4960-844d-b678b97fc401] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-550875', 'volume_id': '8a20dc5f-0351-4017-9a2d-4311f8616190', 'name': 'volume-8a20dc5f-0351-4017-9a2d-4311f8616190', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'f804ec95-0b97-4960-844d-b678b97fc401', 'attached_at': '', 'detached_at': '', 'volume_id': '8a20dc5f-0351-4017-9a2d-4311f8616190', 'serial': '8a20dc5f-0351-4017-9a2d-4311f8616190'} {{(pid=63028) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1107.305314] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c005df5-6b28-4c16-9395-d2a62bcb0e1f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.328089] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99d2e48b-7eb2-4570-aebb-36b902010d5a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.335865] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e2db650-e70c-433d-b6cf-9e805f7de8f5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.356403] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fba53162-1a7d-4cf7-9458-441ab3bcdf05 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.371148] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-fc139c83-1d5b-4040-b4d1-de7c352945ba tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] The volume has not been displaced from its original location: [datastore1] volume-8a20dc5f-0351-4017-9a2d-4311f8616190/volume-8a20dc5f-0351-4017-9a2d-4311f8616190.vmdk. No consolidation needed. 
{{(pid=63028) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1107.377164] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-fc139c83-1d5b-4040-b4d1-de7c352945ba tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] [instance: f804ec95-0b97-4960-844d-b678b97fc401] Reconfiguring VM instance instance-00000062 to detach disk 2002 {{(pid=63028) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1107.377530] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-50d7edc1-94e0-49a6-82d8-95fdfde2d1ed {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.399478] env[63028]: DEBUG oslo_vmware.api [None req-fc139c83-1d5b-4040-b4d1-de7c352945ba tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Waiting for the task: (returnval){ [ 1107.399478] env[63028]: value = "task-2736338" [ 1107.399478] env[63028]: _type = "Task" [ 1107.399478] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1107.409933] env[63028]: DEBUG oslo_vmware.api [None req-fc139c83-1d5b-4040-b4d1-de7c352945ba tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Task: {'id': task-2736338, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.532763] env[63028]: DEBUG oslo_vmware.api [None req-8591df8e-c073-4f58-b4a9-6a93be3007ed tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2736335, 'name': ReconfigVM_Task, 'duration_secs': 1.428214} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1107.533053] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-8591df8e-c073-4f58-b4a9-6a93be3007ed tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Reconfigured VM instance instance-0000005a to attach disk [datastore1] d41a1eae-bb89-4222-9466-d86af891c654/d41a1eae-bb89-4222-9466-d86af891c654.vmdk or device None with type thin {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1107.533923] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4d9f307-77d3-4f71-a6bb-5789192270e7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.555843] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c04cc8a2-42d1-4e08-8f6c-0210a2dfecb6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.577402] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cf829ee-2a16-4b5a-946c-0c78eb749ebb {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.600943] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44b25085-1cb0-4fab-80ca-ad4f8b45967a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.608207] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-8591df8e-c073-4f58-b4a9-6a93be3007ed tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1107.608469] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7ece8246-849b-4db0-babd-c9c81d0fba11 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.613869] env[63028]: DEBUG oslo_vmware.api [None req-8591df8e-c073-4f58-b4a9-6a93be3007ed tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Waiting for the task: (returnval){ [ 1107.613869] env[63028]: value = "task-2736339" [ 1107.613869] env[63028]: _type = "Task" [ 1107.613869] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1107.621271] env[63028]: DEBUG oslo_vmware.api [None req-8591df8e-c073-4f58-b4a9-6a93be3007ed tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2736339, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.629263] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Lock "3d91b47d-a5f7-4a10-aefb-1ad9c84c63e4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.588s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1107.645338] env[63028]: DEBUG oslo_vmware.api [None req-384368a9-37e0-42cd-bf00-b60b6ed58e75 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2736337, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.690323] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5bef326c-441f-4862-ba86-058817bdf4f0 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Lock "719e014f-0544-4832-81ae-26b028b17be0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.616s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1107.895647] env[63028]: DEBUG oslo_concurrency.lockutils [None req-1488e619-bdb7-4b84-9595-e9372908e0d7 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Acquiring lock "3d91b47d-a5f7-4a10-aefb-1ad9c84c63e4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1107.895946] env[63028]: DEBUG oslo_concurrency.lockutils [None req-1488e619-bdb7-4b84-9595-e9372908e0d7 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Lock "3d91b47d-a5f7-4a10-aefb-1ad9c84c63e4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1107.896138] env[63028]: DEBUG oslo_concurrency.lockutils [None req-1488e619-bdb7-4b84-9595-e9372908e0d7 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Acquiring lock "3d91b47d-a5f7-4a10-aefb-1ad9c84c63e4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1107.896319] env[63028]: DEBUG oslo_concurrency.lockutils [None req-1488e619-bdb7-4b84-9595-e9372908e0d7 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Lock "3d91b47d-a5f7-4a10-aefb-1ad9c84c63e4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1107.896493] env[63028]: DEBUG oslo_concurrency.lockutils [None req-1488e619-bdb7-4b84-9595-e9372908e0d7 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Lock "3d91b47d-a5f7-4a10-aefb-1ad9c84c63e4-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1107.900866] env[63028]: INFO nova.compute.manager [None req-1488e619-bdb7-4b84-9595-e9372908e0d7 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 3d91b47d-a5f7-4a10-aefb-1ad9c84c63e4] Terminating instance [ 1107.913603] env[63028]: DEBUG oslo_vmware.api [None req-fc139c83-1d5b-4040-b4d1-de7c352945ba tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Task: {'id': task-2736338, 'name': ReconfigVM_Task, 'duration_secs': 0.215171} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1107.917218] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-fc139c83-1d5b-4040-b4d1-de7c352945ba tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] [instance: f804ec95-0b97-4960-844d-b678b97fc401] Reconfigured VM instance instance-00000062 to detach disk 2002 {{(pid=63028) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1107.923112] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-af29ecc4-013e-490c-b29f-5be6a7066db0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.947657] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7af5ed24-bb6e-4b2f-bc1a-f59b70216bda tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Acquiring lock "f96d4bcd-a032-4e4d-94e4-12d7013d5e3f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1107.947895] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7af5ed24-bb6e-4b2f-bc1a-f59b70216bda tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Lock "f96d4bcd-a032-4e4d-94e4-12d7013d5e3f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1107.953089] env[63028]: DEBUG oslo_vmware.api [None req-fc139c83-1d5b-4040-b4d1-de7c352945ba tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Waiting for the task: (returnval){ [ 1107.953089] env[63028]: value = "task-2736340" [ 1107.953089] env[63028]: _type = "Task" [ 1107.953089] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1107.965426] env[63028]: DEBUG oslo_vmware.api [None req-fc139c83-1d5b-4040-b4d1-de7c352945ba tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Task: {'id': task-2736340, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.995534] env[63028]: DEBUG oslo_concurrency.lockutils [None req-b27f49ac-bd48-4aa8-9a8a-4f2ef75e4656 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Acquiring lock "719e014f-0544-4832-81ae-26b028b17be0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1107.995734] env[63028]: DEBUG oslo_concurrency.lockutils [None req-b27f49ac-bd48-4aa8-9a8a-4f2ef75e4656 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Lock "719e014f-0544-4832-81ae-26b028b17be0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1107.995970] env[63028]: DEBUG oslo_concurrency.lockutils [None req-b27f49ac-bd48-4aa8-9a8a-4f2ef75e4656 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Acquiring lock "719e014f-0544-4832-81ae-26b028b17be0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1107.996400] env[63028]: DEBUG oslo_concurrency.lockutils [None req-b27f49ac-bd48-4aa8-9a8a-4f2ef75e4656 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Lock "719e014f-0544-4832-81ae-26b028b17be0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1107.996725] env[63028]: DEBUG oslo_concurrency.lockutils [None req-b27f49ac-bd48-4aa8-9a8a-4f2ef75e4656 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Lock "719e014f-0544-4832-81ae-26b028b17be0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1107.999052] env[63028]: INFO nova.compute.manager [None req-b27f49ac-bd48-4aa8-9a8a-4f2ef75e4656 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 719e014f-0544-4832-81ae-26b028b17be0] Terminating instance [ 1108.057114] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9e82345-1df0-4efb-828b-effe3e250e4d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.064788] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-954f3b69-ae5d-4e03-80f7-ae4da8094526 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.095409] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c760d58-7e1e-40ab-b5df-c801e3c791c7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.102786] env[63028]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86e341e4-d109-469f-b8e3-7930ca1b2be6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.116014] env[63028]: DEBUG nova.compute.provider_tree [None req-4449cd42-ad53-4e8c-abd8-2aebc5abfa82 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1108.124819] env[63028]: DEBUG oslo_vmware.api [None req-8591df8e-c073-4f58-b4a9-6a93be3007ed tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2736339, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.144324] env[63028]: DEBUG oslo_vmware.api [None req-384368a9-37e0-42cd-bf00-b60b6ed58e75 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2736337, 'name': ReconfigVM_Task, 'duration_secs': 0.695255} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1108.144862] env[63028]: DEBUG oslo_concurrency.lockutils [None req-384368a9-37e0-42cd-bf00-b60b6ed58e75 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Releasing lock "e048cadf-9dc1-4eb7-a825-422d0736231c" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1108.145107] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-384368a9-37e0-42cd-bf00-b60b6ed58e75 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: e048cadf-9dc1-4eb7-a825-422d0736231c] Reconfigured VM to attach interface {{(pid=63028) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1108.408538] env[63028]: DEBUG nova.compute.manager [None req-1488e619-bdb7-4b84-9595-e9372908e0d7 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 3d91b47d-a5f7-4a10-aefb-1ad9c84c63e4] Start destroying the instance on the hypervisor. 
{{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1108.408779] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-1488e619-bdb7-4b84-9595-e9372908e0d7 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 3d91b47d-a5f7-4a10-aefb-1ad9c84c63e4] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1108.409699] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c1cfa74-e105-43c3-8678-11672a0597b8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.417353] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-1488e619-bdb7-4b84-9595-e9372908e0d7 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 3d91b47d-a5f7-4a10-aefb-1ad9c84c63e4] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1108.417591] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-55868047-0d07-42b6-9bdb-12c1f23b5388 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.423187] env[63028]: DEBUG oslo_vmware.api [None req-1488e619-bdb7-4b84-9595-e9372908e0d7 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Waiting for the task: (returnval){ [ 1108.423187] env[63028]: value = "task-2736341" [ 1108.423187] env[63028]: _type = "Task" [ 1108.423187] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1108.431218] env[63028]: DEBUG oslo_vmware.api [None req-1488e619-bdb7-4b84-9595-e9372908e0d7 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Task: {'id': task-2736341, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.450918] env[63028]: DEBUG nova.compute.manager [None req-7af5ed24-bb6e-4b2f-bc1a-f59b70216bda tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: f96d4bcd-a032-4e4d-94e4-12d7013d5e3f] Starting instance... {{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1108.462308] env[63028]: DEBUG oslo_vmware.api [None req-fc139c83-1d5b-4040-b4d1-de7c352945ba tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Task: {'id': task-2736340, 'name': ReconfigVM_Task, 'duration_secs': 0.147931} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1108.462640] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-fc139c83-1d5b-4040-b4d1-de7c352945ba tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] [instance: f804ec95-0b97-4960-844d-b678b97fc401] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-550875', 'volume_id': '8a20dc5f-0351-4017-9a2d-4311f8616190', 'name': 'volume-8a20dc5f-0351-4017-9a2d-4311f8616190', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'f804ec95-0b97-4960-844d-b678b97fc401', 'attached_at': '', 'detached_at': '', 'volume_id': '8a20dc5f-0351-4017-9a2d-4311f8616190', 'serial': '8a20dc5f-0351-4017-9a2d-4311f8616190'} {{(pid=63028) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1108.505986] env[63028]: DEBUG nova.compute.manager [None req-b27f49ac-bd48-4aa8-9a8a-4f2ef75e4656 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 719e014f-0544-4832-81ae-26b028b17be0] Start destroying the instance on the hypervisor. {{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1108.506469] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-b27f49ac-bd48-4aa8-9a8a-4f2ef75e4656 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 719e014f-0544-4832-81ae-26b028b17be0] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1108.508080] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee8c59af-77c1-4206-b3dc-96938ded5909 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.515589] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-b27f49ac-bd48-4aa8-9a8a-4f2ef75e4656 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 719e014f-0544-4832-81ae-26b028b17be0] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1108.515851] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cabe433f-61af-48ed-837f-ca77dc9f9194 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.522398] env[63028]: DEBUG oslo_vmware.api [None req-b27f49ac-bd48-4aa8-9a8a-4f2ef75e4656 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Waiting for the task: (returnval){ [ 1108.522398] env[63028]: value = "task-2736342" [ 1108.522398] env[63028]: _type = "Task" [ 1108.522398] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1108.530871] env[63028]: DEBUG oslo_vmware.api [None req-b27f49ac-bd48-4aa8-9a8a-4f2ef75e4656 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Task: {'id': task-2736342, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.621726] env[63028]: DEBUG nova.scheduler.client.report [None req-4449cd42-ad53-4e8c-abd8-2aebc5abfa82 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1108.630239] env[63028]: DEBUG oslo_vmware.api [None req-8591df8e-c073-4f58-b4a9-6a93be3007ed tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2736339, 'name': PowerOnVM_Task, 'duration_secs': 0.665542} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1108.630804] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-8591df8e-c073-4f58-b4a9-6a93be3007ed tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1108.649507] env[63028]: DEBUG oslo_concurrency.lockutils [None req-384368a9-37e0-42cd-bf00-b60b6ed58e75 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Lock "interface-e048cadf-9dc1-4eb7-a825-422d0736231c-dfcd47cc-53f0-4202-ba7f-b439ff3a8fe7" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 7.581s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1108.933968] env[63028]: DEBUG oslo_vmware.api [None req-1488e619-bdb7-4b84-9595-e9372908e0d7 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Task: {'id': task-2736341, 'name': PowerOffVM_Task, 'duration_secs': 0.279549} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1108.934372] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-1488e619-bdb7-4b84-9595-e9372908e0d7 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 3d91b47d-a5f7-4a10-aefb-1ad9c84c63e4] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1108.934532] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-1488e619-bdb7-4b84-9595-e9372908e0d7 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 3d91b47d-a5f7-4a10-aefb-1ad9c84c63e4] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1108.934858] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f99681a8-f805-4155-9325-2de1a24dc22a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.972803] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7af5ed24-bb6e-4b2f-bc1a-f59b70216bda tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1108.991547] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-1488e619-bdb7-4b84-9595-e9372908e0d7 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 3d91b47d-a5f7-4a10-aefb-1ad9c84c63e4] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1108.991769] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-1488e619-bdb7-4b84-9595-e9372908e0d7 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 3d91b47d-a5f7-4a10-aefb-1ad9c84c63e4] Deleting contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1108.991950] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-1488e619-bdb7-4b84-9595-e9372908e0d7 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Deleting the datastore file [datastore1] 3d91b47d-a5f7-4a10-aefb-1ad9c84c63e4 {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1108.992240] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-26588dd0-5a05-448e-b2f9-cd280cf72e30 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.999671] env[63028]: DEBUG oslo_vmware.api [None req-1488e619-bdb7-4b84-9595-e9372908e0d7 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Waiting for the task: (returnval){ [ 1108.999671] env[63028]: value = "task-2736344" [ 1108.999671] env[63028]: _type = "Task" [ 1108.999671] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1109.005358] env[63028]: DEBUG nova.objects.instance [None req-fc139c83-1d5b-4040-b4d1-de7c352945ba tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Lazy-loading 'flavor' on Instance uuid f804ec95-0b97-4960-844d-b678b97fc401 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1109.010261] env[63028]: DEBUG oslo_vmware.api [None req-1488e619-bdb7-4b84-9595-e9372908e0d7 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Task: {'id': task-2736344, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.031463] env[63028]: DEBUG oslo_vmware.api [None req-b27f49ac-bd48-4aa8-9a8a-4f2ef75e4656 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Task: {'id': task-2736342, 'name': PowerOffVM_Task, 'duration_secs': 0.214753} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1109.031715] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-b27f49ac-bd48-4aa8-9a8a-4f2ef75e4656 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 719e014f-0544-4832-81ae-26b028b17be0] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1109.031882] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-b27f49ac-bd48-4aa8-9a8a-4f2ef75e4656 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 719e014f-0544-4832-81ae-26b028b17be0] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1109.032142] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4c23db61-c3ee-48b8-ba18-a4e8fd3f376e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.100033] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-b27f49ac-bd48-4aa8-9a8a-4f2ef75e4656 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 719e014f-0544-4832-81ae-26b028b17be0] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1109.100324] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-b27f49ac-bd48-4aa8-9a8a-4f2ef75e4656 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 719e014f-0544-4832-81ae-26b028b17be0] Deleting contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1109.100595] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-b27f49ac-bd48-4aa8-9a8a-4f2ef75e4656 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Deleting the datastore file [datastore1] 719e014f-0544-4832-81ae-26b028b17be0 {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1109.100949] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c9c0cfe9-b366-449e-9795-915e8c090e75 {{(pid=63028) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.108733] env[63028]: DEBUG oslo_vmware.api [None req-b27f49ac-bd48-4aa8-9a8a-4f2ef75e4656 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Waiting for the task: (returnval){ [ 1109.108733] env[63028]: value = "task-2736346" [ 1109.108733] env[63028]: _type = "Task" [ 1109.108733] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1109.118428] env[63028]: DEBUG oslo_vmware.api [None req-b27f49ac-bd48-4aa8-9a8a-4f2ef75e4656 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Task: {'id': task-2736346, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.132560] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4449cd42-ad53-4e8c-abd8-2aebc5abfa82 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.353s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1109.133155] env[63028]: DEBUG nova.compute.manager [None req-4449cd42-ad53-4e8c-abd8-2aebc5abfa82 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Start building networks asynchronously for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1109.136089] env[63028]: DEBUG oslo_concurrency.lockutils [None req-af908094-999d-4595-820a-08496dcca3a3 tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.538s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1109.136334] env[63028]: DEBUG nova.objects.instance [None req-af908094-999d-4595-820a-08496dcca3a3 tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] Lazy-loading 'resources' on Instance uuid 2add1602-122e-41d7-af83-b71d8dab9288 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1109.512763] env[63028]: DEBUG oslo_vmware.api [None req-1488e619-bdb7-4b84-9595-e9372908e0d7 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Task: {'id': task-2736344, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.249725} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1109.512948] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-1488e619-bdb7-4b84-9595-e9372908e0d7 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1109.513148] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-1488e619-bdb7-4b84-9595-e9372908e0d7 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 3d91b47d-a5f7-4a10-aefb-1ad9c84c63e4] Deleted contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1109.513323] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-1488e619-bdb7-4b84-9595-e9372908e0d7 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 3d91b47d-a5f7-4a10-aefb-1ad9c84c63e4] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1109.513498] env[63028]: INFO nova.compute.manager [None req-1488e619-bdb7-4b84-9595-e9372908e0d7 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 3d91b47d-a5f7-4a10-aefb-1ad9c84c63e4] Took 1.10 seconds to destroy the instance on the hypervisor. [ 1109.513740] env[63028]: DEBUG oslo.service.loopingcall [None req-1488e619-bdb7-4b84-9595-e9372908e0d7 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1109.515161] env[63028]: DEBUG nova.compute.manager [-] [instance: 3d91b47d-a5f7-4a10-aefb-1ad9c84c63e4] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1109.516048] env[63028]: DEBUG nova.network.neutron [-] [instance: 3d91b47d-a5f7-4a10-aefb-1ad9c84c63e4] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1109.618959] env[63028]: DEBUG oslo_vmware.api [None req-b27f49ac-bd48-4aa8-9a8a-4f2ef75e4656 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Task: {'id': task-2736346, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.207605} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1109.619595] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-b27f49ac-bd48-4aa8-9a8a-4f2ef75e4656 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1109.619807] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-b27f49ac-bd48-4aa8-9a8a-4f2ef75e4656 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 719e014f-0544-4832-81ae-26b028b17be0] Deleted contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1109.619983] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-b27f49ac-bd48-4aa8-9a8a-4f2ef75e4656 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 719e014f-0544-4832-81ae-26b028b17be0] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1109.620207] env[63028]: INFO nova.compute.manager [None req-b27f49ac-bd48-4aa8-9a8a-4f2ef75e4656 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] [instance: 719e014f-0544-4832-81ae-26b028b17be0] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1109.620435] env[63028]: DEBUG oslo.service.loopingcall [None req-b27f49ac-bd48-4aa8-9a8a-4f2ef75e4656 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1109.620622] env[63028]: DEBUG nova.compute.manager [-] [instance: 719e014f-0544-4832-81ae-26b028b17be0] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1109.620715] env[63028]: DEBUG nova.network.neutron [-] [instance: 719e014f-0544-4832-81ae-26b028b17be0] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1109.645483] env[63028]: DEBUG nova.compute.utils [None req-4449cd42-ad53-4e8c-abd8-2aebc5abfa82 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1109.650015] env[63028]: DEBUG nova.compute.manager [None req-4449cd42-ad53-4e8c-abd8-2aebc5abfa82 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Allocating IP information in the background. 
{{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1109.650173] env[63028]: DEBUG nova.network.neutron [None req-4449cd42-ad53-4e8c-abd8-2aebc5abfa82 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1109.681142] env[63028]: INFO nova.compute.manager [None req-8591df8e-c073-4f58-b4a9-6a93be3007ed tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Updating instance to original state: 'active' [ 1109.763128] env[63028]: DEBUG nova.policy [None req-4449cd42-ad53-4e8c-abd8-2aebc5abfa82 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '743cd51155e0498bb9b381d243afb624', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '11332c2adbdc41928d4bf084435e2037', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 1109.956359] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-393fcf4c-3637-4a7d-9b31-2d57c2a58a55 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.964536] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c4ed935-9df9-4051-9e57-d85b2bbc88fd {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.995209] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5df805f-6a88-4386-ab4f-4f952a3765a3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.002498] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71d09817-9be0-42cd-b9b1-96adca28db9b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.015972] env[63028]: DEBUG nova.compute.provider_tree [None req-af908094-999d-4595-820a-08496dcca3a3 tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1110.021028] env[63028]: DEBUG oslo_concurrency.lockutils [None req-fc139c83-1d5b-4040-b4d1-de7c352945ba tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Lock "f804ec95-0b97-4960-844d-b678b97fc401" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.258s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1110.150549] env[63028]: DEBUG nova.compute.manager [None req-4449cd42-ad53-4e8c-abd8-2aebc5abfa82 
tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Start building block device mappings for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1110.238345] env[63028]: DEBUG nova.compute.manager [req-b5bc4449-0149-459c-b8e5-acf1d190c958 req-c4e30170-eb4c-42dd-bdfe-fb1c8e2c73ce service nova] [instance: 3d91b47d-a5f7-4a10-aefb-1ad9c84c63e4] Received event network-vif-deleted-4f055c6d-021a-4083-bc67-1c9a8e24f55e {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1110.238549] env[63028]: INFO nova.compute.manager [req-b5bc4449-0149-459c-b8e5-acf1d190c958 req-c4e30170-eb4c-42dd-bdfe-fb1c8e2c73ce service nova] [instance: 3d91b47d-a5f7-4a10-aefb-1ad9c84c63e4] Neutron deleted interface 4f055c6d-021a-4083-bc67-1c9a8e24f55e; detaching it from the instance and deleting it from the info cache [ 1110.238892] env[63028]: DEBUG nova.network.neutron [req-b5bc4449-0149-459c-b8e5-acf1d190c958 req-c4e30170-eb4c-42dd-bdfe-fb1c8e2c73ce service nova] [instance: 3d91b47d-a5f7-4a10-aefb-1ad9c84c63e4] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1110.518872] env[63028]: DEBUG nova.scheduler.client.report [None req-af908094-999d-4595-820a-08496dcca3a3 tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1110.523497] env[63028]: DEBUG nova.network.neutron [None req-4449cd42-ad53-4e8c-abd8-2aebc5abfa82 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Successfully created port: f16f5758-9834-448c-8002-199fff053deb {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1110.699031] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2e2e3db1-d349-4d80-82d9-b7c6fd83aa38 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Acquiring lock "interface-e048cadf-9dc1-4eb7-a825-422d0736231c-dfcd47cc-53f0-4202-ba7f-b439ff3a8fe7" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1110.699941] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2e2e3db1-d349-4d80-82d9-b7c6fd83aa38 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Lock "interface-e048cadf-9dc1-4eb7-a825-422d0736231c-dfcd47cc-53f0-4202-ba7f-b439ff3a8fe7" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1110.721594] env[63028]: DEBUG nova.network.neutron [-] 
[instance: 3d91b47d-a5f7-4a10-aefb-1ad9c84c63e4] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1110.742447] env[63028]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-569c23a0-517b-4f7e-8a0c-2a828a3798e0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.752926] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e8e5741-f3a2-4bd1-8028-edec9c168629 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.785627] env[63028]: DEBUG nova.compute.manager [req-b5bc4449-0149-459c-b8e5-acf1d190c958 req-c4e30170-eb4c-42dd-bdfe-fb1c8e2c73ce service nova] [instance: 3d91b47d-a5f7-4a10-aefb-1ad9c84c63e4] Detach interface failed, port_id=4f055c6d-021a-4083-bc67-1c9a8e24f55e, reason: Instance 3d91b47d-a5f7-4a10-aefb-1ad9c84c63e4 could not be found. {{(pid=63028) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1110.859372] env[63028]: DEBUG nova.network.neutron [-] [instance: 719e014f-0544-4832-81ae-26b028b17be0] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1110.886491] env[63028]: DEBUG oslo_concurrency.lockutils [None req-1ec95544-745f-4244-8290-be64012a93cf tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Acquiring lock "d41a1eae-bb89-4222-9466-d86af891c654" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1110.886804] env[63028]: DEBUG oslo_concurrency.lockutils [None req-1ec95544-745f-4244-8290-be64012a93cf tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Lock "d41a1eae-bb89-4222-9466-d86af891c654" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1110.887652] env[63028]: DEBUG oslo_concurrency.lockutils [None req-1ec95544-745f-4244-8290-be64012a93cf tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Acquiring lock "d41a1eae-bb89-4222-9466-d86af891c654-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1110.887652] env[63028]: DEBUG oslo_concurrency.lockutils [None req-1ec95544-745f-4244-8290-be64012a93cf tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Lock "d41a1eae-bb89-4222-9466-d86af891c654-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1110.887652] env[63028]: DEBUG oslo_concurrency.lockutils [None req-1ec95544-745f-4244-8290-be64012a93cf tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Lock "d41a1eae-bb89-4222-9466-d86af891c654-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1110.889360] env[63028]: INFO nova.compute.manager [None req-1ec95544-745f-4244-8290-be64012a93cf tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Terminating instance [ 1111.027821] env[63028]: DEBUG oslo_concurrency.lockutils [None req-af908094-999d-4595-820a-08496dcca3a3 tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.892s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1111.030155] env[63028]: DEBUG oslo_concurrency.lockutils [None req-99cedbf3-0a36-40f6-8b01-7499b21e23d7 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.155s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1111.031657] env[63028]: INFO nova.compute.claims [None req-99cedbf3-0a36-40f6-8b01-7499b21e23d7 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 092c7673-97fb-4085-852c-04a7c19a73e7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1111.048954] env[63028]: INFO nova.scheduler.client.report [None req-af908094-999d-4595-820a-08496dcca3a3 tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] Deleted allocations for instance 2add1602-122e-41d7-af83-b71d8dab9288 [ 1111.160677] env[63028]: DEBUG nova.compute.manager [None req-4449cd42-ad53-4e8c-abd8-2aebc5abfa82 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Start spawning the instance on the hypervisor. 
{{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1111.187083] env[63028]: DEBUG nova.virt.hardware [None req-4449cd42-ad53-4e8c-abd8-2aebc5abfa82 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1111.187325] env[63028]: DEBUG nova.virt.hardware [None req-4449cd42-ad53-4e8c-abd8-2aebc5abfa82 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1111.187484] env[63028]: DEBUG nova.virt.hardware [None req-4449cd42-ad53-4e8c-abd8-2aebc5abfa82 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1111.187664] env[63028]: DEBUG nova.virt.hardware [None req-4449cd42-ad53-4e8c-abd8-2aebc5abfa82 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1111.187808] env[63028]: DEBUG nova.virt.hardware [None req-4449cd42-ad53-4e8c-abd8-2aebc5abfa82 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1111.187954] env[63028]: DEBUG nova.virt.hardware [None req-4449cd42-ad53-4e8c-abd8-2aebc5abfa82 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1111.188176] env[63028]: DEBUG nova.virt.hardware [None req-4449cd42-ad53-4e8c-abd8-2aebc5abfa82 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1111.188335] env[63028]: DEBUG nova.virt.hardware [None req-4449cd42-ad53-4e8c-abd8-2aebc5abfa82 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 1111.188503] env[63028]: DEBUG nova.virt.hardware [None req-4449cd42-ad53-4e8c-abd8-2aebc5abfa82 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1111.188665] env[63028]: DEBUG nova.virt.hardware [None req-4449cd42-ad53-4e8c-abd8-2aebc5abfa82 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1111.188838] env[63028]: DEBUG nova.virt.hardware [None req-4449cd42-ad53-4e8c-abd8-2aebc5abfa82 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1111.189697] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28c20c0f-2ad9-40af-9bce-5265278378fa {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.197014] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d1ed4a58-e6cc-4d78-8183-95b663141949 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Acquiring lock "f804ec95-0b97-4960-844d-b678b97fc401" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1111.197362] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d1ed4a58-e6cc-4d78-8183-95b663141949 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Lock "f804ec95-0b97-4960-844d-b678b97fc401" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1111.197445] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d1ed4a58-e6cc-4d78-8183-95b663141949 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Acquiring lock "f804ec95-0b97-4960-844d-b678b97fc401-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1111.197623] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d1ed4a58-e6cc-4d78-8183-95b663141949 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Lock "f804ec95-0b97-4960-844d-b678b97fc401-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1111.197790] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d1ed4a58-e6cc-4d78-8183-95b663141949 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Lock "f804ec95-0b97-4960-844d-b678b97fc401-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 
0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1111.200221] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73444549-02c6-4b4f-acb9-70ba2860f916 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.204135] env[63028]: INFO nova.compute.manager [None req-d1ed4a58-e6cc-4d78-8183-95b663141949 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] [instance: f804ec95-0b97-4960-844d-b678b97fc401] Terminating instance [ 1111.205813] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2e2e3db1-d349-4d80-82d9-b7c6fd83aa38 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Acquiring lock "e048cadf-9dc1-4eb7-a825-422d0736231c" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1111.206045] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2e2e3db1-d349-4d80-82d9-b7c6fd83aa38 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Acquired lock "e048cadf-9dc1-4eb7-a825-422d0736231c" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1111.208027] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14beccdd-e79b-444a-9132-783ff6034267 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.231700] env[63028]: INFO nova.compute.manager [-] [instance: 3d91b47d-a5f7-4a10-aefb-1ad9c84c63e4] Took 1.72 seconds to deallocate network for instance. [ 1111.233743] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81b6098b-dca4-4215-a0a3-227e2d593512 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.261742] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-2e2e3db1-d349-4d80-82d9-b7c6fd83aa38 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: e048cadf-9dc1-4eb7-a825-422d0736231c] Reconfiguring VM to detach interface {{(pid=63028) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1111.261742] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-44af609b-9647-4529-8e4e-3293978a0af4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.282215] env[63028]: DEBUG oslo_vmware.api [None req-2e2e3db1-d349-4d80-82d9-b7c6fd83aa38 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Waiting for the task: (returnval){ [ 1111.282215] env[63028]: value = "task-2736347" [ 1111.282215] env[63028]: _type = "Task" [ 1111.282215] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1111.289736] env[63028]: DEBUG oslo_vmware.api [None req-2e2e3db1-d349-4d80-82d9-b7c6fd83aa38 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2736347, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.361566] env[63028]: INFO nova.compute.manager [-] [instance: 719e014f-0544-4832-81ae-26b028b17be0] Took 1.74 seconds to deallocate network for instance. [ 1111.393607] env[63028]: DEBUG nova.compute.manager [None req-1ec95544-745f-4244-8290-be64012a93cf tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Start destroying the instance on the hypervisor. {{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1111.393712] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ec95544-745f-4244-8290-be64012a93cf tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1111.394017] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f88b526b-e4a0-4c0a-89d5-48306069bd7f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.404623] env[63028]: DEBUG oslo_vmware.api [None req-1ec95544-745f-4244-8290-be64012a93cf tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Waiting for the task: (returnval){ [ 1111.404623] env[63028]: value = "task-2736348" [ 1111.404623] env[63028]: _type = "Task" [ 1111.404623] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1111.413338] env[63028]: DEBUG oslo_vmware.api [None req-1ec95544-745f-4244-8290-be64012a93cf tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2736348, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.558821] env[63028]: DEBUG oslo_concurrency.lockutils [None req-af908094-999d-4595-820a-08496dcca3a3 tempest-ServerPasswordTestJSON-1929413695 tempest-ServerPasswordTestJSON-1929413695-project-member] Lock "2add1602-122e-41d7-af83-b71d8dab9288" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.935s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1111.713881] env[63028]: DEBUG nova.compute.manager [None req-d1ed4a58-e6cc-4d78-8183-95b663141949 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] [instance: f804ec95-0b97-4960-844d-b678b97fc401] Start destroying the instance on the hypervisor. 
{{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1111.714222] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-d1ed4a58-e6cc-4d78-8183-95b663141949 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] [instance: f804ec95-0b97-4960-844d-b678b97fc401] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1111.715212] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9062ecc7-3b0c-40e1-82c2-0bc605638a60 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.727091] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1ed4a58-e6cc-4d78-8183-95b663141949 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] [instance: f804ec95-0b97-4960-844d-b678b97fc401] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1111.729275] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b1eb1118-f63e-46d5-8725-739e1a651ce9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.735546] env[63028]: DEBUG oslo_vmware.api [None req-d1ed4a58-e6cc-4d78-8183-95b663141949 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Waiting for the task: (returnval){ [ 1111.735546] env[63028]: value = "task-2736349" [ 1111.735546] env[63028]: _type = "Task" [ 1111.735546] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1111.740496] env[63028]: DEBUG oslo_concurrency.lockutils [None req-1488e619-bdb7-4b84-9595-e9372908e0d7 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1111.743909] env[63028]: DEBUG oslo_vmware.api [None req-d1ed4a58-e6cc-4d78-8183-95b663141949 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Task: {'id': task-2736349, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.793555] env[63028]: DEBUG oslo_vmware.api [None req-2e2e3db1-d349-4d80-82d9-b7c6fd83aa38 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2736347, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.868683] env[63028]: DEBUG oslo_concurrency.lockutils [None req-b27f49ac-bd48-4aa8-9a8a-4f2ef75e4656 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1111.916950] env[63028]: DEBUG oslo_vmware.api [None req-1ec95544-745f-4244-8290-be64012a93cf tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2736348, 'name': PowerOffVM_Task, 'duration_secs': 0.213733} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1111.917250] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ec95544-745f-4244-8290-be64012a93cf tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1111.917448] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-1ec95544-745f-4244-8290-be64012a93cf tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Volume detach. Driver type: vmdk {{(pid=63028) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1111.917644] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-1ec95544-745f-4244-8290-be64012a93cf tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: d41a1eae-bb89-4222-9466-d86af891c654] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-550854', 'volume_id': 'fc4d17f9-1fe2-4d10-a1c6-ede7f71f83a6', 'name': 'volume-fc4d17f9-1fe2-4d10-a1c6-ede7f71f83a6', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': 'd41a1eae-bb89-4222-9466-d86af891c654', 'attached_at': '2025-02-20T18:03:26.000000', 'detached_at': '', 'volume_id': 'fc4d17f9-1fe2-4d10-a1c6-ede7f71f83a6', 'serial': 'fc4d17f9-1fe2-4d10-a1c6-ede7f71f83a6'} {{(pid=63028) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1111.918743] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba8e331b-ce8a-4216-bee2-a47ceea7bc30 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.946915] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6943bb1d-f4c0-4aed-95be-d7704d4ed5c4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.953938] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8d41729-a330-4118-891b-8fe3bc8ddad7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.976052] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daefc81e-521b-470e-b641-38d0598ad06d {{(pid=63028) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.993023] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-1ec95544-745f-4244-8290-be64012a93cf tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] The volume has not been displaced from its original location: [datastore2] volume-fc4d17f9-1fe2-4d10-a1c6-ede7f71f83a6/volume-fc4d17f9-1fe2-4d10-a1c6-ede7f71f83a6.vmdk. No consolidation needed. {{(pid=63028) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1111.998346] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-1ec95544-745f-4244-8290-be64012a93cf tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Reconfiguring VM instance instance-0000005a to detach disk 2001 {{(pid=63028) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1111.998669] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-87d25b31-4b88-4271-b5d7-39f3c8544683 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.016978] env[63028]: DEBUG oslo_vmware.api [None req-1ec95544-745f-4244-8290-be64012a93cf tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Waiting for the task: (returnval){ [ 1112.016978] env[63028]: value = "task-2736350" [ 1112.016978] env[63028]: _type = "Task" [ 1112.016978] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1112.024548] env[63028]: DEBUG oslo_vmware.api [None req-1ec95544-745f-4244-8290-be64012a93cf tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2736350, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.246420] env[63028]: DEBUG oslo_vmware.api [None req-d1ed4a58-e6cc-4d78-8183-95b663141949 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Task: {'id': task-2736349, 'name': PowerOffVM_Task} progress is 100%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.274742] env[63028]: DEBUG nova.compute.manager [req-c46c4e24-bb41-4826-8763-c3b41ba16f8a req-1cc59c2b-57f0-47b8-97b5-b296008e77ae service nova] [instance: 719e014f-0544-4832-81ae-26b028b17be0] Received event network-vif-deleted-7807adb2-232e-40cb-b8af-cb7c31fdfc78 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1112.274962] env[63028]: DEBUG nova.compute.manager [req-c46c4e24-bb41-4826-8763-c3b41ba16f8a req-1cc59c2b-57f0-47b8-97b5-b296008e77ae service nova] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Received event network-vif-plugged-f16f5758-9834-448c-8002-199fff053deb {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1112.275252] env[63028]: DEBUG oslo_concurrency.lockutils [req-c46c4e24-bb41-4826-8763-c3b41ba16f8a req-1cc59c2b-57f0-47b8-97b5-b296008e77ae service nova] Acquiring lock "e5767896-8203-4b18-826f-dcb2fe02268e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1112.275470] env[63028]: DEBUG oslo_concurrency.lockutils [req-c46c4e24-bb41-4826-8763-c3b41ba16f8a req-1cc59c2b-57f0-47b8-97b5-b296008e77ae service nova] Lock "e5767896-8203-4b18-826f-dcb2fe02268e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1112.275632] env[63028]: DEBUG oslo_concurrency.lockutils [req-c46c4e24-bb41-4826-8763-c3b41ba16f8a req-1cc59c2b-57f0-47b8-97b5-b296008e77ae service nova] Lock "e5767896-8203-4b18-826f-dcb2fe02268e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1112.275790] env[63028]: DEBUG nova.compute.manager [req-c46c4e24-bb41-4826-8763-c3b41ba16f8a req-1cc59c2b-57f0-47b8-97b5-b296008e77ae service nova] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] No waiting events found dispatching network-vif-plugged-f16f5758-9834-448c-8002-199fff053deb {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1112.276545] env[63028]: WARNING nova.compute.manager [req-c46c4e24-bb41-4826-8763-c3b41ba16f8a req-1cc59c2b-57f0-47b8-97b5-b296008e77ae service nova] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Received unexpected event network-vif-plugged-f16f5758-9834-448c-8002-199fff053deb for instance with vm_state building and task_state spawning. [ 1112.291486] env[63028]: DEBUG nova.network.neutron [None req-4449cd42-ad53-4e8c-abd8-2aebc5abfa82 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Successfully updated port: f16f5758-9834-448c-8002-199fff053deb {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1112.295234] env[63028]: DEBUG oslo_vmware.api [None req-2e2e3db1-d349-4d80-82d9-b7c6fd83aa38 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2736347, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.299788] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e847749-a6fb-43b4-be7b-d8a481e8919a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.308674] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4385180d-88a2-4a9c-84f8-1a2decf8c7dc {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.343237] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f7655bd-1ffb-4f88-9964-2dc135600cb6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.351596] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39bbd7cb-831f-4a78-951f-5f8e8ac1f865 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.365371] env[63028]: DEBUG nova.compute.provider_tree [None req-99cedbf3-0a36-40f6-8b01-7499b21e23d7 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1112.529746] env[63028]: DEBUG oslo_vmware.api [None req-1ec95544-745f-4244-8290-be64012a93cf tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2736350, 'name': ReconfigVM_Task, 'duration_secs': 0.206784} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1112.530323] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-1ec95544-745f-4244-8290-be64012a93cf tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Reconfigured VM instance instance-0000005a to detach disk 2001 {{(pid=63028) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1112.535823] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bd246baa-83f0-4c20-b571-8facd39c24ac {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.552420] env[63028]: DEBUG oslo_vmware.api [None req-1ec95544-745f-4244-8290-be64012a93cf tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Waiting for the task: (returnval){ [ 1112.552420] env[63028]: value = "task-2736351" [ 1112.552420] env[63028]: _type = "Task" [ 1112.552420] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1112.561830] env[63028]: DEBUG oslo_vmware.api [None req-1ec95544-745f-4244-8290-be64012a93cf tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2736351, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.750528] env[63028]: DEBUG oslo_vmware.api [None req-d1ed4a58-e6cc-4d78-8183-95b663141949 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Task: {'id': task-2736349, 'name': PowerOffVM_Task} progress is 100%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.794109] env[63028]: DEBUG oslo_vmware.api [None req-2e2e3db1-d349-4d80-82d9-b7c6fd83aa38 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2736347, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.796802] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4449cd42-ad53-4e8c-abd8-2aebc5abfa82 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Acquiring lock "refresh_cache-e5767896-8203-4b18-826f-dcb2fe02268e" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1112.796945] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4449cd42-ad53-4e8c-abd8-2aebc5abfa82 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Acquired lock "refresh_cache-e5767896-8203-4b18-826f-dcb2fe02268e" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1112.797106] env[63028]: DEBUG nova.network.neutron [None req-4449cd42-ad53-4e8c-abd8-2aebc5abfa82 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1112.869123] env[63028]: DEBUG nova.scheduler.client.report [None req-99cedbf3-0a36-40f6-8b01-7499b21e23d7 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1113.062683] env[63028]: DEBUG oslo_vmware.api [None req-1ec95544-745f-4244-8290-be64012a93cf tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2736351, 'name': ReconfigVM_Task, 'duration_secs': 0.17254} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1113.063126] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-1ec95544-745f-4244-8290-be64012a93cf tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-550854', 'volume_id': 'fc4d17f9-1fe2-4d10-a1c6-ede7f71f83a6', 'name': 'volume-fc4d17f9-1fe2-4d10-a1c6-ede7f71f83a6', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': 'd41a1eae-bb89-4222-9466-d86af891c654', 'attached_at': '2025-02-20T18:03:26.000000', 'detached_at': '', 'volume_id': 'fc4d17f9-1fe2-4d10-a1c6-ede7f71f83a6', 'serial': 'fc4d17f9-1fe2-4d10-a1c6-ede7f71f83a6'} {{(pid=63028) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1113.063335] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-1ec95544-745f-4244-8290-be64012a93cf tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1113.064151] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec1f17f4-7a3f-4af3-9a75-c230b03ea5f9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.070867] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-1ec95544-745f-4244-8290-be64012a93cf tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1113.071108] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6d859bac-516d-4e3f-bfab-e6135aa71933 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.144250] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-1ec95544-745f-4244-8290-be64012a93cf tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1113.144546] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-1ec95544-745f-4244-8290-be64012a93cf tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Deleting contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1113.144715] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-1ec95544-745f-4244-8290-be64012a93cf tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Deleting the datastore file [datastore1] d41a1eae-bb89-4222-9466-d86af891c654 {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1113.145320] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-41cb88ce-b0d6-47cc-9b44-cf046b6bc616 {{(pid=63028) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.153680] env[63028]: DEBUG oslo_vmware.api [None req-1ec95544-745f-4244-8290-be64012a93cf tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Waiting for the task: (returnval){ [ 1113.153680] env[63028]: value = "task-2736353" [ 1113.153680] env[63028]: _type = "Task" [ 1113.153680] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1113.161727] env[63028]: DEBUG oslo_vmware.api [None req-1ec95544-745f-4244-8290-be64012a93cf tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2736353, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.246967] env[63028]: DEBUG oslo_vmware.api [None req-d1ed4a58-e6cc-4d78-8183-95b663141949 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Task: {'id': task-2736349, 'name': PowerOffVM_Task, 'duration_secs': 1.193894} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1113.247271] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1ed4a58-e6cc-4d78-8183-95b663141949 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] [instance: f804ec95-0b97-4960-844d-b678b97fc401] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1113.247431] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-d1ed4a58-e6cc-4d78-8183-95b663141949 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] [instance: f804ec95-0b97-4960-844d-b678b97fc401] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1113.247684] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-af31308a-7b29-4c71-9f4c-1f10131a7d1f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.296550] env[63028]: DEBUG oslo_vmware.api [None req-2e2e3db1-d349-4d80-82d9-b7c6fd83aa38 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2736347, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.313083] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-d1ed4a58-e6cc-4d78-8183-95b663141949 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] [instance: f804ec95-0b97-4960-844d-b678b97fc401] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1113.313460] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-d1ed4a58-e6cc-4d78-8183-95b663141949 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] [instance: f804ec95-0b97-4960-844d-b678b97fc401] Deleting contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1113.313764] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-d1ed4a58-e6cc-4d78-8183-95b663141949 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Deleting the datastore file [datastore2] f804ec95-0b97-4960-844d-b678b97fc401 {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1113.314148] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-de533f85-84d5-4621-9c00-15c12a7d24fa {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.320766] env[63028]: DEBUG oslo_vmware.api [None req-d1ed4a58-e6cc-4d78-8183-95b663141949 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Waiting for the task: (returnval){ [ 1113.320766] env[63028]: value = "task-2736355" [ 1113.320766] env[63028]: _type = "Task" [ 1113.320766] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1113.329211] env[63028]: DEBUG oslo_vmware.api [None req-d1ed4a58-e6cc-4d78-8183-95b663141949 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Task: {'id': task-2736355, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.330808] env[63028]: DEBUG nova.network.neutron [None req-4449cd42-ad53-4e8c-abd8-2aebc5abfa82 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Instance cache missing network info. {{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1113.372890] env[63028]: DEBUG oslo_concurrency.lockutils [None req-99cedbf3-0a36-40f6-8b01-7499b21e23d7 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.343s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1113.373339] env[63028]: DEBUG nova.compute.manager [None req-99cedbf3-0a36-40f6-8b01-7499b21e23d7 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 092c7673-97fb-4085-852c-04a7c19a73e7] Start building networks asynchronously for instance. 
{{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1113.376310] env[63028]: DEBUG oslo_concurrency.lockutils [None req-37c28f8e-9d2f-4a53-bb11-705a4a3a6321 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.831s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1113.377052] env[63028]: DEBUG nova.objects.instance [None req-37c28f8e-9d2f-4a53-bb11-705a4a3a6321 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Lazy-loading 'resources' on Instance uuid a7ff444e-43bc-4925-9754-86ff30de6751 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1113.488742] env[63028]: DEBUG nova.network.neutron [None req-4449cd42-ad53-4e8c-abd8-2aebc5abfa82 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Updating instance_info_cache with network_info: [{"id": "f16f5758-9834-448c-8002-199fff053deb", "address": "fa:16:3e:95:4e:63", "network": {"id": "2b335013-49f6-4f84-b6b6-33d71818b2b9", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-2106845332-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "11332c2adbdc41928d4bf084435e2037", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f01bbee7-8b9a-46be-891e-59d8142fb359", "external-id": "nsx-vlan-transportzone-145", "segmentation_id": 145, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf16f5758-98", "ovs_interfaceid": "f16f5758-9834-448c-8002-199fff053deb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1113.664132] env[63028]: DEBUG oslo_vmware.api [None req-1ec95544-745f-4244-8290-be64012a93cf tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2736353, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.299244} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1113.664422] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-1ec95544-745f-4244-8290-be64012a93cf tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1113.664617] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-1ec95544-745f-4244-8290-be64012a93cf tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Deleted contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1113.664793] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-1ec95544-745f-4244-8290-be64012a93cf tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1113.664969] env[63028]: INFO nova.compute.manager [None req-1ec95544-745f-4244-8290-be64012a93cf tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Took 2.27 seconds to destroy the instance on the hypervisor. [ 1113.665231] env[63028]: DEBUG oslo.service.loopingcall [None req-1ec95544-745f-4244-8290-be64012a93cf tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1113.665446] env[63028]: DEBUG nova.compute.manager [-] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1113.665545] env[63028]: DEBUG nova.network.neutron [-] [instance: d41a1eae-bb89-4222-9466-d86af891c654] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1113.797555] env[63028]: DEBUG oslo_vmware.api [None req-2e2e3db1-d349-4d80-82d9-b7c6fd83aa38 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2736347, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.832196] env[63028]: DEBUG oslo_vmware.api [None req-d1ed4a58-e6cc-4d78-8183-95b663141949 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Task: {'id': task-2736355, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.16421} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1113.832459] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-d1ed4a58-e6cc-4d78-8183-95b663141949 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1113.832646] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-d1ed4a58-e6cc-4d78-8183-95b663141949 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] [instance: f804ec95-0b97-4960-844d-b678b97fc401] Deleted contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1113.832825] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-d1ed4a58-e6cc-4d78-8183-95b663141949 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] [instance: f804ec95-0b97-4960-844d-b678b97fc401] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1113.833015] env[63028]: INFO nova.compute.manager [None req-d1ed4a58-e6cc-4d78-8183-95b663141949 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] [instance: f804ec95-0b97-4960-844d-b678b97fc401] Took 2.12 seconds to destroy the instance on the hypervisor. [ 1113.833253] env[63028]: DEBUG oslo.service.loopingcall [None req-d1ed4a58-e6cc-4d78-8183-95b663141949 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1113.833463] env[63028]: DEBUG nova.compute.manager [-] [instance: f804ec95-0b97-4960-844d-b678b97fc401] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1113.833568] env[63028]: DEBUG nova.network.neutron [-] [instance: f804ec95-0b97-4960-844d-b678b97fc401] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1113.879932] env[63028]: DEBUG nova.compute.utils [None req-99cedbf3-0a36-40f6-8b01-7499b21e23d7 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1113.884082] env[63028]: DEBUG nova.compute.manager [None req-99cedbf3-0a36-40f6-8b01-7499b21e23d7 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 092c7673-97fb-4085-852c-04a7c19a73e7] Allocating IP information in the background. 
{{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1113.884485] env[63028]: DEBUG nova.network.neutron [None req-99cedbf3-0a36-40f6-8b01-7499b21e23d7 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 092c7673-97fb-4085-852c-04a7c19a73e7] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1113.991826] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4449cd42-ad53-4e8c-abd8-2aebc5abfa82 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Releasing lock "refresh_cache-e5767896-8203-4b18-826f-dcb2fe02268e" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1113.992714] env[63028]: DEBUG nova.compute.manager [None req-4449cd42-ad53-4e8c-abd8-2aebc5abfa82 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Instance network_info: |[{"id": "f16f5758-9834-448c-8002-199fff053deb", "address": "fa:16:3e:95:4e:63", "network": {"id": "2b335013-49f6-4f84-b6b6-33d71818b2b9", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-2106845332-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "11332c2adbdc41928d4bf084435e2037", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f01bbee7-8b9a-46be-891e-59d8142fb359", "external-id": "nsx-vlan-transportzone-145", "segmentation_id": 145, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf16f5758-98", "ovs_interfaceid": "f16f5758-9834-448c-8002-199fff053deb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1113.992946] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-4449cd42-ad53-4e8c-abd8-2aebc5abfa82 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:95:4e:63', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f01bbee7-8b9a-46be-891e-59d8142fb359', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f16f5758-9834-448c-8002-199fff053deb', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1114.002507] env[63028]: DEBUG oslo.service.loopingcall [None req-4449cd42-ad53-4e8c-abd8-2aebc5abfa82 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1114.007145] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1114.008385] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f5629e3d-33e0-41df-a2f6-4ea9195ffc46 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.032414] env[63028]: DEBUG nova.policy [None req-99cedbf3-0a36-40f6-8b01-7499b21e23d7 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a3ed8f5b3d7b4be99d3b4649e156af58', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '847e89af959a4266ab55c1d2106ba8fe', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 1114.040252] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1114.040252] env[63028]: value = "task-2736356" [ 1114.040252] env[63028]: _type = "Task" [ 1114.040252] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1114.051120] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2736356, 'name': CreateVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.213875] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76c74366-bb1e-4296-87c2-20aa7bc7dd39 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.221882] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20ff02b1-b432-4643-9c76-e2f0ee6fe8ae {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.262444] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acb5b7b2-7dfe-44ee-af0e-80ebf86426cc {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.271290] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3c262b5-0aa2-4730-b7cc-c88aefd23204 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.290468] env[63028]: DEBUG nova.compute.provider_tree [None req-37c28f8e-9d2f-4a53-bb11-705a4a3a6321 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Updating inventory in ProviderTree for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 
'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1114.300099] env[63028]: DEBUG oslo_vmware.api [None req-2e2e3db1-d349-4d80-82d9-b7c6fd83aa38 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2736347, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.329525] env[63028]: DEBUG nova.compute.manager [req-deada06d-c363-4c2d-9cae-5e9e3b4df5ff req-6445c4a2-6d74-4f02-bcee-df69ca44ec32 service nova] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Received event network-changed-f16f5758-9834-448c-8002-199fff053deb {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1114.329727] env[63028]: DEBUG nova.compute.manager [req-deada06d-c363-4c2d-9cae-5e9e3b4df5ff req-6445c4a2-6d74-4f02-bcee-df69ca44ec32 service nova] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Refreshing instance network info cache due to event network-changed-f16f5758-9834-448c-8002-199fff053deb. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1114.329936] env[63028]: DEBUG oslo_concurrency.lockutils [req-deada06d-c363-4c2d-9cae-5e9e3b4df5ff req-6445c4a2-6d74-4f02-bcee-df69ca44ec32 service nova] Acquiring lock "refresh_cache-e5767896-8203-4b18-826f-dcb2fe02268e" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1114.330713] env[63028]: DEBUG oslo_concurrency.lockutils [req-deada06d-c363-4c2d-9cae-5e9e3b4df5ff req-6445c4a2-6d74-4f02-bcee-df69ca44ec32 service nova] Acquired lock "refresh_cache-e5767896-8203-4b18-826f-dcb2fe02268e" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1114.330713] env[63028]: DEBUG nova.network.neutron [req-deada06d-c363-4c2d-9cae-5e9e3b4df5ff req-6445c4a2-6d74-4f02-bcee-df69ca44ec32 service nova] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Refreshing network info cache for port f16f5758-9834-448c-8002-199fff053deb {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1114.385133] env[63028]: DEBUG nova.compute.manager [None req-99cedbf3-0a36-40f6-8b01-7499b21e23d7 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 092c7673-97fb-4085-852c-04a7c19a73e7] Start building block device mappings for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1114.550855] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2736356, 'name': CreateVM_Task, 'duration_secs': 0.32581} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1114.551037] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1114.551756] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4449cd42-ad53-4e8c-abd8-2aebc5abfa82 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1114.551925] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4449cd42-ad53-4e8c-abd8-2aebc5abfa82 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1114.552321] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4449cd42-ad53-4e8c-abd8-2aebc5abfa82 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1114.552565] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0daaba3c-3355-4376-8003-70c1b51a0d9c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.560012] env[63028]: DEBUG oslo_vmware.api [None req-4449cd42-ad53-4e8c-abd8-2aebc5abfa82 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Waiting for the task: (returnval){ [ 1114.560012] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52c10400-d139-f380-beb2-d8c137ecb88f" [ 1114.560012] env[63028]: _type = "Task" [ 1114.560012] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1114.571359] env[63028]: DEBUG oslo_vmware.api [None req-4449cd42-ad53-4e8c-abd8-2aebc5abfa82 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52c10400-d139-f380-beb2-d8c137ecb88f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.783449] env[63028]: DEBUG nova.network.neutron [None req-99cedbf3-0a36-40f6-8b01-7499b21e23d7 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 092c7673-97fb-4085-852c-04a7c19a73e7] Successfully created port: eda2613a-55b1-4516-80ce-192d52a6abe6 {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1114.806083] env[63028]: DEBUG oslo_vmware.api [None req-2e2e3db1-d349-4d80-82d9-b7c6fd83aa38 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2736347, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.821949] env[63028]: ERROR nova.scheduler.client.report [None req-37c28f8e-9d2f-4a53-bb11-705a4a3a6321 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] [req-49e7e544-b53c-499d-8fae-03379423a620] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-49e7e544-b53c-499d-8fae-03379423a620"}]} [ 1114.846646] env[63028]: DEBUG nova.scheduler.client.report [None req-37c28f8e-9d2f-4a53-bb11-705a4a3a6321 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Refreshing inventories for resource provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1114.869295] env[63028]: DEBUG nova.scheduler.client.report [None req-37c28f8e-9d2f-4a53-bb11-705a4a3a6321 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Updating ProviderTree inventory for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1114.869295] env[63028]: DEBUG nova.compute.provider_tree [None req-37c28f8e-9d2f-4a53-bb11-705a4a3a6321 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Updating inventory in ProviderTree for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1114.885740] env[63028]: DEBUG nova.scheduler.client.report [None req-37c28f8e-9d2f-4a53-bb11-705a4a3a6321 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Refreshing aggregate associations for resource provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2, aggregates: None {{(pid=63028) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1114.890862] env[63028]: INFO nova.virt.block_device [None req-99cedbf3-0a36-40f6-8b01-7499b21e23d7 tempest-ServerActionsTestOtherA-431602134 
tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 092c7673-97fb-4085-852c-04a7c19a73e7] Booting with volume bc407df1-2bc3-4054-b5d6-b4b6863f2c75 at /dev/sda [ 1114.915339] env[63028]: DEBUG nova.scheduler.client.report [None req-37c28f8e-9d2f-4a53-bb11-705a4a3a6321 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Refreshing trait associations for resource provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=63028) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1114.946128] env[63028]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4cccea6b-1199-45db-ac3f-9dd7f24107b6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.960506] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef0dc09d-6720-49a7-8c7d-5ea514da8112 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.006123] env[63028]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ad61282f-3694-470d-a9c9-2c1e00850dc2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.014276] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80063c75-3149-4e28-b5ff-1e2b871486ab {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.057403] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a51c7a8f-dec2-4232-ba3b-cd42462359ac {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.074882] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f24d7dea-25d0-4f53-a14f-c5be2dcbc773 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.078362] env[63028]: DEBUG oslo_vmware.api [None req-4449cd42-ad53-4e8c-abd8-2aebc5abfa82 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52c10400-d139-f380-beb2-d8c137ecb88f, 'name': SearchDatastore_Task, 'duration_secs': 0.014706} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1115.084723] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4449cd42-ad53-4e8c-abd8-2aebc5abfa82 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1115.084973] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-4449cd42-ad53-4e8c-abd8-2aebc5abfa82 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1115.085229] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4449cd42-ad53-4e8c-abd8-2aebc5abfa82 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1115.085469] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4449cd42-ad53-4e8c-abd8-2aebc5abfa82 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1115.085544] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-4449cd42-ad53-4e8c-abd8-2aebc5abfa82 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1115.086391] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2d1a8f21-553d-462f-b3f0-3214bb9a4add {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.107772] env[63028]: DEBUG nova.virt.block_device [None req-99cedbf3-0a36-40f6-8b01-7499b21e23d7 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 092c7673-97fb-4085-852c-04a7c19a73e7] Updating existing volume attachment record: 94f0e19c-dc02-4acd-b907-0cf3ff7afc6d {{(pid=63028) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1115.111104] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-4449cd42-ad53-4e8c-abd8-2aebc5abfa82 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1115.111265] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-4449cd42-ad53-4e8c-abd8-2aebc5abfa82 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1115.112344] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e3bb4dc3-a4f2-45b7-a7d2-e9e900d6ed49 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.118440] env[63028]: DEBUG oslo_vmware.api [None req-4449cd42-ad53-4e8c-abd8-2aebc5abfa82 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Waiting for the task: (returnval){ [ 1115.118440] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52fba7ee-ce00-2007-2a84-b671189bcdf6" [ 1115.118440] env[63028]: _type = "Task" [ 1115.118440] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1115.131023] env[63028]: DEBUG oslo_vmware.api [None req-4449cd42-ad53-4e8c-abd8-2aebc5abfa82 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52fba7ee-ce00-2007-2a84-b671189bcdf6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.131023] env[63028]: DEBUG nova.network.neutron [-] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1115.249557] env[63028]: DEBUG nova.network.neutron [req-deada06d-c363-4c2d-9cae-5e9e3b4df5ff req-6445c4a2-6d74-4f02-bcee-df69ca44ec32 service nova] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Updated VIF entry in instance network info cache for port f16f5758-9834-448c-8002-199fff053deb. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1115.250600] env[63028]: DEBUG nova.network.neutron [req-deada06d-c363-4c2d-9cae-5e9e3b4df5ff req-6445c4a2-6d74-4f02-bcee-df69ca44ec32 service nova] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Updating instance_info_cache with network_info: [{"id": "f16f5758-9834-448c-8002-199fff053deb", "address": "fa:16:3e:95:4e:63", "network": {"id": "2b335013-49f6-4f84-b6b6-33d71818b2b9", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-2106845332-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "11332c2adbdc41928d4bf084435e2037", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f01bbee7-8b9a-46be-891e-59d8142fb359", "external-id": "nsx-vlan-transportzone-145", "segmentation_id": 145, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf16f5758-98", "ovs_interfaceid": "f16f5758-9834-448c-8002-199fff053deb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1115.301439] env[63028]: DEBUG nova.network.neutron [-] [instance: f804ec95-0b97-4960-844d-b678b97fc401] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1115.301439] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d0d2280-a40c-4365-a021-34a0faf006f1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.308900] env[63028]: DEBUG oslo_vmware.api [None req-2e2e3db1-d349-4d80-82d9-b7c6fd83aa38 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2736347, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.311676] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cb3c8e2-3593-4ac5-96bc-5af2769cf49c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.348075] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d169ad2-3196-4899-8e61-7a2cd77f7108 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.355322] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c497bde9-5c3b-45c9-b4d9-70ba882635dd {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.370129] env[63028]: DEBUG nova.compute.provider_tree [None req-37c28f8e-9d2f-4a53-bb11-705a4a3a6321 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Updating inventory in ProviderTree for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1115.628172] env[63028]: DEBUG oslo_vmware.api [None req-4449cd42-ad53-4e8c-abd8-2aebc5abfa82 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52fba7ee-ce00-2007-2a84-b671189bcdf6, 'name': SearchDatastore_Task, 'duration_secs': 0.010311} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1115.628923] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c39d7f45-3590-4074-b71d-f01aee5d8d98 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.632580] env[63028]: INFO nova.compute.manager [-] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Took 1.97 seconds to deallocate network for instance. [ 1115.635659] env[63028]: DEBUG oslo_vmware.api [None req-4449cd42-ad53-4e8c-abd8-2aebc5abfa82 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Waiting for the task: (returnval){ [ 1115.635659] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5287c672-591a-9048-50e4-0a197c03cf15" [ 1115.635659] env[63028]: _type = "Task" [ 1115.635659] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1115.643403] env[63028]: DEBUG oslo_vmware.api [None req-4449cd42-ad53-4e8c-abd8-2aebc5abfa82 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5287c672-591a-9048-50e4-0a197c03cf15, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.755343] env[63028]: DEBUG oslo_concurrency.lockutils [req-deada06d-c363-4c2d-9cae-5e9e3b4df5ff req-6445c4a2-6d74-4f02-bcee-df69ca44ec32 service nova] Releasing lock "refresh_cache-e5767896-8203-4b18-826f-dcb2fe02268e" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1115.801752] env[63028]: DEBUG oslo_vmware.api [None req-2e2e3db1-d349-4d80-82d9-b7c6fd83aa38 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2736347, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.804338] env[63028]: INFO nova.compute.manager [-] [instance: f804ec95-0b97-4960-844d-b678b97fc401] Took 1.97 seconds to deallocate network for instance. [ 1116.054206] env[63028]: DEBUG nova.scheduler.client.report [None req-37c28f8e-9d2f-4a53-bb11-705a4a3a6321 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Updated inventory for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 with generation 152 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1116.054573] env[63028]: DEBUG nova.compute.provider_tree [None req-37c28f8e-9d2f-4a53-bb11-705a4a3a6321 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Updating resource provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 generation from 152 to 153 during operation: update_inventory {{(pid=63028) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1116.054777] env[63028]: DEBUG nova.compute.provider_tree [None req-37c28f8e-9d2f-4a53-bb11-705a4a3a6321 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Updating inventory in ProviderTree for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1116.146911] env[63028]: DEBUG oslo_vmware.api [None req-4449cd42-ad53-4e8c-abd8-2aebc5abfa82 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5287c672-591a-9048-50e4-0a197c03cf15, 'name': SearchDatastore_Task, 'duration_secs': 0.010052} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1116.147206] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4449cd42-ad53-4e8c-abd8-2aebc5abfa82 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1116.147460] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-4449cd42-ad53-4e8c-abd8-2aebc5abfa82 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] e5767896-8203-4b18-826f-dcb2fe02268e/e5767896-8203-4b18-826f-dcb2fe02268e.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1116.147725] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d0197ef9-399f-4b0c-b9bb-c9c26f58cb1f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.154269] env[63028]: DEBUG oslo_vmware.api [None req-4449cd42-ad53-4e8c-abd8-2aebc5abfa82 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Waiting for the task: (returnval){ [ 1116.154269] env[63028]: value = "task-2736357" [ 1116.154269] env[63028]: _type = "Task" [ 1116.154269] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1116.161724] env[63028]: DEBUG oslo_vmware.api [None req-4449cd42-ad53-4e8c-abd8-2aebc5abfa82 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2736357, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.175233] env[63028]: INFO nova.compute.manager [None req-1ec95544-745f-4244-8290-be64012a93cf tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Took 0.54 seconds to detach 1 volumes for instance. [ 1116.302390] env[63028]: DEBUG oslo_vmware.api [None req-2e2e3db1-d349-4d80-82d9-b7c6fd83aa38 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2736347, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.310812] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d1ed4a58-e6cc-4d78-8183-95b663141949 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1116.373734] env[63028]: DEBUG nova.compute.manager [req-ab785edc-90b4-4bc5-9a2a-e73154c2be2e req-50aa4245-3330-423c-aeef-5aa941b7439c service nova] [instance: d41a1eae-bb89-4222-9466-d86af891c654] Received event network-vif-deleted-c5f1d585-d624-4525-a5b2-132b18bf9378 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1116.373734] env[63028]: DEBUG nova.compute.manager [req-ab785edc-90b4-4bc5-9a2a-e73154c2be2e req-50aa4245-3330-423c-aeef-5aa941b7439c service nova] [instance: f804ec95-0b97-4960-844d-b678b97fc401] Received event network-vif-deleted-33f3a6b3-ea0e-4b6c-a0c5-80d9100ff94e {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1116.562037] env[63028]: DEBUG oslo_concurrency.lockutils [None req-37c28f8e-9d2f-4a53-bb11-705a4a3a6321 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.183s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1116.563079] env[63028]: DEBUG oslo_concurrency.lockutils [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 10.399s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1116.568125] env[63028]: DEBUG oslo_concurrency.lockutils [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.002s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1116.568125] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63028) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1116.568125] env[63028]: DEBUG oslo_concurrency.lockutils [None req-adaba13b-512a-4572-b6cc-ff868e6f461c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.797s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1116.568125] env[63028]: INFO nova.compute.claims [None req-adaba13b-512a-4572-b6cc-ff868e6f461c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 629a3b6f-a74b-4193-bcf4-fc67a1752d5b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1116.579579] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-209d2d0c-af1d-4dc7-a931-d0e20dc402ef {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.590419] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-926a1f18-3a0e-4225-a968-5d1ea6c28e17 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.599204] env[63028]: INFO nova.scheduler.client.report [None req-37c28f8e-9d2f-4a53-bb11-705a4a3a6321 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Deleted allocations for instance a7ff444e-43bc-4925-9754-86ff30de6751 [ 1116.616816] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c0b94d2-3698-4e63-b575-9b89f7ab0cf2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.626743] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4cea7db-cfee-4af5-9043-290cc3386c70 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.659972] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=178950MB free_disk=111GB free_vcpus=48 pci_devices=None {{(pid=63028) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1116.660471] env[63028]: DEBUG oslo_concurrency.lockutils [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1116.669219] env[63028]: DEBUG oslo_vmware.api [None req-4449cd42-ad53-4e8c-abd8-2aebc5abfa82 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2736357, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.470617} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1116.669655] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-4449cd42-ad53-4e8c-abd8-2aebc5abfa82 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] e5767896-8203-4b18-826f-dcb2fe02268e/e5767896-8203-4b18-826f-dcb2fe02268e.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1116.670094] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-4449cd42-ad53-4e8c-abd8-2aebc5abfa82 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1116.670493] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2c4f674c-2ead-44f7-a21c-467244e350f8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.676729] env[63028]: DEBUG oslo_vmware.api [None req-4449cd42-ad53-4e8c-abd8-2aebc5abfa82 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Waiting for the task: (returnval){ [ 1116.676729] env[63028]: value = "task-2736358" [ 1116.676729] env[63028]: _type = "Task" [ 1116.676729] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1116.680503] env[63028]: DEBUG oslo_concurrency.lockutils [None req-1ec95544-745f-4244-8290-be64012a93cf tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1116.685185] env[63028]: DEBUG oslo_vmware.api [None req-4449cd42-ad53-4e8c-abd8-2aebc5abfa82 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2736358, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.798767] env[63028]: DEBUG nova.network.neutron [None req-99cedbf3-0a36-40f6-8b01-7499b21e23d7 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 092c7673-97fb-4085-852c-04a7c19a73e7] Successfully updated port: eda2613a-55b1-4516-80ce-192d52a6abe6 {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1116.805952] env[63028]: DEBUG oslo_vmware.api [None req-2e2e3db1-d349-4d80-82d9-b7c6fd83aa38 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2736347, 'name': ReconfigVM_Task} progress is 18%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.118500] env[63028]: DEBUG oslo_concurrency.lockutils [None req-37c28f8e-9d2f-4a53-bb11-705a4a3a6321 tempest-ServerDiskConfigTestJSON-1072642600 tempest-ServerDiskConfigTestJSON-1072642600-project-member] Lock "a7ff444e-43bc-4925-9754-86ff30de6751" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.238s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1117.187223] env[63028]: DEBUG oslo_vmware.api [None req-4449cd42-ad53-4e8c-abd8-2aebc5abfa82 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2736358, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068728} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1117.187424] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-4449cd42-ad53-4e8c-abd8-2aebc5abfa82 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1117.188184] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-680d3174-1872-4451-a7cd-2ea651a90dba {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.210531] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-4449cd42-ad53-4e8c-abd8-2aebc5abfa82 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Reconfiguring VM instance instance-00000070 to attach disk [datastore2] e5767896-8203-4b18-826f-dcb2fe02268e/e5767896-8203-4b18-826f-dcb2fe02268e.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1117.211709] env[63028]: DEBUG nova.compute.manager [None req-99cedbf3-0a36-40f6-8b01-7499b21e23d7 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 092c7673-97fb-4085-852c-04a7c19a73e7] Start spawning the instance on the hypervisor. 
{{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1117.212204] env[63028]: DEBUG nova.virt.hardware [None req-99cedbf3-0a36-40f6-8b01-7499b21e23d7 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1117.212415] env[63028]: DEBUG nova.virt.hardware [None req-99cedbf3-0a36-40f6-8b01-7499b21e23d7 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1117.212569] env[63028]: DEBUG nova.virt.hardware [None req-99cedbf3-0a36-40f6-8b01-7499b21e23d7 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1117.212744] env[63028]: DEBUG nova.virt.hardware [None req-99cedbf3-0a36-40f6-8b01-7499b21e23d7 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1117.212887] env[63028]: DEBUG nova.virt.hardware [None req-99cedbf3-0a36-40f6-8b01-7499b21e23d7 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1117.213056] env[63028]: DEBUG nova.virt.hardware [None req-99cedbf3-0a36-40f6-8b01-7499b21e23d7 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1117.213267] env[63028]: DEBUG nova.virt.hardware [None req-99cedbf3-0a36-40f6-8b01-7499b21e23d7 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1117.213424] env[63028]: DEBUG nova.virt.hardware [None req-99cedbf3-0a36-40f6-8b01-7499b21e23d7 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1117.213589] env[63028]: DEBUG nova.virt.hardware [None req-99cedbf3-0a36-40f6-8b01-7499b21e23d7 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Got 1 possible topologies {{(pid=63028) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1117.213748] env[63028]: DEBUG nova.virt.hardware [None req-99cedbf3-0a36-40f6-8b01-7499b21e23d7 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1117.213917] env[63028]: DEBUG nova.virt.hardware [None req-99cedbf3-0a36-40f6-8b01-7499b21e23d7 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1117.214168] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-609d2818-6d9d-4aec-9013-e64c99cf8b8e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.229647] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8ded734-54c3-4927-9237-620c41409ebb {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.239280] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-baf98907-8c98-47fd-894f-ac7b95adc6a7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.243508] env[63028]: DEBUG oslo_vmware.api [None req-4449cd42-ad53-4e8c-abd8-2aebc5abfa82 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Waiting for the task: (returnval){ [ 1117.243508] env[63028]: value = "task-2736359" [ 1117.243508] env[63028]: _type = "Task" [ 1117.243508] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1117.258931] env[63028]: DEBUG oslo_vmware.api [None req-4449cd42-ad53-4e8c-abd8-2aebc5abfa82 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2736359, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.302128] env[63028]: DEBUG oslo_concurrency.lockutils [None req-99cedbf3-0a36-40f6-8b01-7499b21e23d7 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Acquiring lock "refresh_cache-092c7673-97fb-4085-852c-04a7c19a73e7" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1117.302128] env[63028]: DEBUG oslo_concurrency.lockutils [None req-99cedbf3-0a36-40f6-8b01-7499b21e23d7 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Acquired lock "refresh_cache-092c7673-97fb-4085-852c-04a7c19a73e7" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1117.302128] env[63028]: DEBUG nova.network.neutron [None req-99cedbf3-0a36-40f6-8b01-7499b21e23d7 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 092c7673-97fb-4085-852c-04a7c19a73e7] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1117.306738] env[63028]: DEBUG oslo_vmware.api [None req-2e2e3db1-d349-4d80-82d9-b7c6fd83aa38 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2736347, 'name': ReconfigVM_Task, 'duration_secs': 5.826007} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1117.307233] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2e2e3db1-d349-4d80-82d9-b7c6fd83aa38 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Releasing lock "e048cadf-9dc1-4eb7-a825-422d0736231c" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1117.307453] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-2e2e3db1-d349-4d80-82d9-b7c6fd83aa38 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: e048cadf-9dc1-4eb7-a825-422d0736231c] Reconfigured VM to detach interface {{(pid=63028) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1117.756054] env[63028]: DEBUG oslo_vmware.api [None req-4449cd42-ad53-4e8c-abd8-2aebc5abfa82 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2736359, 'name': ReconfigVM_Task, 'duration_secs': 0.2723} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1117.756548] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-4449cd42-ad53-4e8c-abd8-2aebc5abfa82 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Reconfigured VM instance instance-00000070 to attach disk [datastore2] e5767896-8203-4b18-826f-dcb2fe02268e/e5767896-8203-4b18-826f-dcb2fe02268e.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1117.757029] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-22358a12-deb9-4526-ad6c-45d51a6ff98c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.763175] env[63028]: DEBUG oslo_vmware.api [None req-4449cd42-ad53-4e8c-abd8-2aebc5abfa82 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Waiting for the task: (returnval){ [ 1117.763175] env[63028]: value = "task-2736360" [ 1117.763175] env[63028]: _type = "Task" [ 1117.763175] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1117.774188] env[63028]: DEBUG oslo_vmware.api [None req-4449cd42-ad53-4e8c-abd8-2aebc5abfa82 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2736360, 'name': Rename_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.815006] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39f48d36-70ce-45d2-afe7-197f3dd83909 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.822573] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49eaa2a3-0444-4533-8cd8-d832b1de4466 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.856472] env[63028]: DEBUG nova.network.neutron [None req-99cedbf3-0a36-40f6-8b01-7499b21e23d7 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 092c7673-97fb-4085-852c-04a7c19a73e7] Instance cache missing network info. 
{{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1117.858981] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4233ecf5-d885-426d-8597-d46d525fb01e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.866828] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1413a9a-2135-4bdc-925a-3fa8ed5306f8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.880236] env[63028]: DEBUG nova.compute.provider_tree [None req-adaba13b-512a-4572-b6cc-ff868e6f461c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Updating inventory in ProviderTree for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1118.006483] env[63028]: DEBUG nova.network.neutron [None req-99cedbf3-0a36-40f6-8b01-7499b21e23d7 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 092c7673-97fb-4085-852c-04a7c19a73e7] Updating instance_info_cache with network_info: [{"id": "eda2613a-55b1-4516-80ce-192d52a6abe6", "address": "fa:16:3e:43:ad:63", "network": {"id": "a3d9dede-4b44-4172-9541-24cc42ad633f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-595575190-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "847e89af959a4266ab55c1d2106ba8fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8459aaf-d6a8-46fb-ad14-464ac3104695", "external-id": "nsx-vlan-transportzone-46", "segmentation_id": 46, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeda2613a-55", "ovs_interfaceid": "eda2613a-55b1-4516-80ce-192d52a6abe6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1118.272987] env[63028]: DEBUG oslo_vmware.api [None req-4449cd42-ad53-4e8c-abd8-2aebc5abfa82 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2736360, 'name': Rename_Task, 'duration_secs': 0.126872} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1118.273376] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-4449cd42-ad53-4e8c-abd8-2aebc5abfa82 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1118.273626] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5574450c-6870-422b-b1cb-93ace032e844 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.279021] env[63028]: DEBUG oslo_vmware.api [None req-4449cd42-ad53-4e8c-abd8-2aebc5abfa82 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Waiting for the task: (returnval){ [ 1118.279021] env[63028]: value = "task-2736361" [ 1118.279021] env[63028]: _type = "Task" [ 1118.279021] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1118.286398] env[63028]: DEBUG oslo_vmware.api [None req-4449cd42-ad53-4e8c-abd8-2aebc5abfa82 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2736361, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.422921] env[63028]: DEBUG nova.scheduler.client.report [None req-adaba13b-512a-4572-b6cc-ff868e6f461c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Updated inventory for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 with generation 153 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1118.423225] env[63028]: DEBUG nova.compute.provider_tree [None req-adaba13b-512a-4572-b6cc-ff868e6f461c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Updating resource provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 generation from 153 to 154 during operation: update_inventory {{(pid=63028) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1118.423624] env[63028]: DEBUG nova.compute.provider_tree [None req-adaba13b-512a-4572-b6cc-ff868e6f461c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Updating inventory in ProviderTree for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1118.428398] env[63028]: DEBUG nova.compute.manager 
[req-1105858f-174a-4d09-8c92-f87f8e30f0ff req-e8b16f3a-41a6-45b7-963d-26bd972cc728 service nova] [instance: 092c7673-97fb-4085-852c-04a7c19a73e7] Received event network-vif-plugged-eda2613a-55b1-4516-80ce-192d52a6abe6 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1118.428611] env[63028]: DEBUG oslo_concurrency.lockutils [req-1105858f-174a-4d09-8c92-f87f8e30f0ff req-e8b16f3a-41a6-45b7-963d-26bd972cc728 service nova] Acquiring lock "092c7673-97fb-4085-852c-04a7c19a73e7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1118.428807] env[63028]: DEBUG oslo_concurrency.lockutils [req-1105858f-174a-4d09-8c92-f87f8e30f0ff req-e8b16f3a-41a6-45b7-963d-26bd972cc728 service nova] Lock "092c7673-97fb-4085-852c-04a7c19a73e7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1118.428970] env[63028]: DEBUG oslo_concurrency.lockutils [req-1105858f-174a-4d09-8c92-f87f8e30f0ff req-e8b16f3a-41a6-45b7-963d-26bd972cc728 service nova] Lock "092c7673-97fb-4085-852c-04a7c19a73e7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1118.429154] env[63028]: DEBUG nova.compute.manager [req-1105858f-174a-4d09-8c92-f87f8e30f0ff req-e8b16f3a-41a6-45b7-963d-26bd972cc728 service nova] [instance: 092c7673-97fb-4085-852c-04a7c19a73e7] No waiting events found dispatching network-vif-plugged-eda2613a-55b1-4516-80ce-192d52a6abe6 {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1118.429319] env[63028]: WARNING nova.compute.manager [req-1105858f-174a-4d09-8c92-f87f8e30f0ff req-e8b16f3a-41a6-45b7-963d-26bd972cc728 service nova] [instance: 092c7673-97fb-4085-852c-04a7c19a73e7] Received unexpected event network-vif-plugged-eda2613a-55b1-4516-80ce-192d52a6abe6 for instance with vm_state building and task_state spawning. [ 1118.429476] env[63028]: DEBUG nova.compute.manager [req-1105858f-174a-4d09-8c92-f87f8e30f0ff req-e8b16f3a-41a6-45b7-963d-26bd972cc728 service nova] [instance: 092c7673-97fb-4085-852c-04a7c19a73e7] Received event network-changed-eda2613a-55b1-4516-80ce-192d52a6abe6 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1118.429626] env[63028]: DEBUG nova.compute.manager [req-1105858f-174a-4d09-8c92-f87f8e30f0ff req-e8b16f3a-41a6-45b7-963d-26bd972cc728 service nova] [instance: 092c7673-97fb-4085-852c-04a7c19a73e7] Refreshing instance network info cache due to event network-changed-eda2613a-55b1-4516-80ce-192d52a6abe6. 
{{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1118.429788] env[63028]: DEBUG oslo_concurrency.lockutils [req-1105858f-174a-4d09-8c92-f87f8e30f0ff req-e8b16f3a-41a6-45b7-963d-26bd972cc728 service nova] Acquiring lock "refresh_cache-092c7673-97fb-4085-852c-04a7c19a73e7" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1118.509259] env[63028]: DEBUG oslo_concurrency.lockutils [None req-99cedbf3-0a36-40f6-8b01-7499b21e23d7 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Releasing lock "refresh_cache-092c7673-97fb-4085-852c-04a7c19a73e7" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1118.509591] env[63028]: DEBUG nova.compute.manager [None req-99cedbf3-0a36-40f6-8b01-7499b21e23d7 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 092c7673-97fb-4085-852c-04a7c19a73e7] Instance network_info: |[{"id": "eda2613a-55b1-4516-80ce-192d52a6abe6", "address": "fa:16:3e:43:ad:63", "network": {"id": "a3d9dede-4b44-4172-9541-24cc42ad633f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-595575190-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "847e89af959a4266ab55c1d2106ba8fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8459aaf-d6a8-46fb-ad14-464ac3104695", "external-id": "nsx-vlan-transportzone-46", "segmentation_id": 46, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeda2613a-55", "ovs_interfaceid": "eda2613a-55b1-4516-80ce-192d52a6abe6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1118.509886] env[63028]: DEBUG oslo_concurrency.lockutils [req-1105858f-174a-4d09-8c92-f87f8e30f0ff req-e8b16f3a-41a6-45b7-963d-26bd972cc728 service nova] Acquired lock "refresh_cache-092c7673-97fb-4085-852c-04a7c19a73e7" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1118.510078] env[63028]: DEBUG nova.network.neutron [req-1105858f-174a-4d09-8c92-f87f8e30f0ff req-e8b16f3a-41a6-45b7-963d-26bd972cc728 service nova] [instance: 092c7673-97fb-4085-852c-04a7c19a73e7] Refreshing network info cache for port eda2613a-55b1-4516-80ce-192d52a6abe6 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1118.511253] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-99cedbf3-0a36-40f6-8b01-7499b21e23d7 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 092c7673-97fb-4085-852c-04a7c19a73e7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:43:ad:63', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c8459aaf-d6a8-46fb-ad14-464ac3104695', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'eda2613a-55b1-4516-80ce-192d52a6abe6', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1118.519100] env[63028]: DEBUG oslo.service.loopingcall [None req-99cedbf3-0a36-40f6-8b01-7499b21e23d7 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1118.522169] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 092c7673-97fb-4085-852c-04a7c19a73e7] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1118.522647] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-25d83d83-9355-4ea9-8161-dc4d8d2d39ec {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.542444] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1118.542444] env[63028]: value = "task-2736362" [ 1118.542444] env[63028]: _type = "Task" [ 1118.542444] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1118.556627] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2736362, 'name': CreateVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.579608] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2e2e3db1-d349-4d80-82d9-b7c6fd83aa38 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Acquiring lock "refresh_cache-e048cadf-9dc1-4eb7-a825-422d0736231c" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1118.579784] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2e2e3db1-d349-4d80-82d9-b7c6fd83aa38 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Acquired lock "refresh_cache-e048cadf-9dc1-4eb7-a825-422d0736231c" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1118.579950] env[63028]: DEBUG nova.network.neutron [None req-2e2e3db1-d349-4d80-82d9-b7c6fd83aa38 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: e048cadf-9dc1-4eb7-a825-422d0736231c] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1118.779861] env[63028]: DEBUG nova.network.neutron [req-1105858f-174a-4d09-8c92-f87f8e30f0ff req-e8b16f3a-41a6-45b7-963d-26bd972cc728 service nova] [instance: 092c7673-97fb-4085-852c-04a7c19a73e7] Updated VIF entry in instance network info cache for port eda2613a-55b1-4516-80ce-192d52a6abe6. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1118.780337] env[63028]: DEBUG nova.network.neutron [req-1105858f-174a-4d09-8c92-f87f8e30f0ff req-e8b16f3a-41a6-45b7-963d-26bd972cc728 service nova] [instance: 092c7673-97fb-4085-852c-04a7c19a73e7] Updating instance_info_cache with network_info: [{"id": "eda2613a-55b1-4516-80ce-192d52a6abe6", "address": "fa:16:3e:43:ad:63", "network": {"id": "a3d9dede-4b44-4172-9541-24cc42ad633f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-595575190-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "847e89af959a4266ab55c1d2106ba8fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8459aaf-d6a8-46fb-ad14-464ac3104695", "external-id": "nsx-vlan-transportzone-46", "segmentation_id": 46, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeda2613a-55", "ovs_interfaceid": "eda2613a-55b1-4516-80ce-192d52a6abe6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1118.803632] env[63028]: DEBUG oslo_vmware.api [None req-4449cd42-ad53-4e8c-abd8-2aebc5abfa82 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2736361, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.934020] env[63028]: DEBUG oslo_concurrency.lockutils [None req-adaba13b-512a-4572-b6cc-ff868e6f461c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.366s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1118.934020] env[63028]: DEBUG nova.compute.manager [None req-adaba13b-512a-4572-b6cc-ff868e6f461c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 629a3b6f-a74b-4193-bcf4-fc67a1752d5b] Start building networks asynchronously for instance. 
{{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1118.935831] env[63028]: DEBUG oslo_concurrency.lockutils [None req-3666919c-7162-48c7-88b9-57d0a5663d42 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.856s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1118.936234] env[63028]: DEBUG nova.objects.instance [None req-3666919c-7162-48c7-88b9-57d0a5663d42 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Lazy-loading 'resources' on Instance uuid 1d008794-3c1a-46c6-b4eb-3d5441efdb22 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1119.052307] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2736362, 'name': CreateVM_Task, 'duration_secs': 0.339004} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1119.052507] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 092c7673-97fb-4085-852c-04a7c19a73e7] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1119.053184] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-99cedbf3-0a36-40f6-8b01-7499b21e23d7 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 092c7673-97fb-4085-852c-04a7c19a73e7] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'delete_on_termination': True, 'guest_format': None, 'disk_bus': None, 'mount_device': '/dev/sda', 'attachment_id': '94f0e19c-dc02-4acd-b907-0cf3ff7afc6d', 'boot_index': 0, 'device_type': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-550871', 'volume_id': 'bc407df1-2bc3-4054-b5d6-b4b6863f2c75', 'name': 'volume-bc407df1-2bc3-4054-b5d6-b4b6863f2c75', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '092c7673-97fb-4085-852c-04a7c19a73e7', 'attached_at': '', 'detached_at': '', 'volume_id': 'bc407df1-2bc3-4054-b5d6-b4b6863f2c75', 'serial': 'bc407df1-2bc3-4054-b5d6-b4b6863f2c75'}, 'volume_type': None}], 'swap': None} {{(pid=63028) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1119.053402] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-99cedbf3-0a36-40f6-8b01-7499b21e23d7 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 092c7673-97fb-4085-852c-04a7c19a73e7] Root volume attach. 
Driver type: vmdk {{(pid=63028) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 1119.054181] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3258296f-e511-4a71-a1bb-f26b3de00b65 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.061498] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-369de3ea-3c7a-4eb9-8e9f-faf56218fed3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.067762] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2410b67a-506b-4900-b41d-f36ea814e96b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.073681] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-4b2048db-cb07-4318-ad1c-22560e6a3d74 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.079869] env[63028]: DEBUG oslo_vmware.api [None req-99cedbf3-0a36-40f6-8b01-7499b21e23d7 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Waiting for the task: (returnval){ [ 1119.079869] env[63028]: value = "task-2736363" [ 1119.079869] env[63028]: _type = "Task" [ 1119.079869] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1119.095264] env[63028]: DEBUG oslo_vmware.api [None req-99cedbf3-0a36-40f6-8b01-7499b21e23d7 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2736363, 'name': RelocateVM_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1119.292035] env[63028]: DEBUG oslo_concurrency.lockutils [req-1105858f-174a-4d09-8c92-f87f8e30f0ff req-e8b16f3a-41a6-45b7-963d-26bd972cc728 service nova] Releasing lock "refresh_cache-092c7673-97fb-4085-852c-04a7c19a73e7" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1119.296439] env[63028]: DEBUG oslo_vmware.api [None req-4449cd42-ad53-4e8c-abd8-2aebc5abfa82 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2736361, 'name': PowerOnVM_Task, 'duration_secs': 0.554878} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1119.296862] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-4449cd42-ad53-4e8c-abd8-2aebc5abfa82 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1119.297081] env[63028]: INFO nova.compute.manager [None req-4449cd42-ad53-4e8c-abd8-2aebc5abfa82 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Took 8.14 seconds to spawn the instance on the hypervisor. 
[ 1119.297265] env[63028]: DEBUG nova.compute.manager [None req-4449cd42-ad53-4e8c-abd8-2aebc5abfa82 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1119.298343] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6382710-f934-47f3-b2fb-409ea17e1de4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.426507] env[63028]: INFO nova.network.neutron [None req-2e2e3db1-d349-4d80-82d9-b7c6fd83aa38 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: e048cadf-9dc1-4eb7-a825-422d0736231c] Port dfcd47cc-53f0-4202-ba7f-b439ff3a8fe7 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. [ 1119.428055] env[63028]: DEBUG nova.network.neutron [None req-2e2e3db1-d349-4d80-82d9-b7c6fd83aa38 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: e048cadf-9dc1-4eb7-a825-422d0736231c] Updating instance_info_cache with network_info: [{"id": "60891063-6c30-480a-8e2b-f3960496f2fd", "address": "fa:16:3e:84:9a:c5", "network": {"id": "c2f1496c-e3fd-43db-a032-12cdacdb4e46", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1287620143-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "25a6457f62d149629c09589feb1a550c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap60891063-6c", "ovs_interfaceid": "60891063-6c30-480a-8e2b-f3960496f2fd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1119.445382] env[63028]: DEBUG nova.compute.utils [None req-adaba13b-512a-4572-b6cc-ff868e6f461c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1119.447153] env[63028]: DEBUG nova.compute.manager [None req-adaba13b-512a-4572-b6cc-ff868e6f461c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 629a3b6f-a74b-4193-bcf4-fc67a1752d5b] Allocating IP information in the background. 
{{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1119.447153] env[63028]: DEBUG nova.network.neutron [None req-adaba13b-512a-4572-b6cc-ff868e6f461c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 629a3b6f-a74b-4193-bcf4-fc67a1752d5b] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1119.519151] env[63028]: DEBUG nova.policy [None req-adaba13b-512a-4572-b6cc-ff868e6f461c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3c022ca18b0a41ce9d790fa25f6ebf8c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ea26842446ec4691a6456a6659188704', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 1119.592674] env[63028]: DEBUG oslo_vmware.api [None req-99cedbf3-0a36-40f6-8b01-7499b21e23d7 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2736363, 'name': RelocateVM_Task, 'duration_secs': 0.028424} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1119.597332] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-99cedbf3-0a36-40f6-8b01-7499b21e23d7 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 092c7673-97fb-4085-852c-04a7c19a73e7] Volume attach. 
Driver type: vmdk {{(pid=63028) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1119.597740] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-99cedbf3-0a36-40f6-8b01-7499b21e23d7 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 092c7673-97fb-4085-852c-04a7c19a73e7] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-550871', 'volume_id': 'bc407df1-2bc3-4054-b5d6-b4b6863f2c75', 'name': 'volume-bc407df1-2bc3-4054-b5d6-b4b6863f2c75', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '092c7673-97fb-4085-852c-04a7c19a73e7', 'attached_at': '', 'detached_at': '', 'volume_id': 'bc407df1-2bc3-4054-b5d6-b4b6863f2c75', 'serial': 'bc407df1-2bc3-4054-b5d6-b4b6863f2c75'} {{(pid=63028) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1119.599125] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fe8e161-e6aa-497e-bfba-b4a8acd42d31 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.621802] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cc4b1d9-6ad9-40b2-9124-263486cac4cd {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.646496] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-99cedbf3-0a36-40f6-8b01-7499b21e23d7 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 092c7673-97fb-4085-852c-04a7c19a73e7] Reconfiguring VM instance instance-00000071 to attach disk [datastore2] volume-bc407df1-2bc3-4054-b5d6-b4b6863f2c75/volume-bc407df1-2bc3-4054-b5d6-b4b6863f2c75.vmdk or device None with type thin {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1119.649799] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-026af455-3cf7-45a7-9f2c-28fad9caa7fe {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.670990] env[63028]: DEBUG oslo_vmware.api [None req-99cedbf3-0a36-40f6-8b01-7499b21e23d7 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Waiting for the task: (returnval){ [ 1119.670990] env[63028]: value = "task-2736364" [ 1119.670990] env[63028]: _type = "Task" [ 1119.670990] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1119.680948] env[63028]: DEBUG oslo_vmware.api [None req-99cedbf3-0a36-40f6-8b01-7499b21e23d7 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2736364, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1119.787381] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fa2e275-ba00-49ae-b50d-9b192eedfd98 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.796503] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8188bcb2-599b-4aad-afec-3673d5be64f7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.833930] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-765d2d33-4482-4d99-abb9-b0af9dc088e8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.838762] env[63028]: INFO nova.compute.manager [None req-4449cd42-ad53-4e8c-abd8-2aebc5abfa82 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Took 21.70 seconds to build instance. [ 1119.845023] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cee4858-39af-4aa8-b2d1-ca54d895f8d2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.856744] env[63028]: DEBUG nova.compute.provider_tree [None req-3666919c-7162-48c7-88b9-57d0a5663d42 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1119.913025] env[63028]: DEBUG nova.network.neutron [None req-adaba13b-512a-4572-b6cc-ff868e6f461c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 629a3b6f-a74b-4193-bcf4-fc67a1752d5b] Successfully created port: 54b45b5c-ea6a-4064-b224-29875b66f9a1 {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1119.929938] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2e2e3db1-d349-4d80-82d9-b7c6fd83aa38 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Releasing lock "refresh_cache-e048cadf-9dc1-4eb7-a825-422d0736231c" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1119.949019] env[63028]: DEBUG nova.compute.manager [None req-adaba13b-512a-4572-b6cc-ff868e6f461c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 629a3b6f-a74b-4193-bcf4-fc67a1752d5b] Start building block device mappings for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1120.180986] env[63028]: DEBUG oslo_vmware.api [None req-99cedbf3-0a36-40f6-8b01-7499b21e23d7 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2736364, 'name': ReconfigVM_Task, 'duration_secs': 0.275992} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1120.181273] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-99cedbf3-0a36-40f6-8b01-7499b21e23d7 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 092c7673-97fb-4085-852c-04a7c19a73e7] Reconfigured VM instance instance-00000071 to attach disk [datastore2] volume-bc407df1-2bc3-4054-b5d6-b4b6863f2c75/volume-bc407df1-2bc3-4054-b5d6-b4b6863f2c75.vmdk or device None with type thin {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1120.186087] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-383a16fa-8159-4a08-be26-32fda0b2d62c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.200781] env[63028]: DEBUG oslo_vmware.api [None req-99cedbf3-0a36-40f6-8b01-7499b21e23d7 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Waiting for the task: (returnval){ [ 1120.200781] env[63028]: value = "task-2736365" [ 1120.200781] env[63028]: _type = "Task" [ 1120.200781] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1120.208910] env[63028]: DEBUG oslo_vmware.api [None req-99cedbf3-0a36-40f6-8b01-7499b21e23d7 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2736365, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1120.235111] env[63028]: DEBUG nova.compute.manager [req-12fb82f5-0845-4d7b-95ea-993175c818a6 req-4406d632-591e-48fd-bc7a-66c3f9206939 service nova] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Received event network-changed-f16f5758-9834-448c-8002-199fff053deb {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1120.235215] env[63028]: DEBUG nova.compute.manager [req-12fb82f5-0845-4d7b-95ea-993175c818a6 req-4406d632-591e-48fd-bc7a-66c3f9206939 service nova] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Refreshing instance network info cache due to event network-changed-f16f5758-9834-448c-8002-199fff053deb. 
{{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1120.235477] env[63028]: DEBUG oslo_concurrency.lockutils [req-12fb82f5-0845-4d7b-95ea-993175c818a6 req-4406d632-591e-48fd-bc7a-66c3f9206939 service nova] Acquiring lock "refresh_cache-e5767896-8203-4b18-826f-dcb2fe02268e" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1120.235780] env[63028]: DEBUG oslo_concurrency.lockutils [req-12fb82f5-0845-4d7b-95ea-993175c818a6 req-4406d632-591e-48fd-bc7a-66c3f9206939 service nova] Acquired lock "refresh_cache-e5767896-8203-4b18-826f-dcb2fe02268e" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1120.235871] env[63028]: DEBUG nova.network.neutron [req-12fb82f5-0845-4d7b-95ea-993175c818a6 req-4406d632-591e-48fd-bc7a-66c3f9206939 service nova] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Refreshing network info cache for port f16f5758-9834-448c-8002-199fff053deb {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1120.347651] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4449cd42-ad53-4e8c-abd8-2aebc5abfa82 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Lock "e5767896-8203-4b18-826f-dcb2fe02268e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.220s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1120.359678] env[63028]: DEBUG nova.scheduler.client.report [None req-3666919c-7162-48c7-88b9-57d0a5663d42 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1120.434690] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2e2e3db1-d349-4d80-82d9-b7c6fd83aa38 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Lock "interface-e048cadf-9dc1-4eb7-a825-422d0736231c-dfcd47cc-53f0-4202-ba7f-b439ff3a8fe7" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 9.734s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1120.459803] env[63028]: DEBUG nova.compute.manager [req-6dd90386-2cba-43fe-9505-a58123acef37 req-046fa36f-0597-48ef-a408-34c38fe12c05 service nova] [instance: e048cadf-9dc1-4eb7-a825-422d0736231c] Received event network-changed-60891063-6c30-480a-8e2b-f3960496f2fd {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1120.459856] env[63028]: DEBUG nova.compute.manager [req-6dd90386-2cba-43fe-9505-a58123acef37 req-046fa36f-0597-48ef-a408-34c38fe12c05 service nova] [instance: e048cadf-9dc1-4eb7-a825-422d0736231c] Refreshing instance network info cache due to event network-changed-60891063-6c30-480a-8e2b-f3960496f2fd. 
{{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1120.460109] env[63028]: DEBUG oslo_concurrency.lockutils [req-6dd90386-2cba-43fe-9505-a58123acef37 req-046fa36f-0597-48ef-a408-34c38fe12c05 service nova] Acquiring lock "refresh_cache-e048cadf-9dc1-4eb7-a825-422d0736231c" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1120.460213] env[63028]: DEBUG oslo_concurrency.lockutils [req-6dd90386-2cba-43fe-9505-a58123acef37 req-046fa36f-0597-48ef-a408-34c38fe12c05 service nova] Acquired lock "refresh_cache-e048cadf-9dc1-4eb7-a825-422d0736231c" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1120.460373] env[63028]: DEBUG nova.network.neutron [req-6dd90386-2cba-43fe-9505-a58123acef37 req-046fa36f-0597-48ef-a408-34c38fe12c05 service nova] [instance: e048cadf-9dc1-4eb7-a825-422d0736231c] Refreshing network info cache for port 60891063-6c30-480a-8e2b-f3960496f2fd {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1120.476196] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5fe89979-da5d-4c42-ab4e-ca05197b8da3 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Acquiring lock "interface-899496ae-8463-42e0-a287-b141d956fa0a-dfcd47cc-53f0-4202-ba7f-b439ff3a8fe7" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1120.476449] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5fe89979-da5d-4c42-ab4e-ca05197b8da3 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Lock "interface-899496ae-8463-42e0-a287-b141d956fa0a-dfcd47cc-53f0-4202-ba7f-b439ff3a8fe7" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1120.476793] env[63028]: DEBUG nova.objects.instance [None req-5fe89979-da5d-4c42-ab4e-ca05197b8da3 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Lazy-loading 'flavor' on Instance uuid 899496ae-8463-42e0-a287-b141d956fa0a {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1120.711281] env[63028]: DEBUG oslo_vmware.api [None req-99cedbf3-0a36-40f6-8b01-7499b21e23d7 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2736365, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1120.866602] env[63028]: DEBUG oslo_concurrency.lockutils [None req-3666919c-7162-48c7-88b9-57d0a5663d42 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.931s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1120.868701] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7af5ed24-bb6e-4b2f-bc1a-f59b70216bda tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.896s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1120.870124] env[63028]: INFO nova.compute.claims [None req-7af5ed24-bb6e-4b2f-bc1a-f59b70216bda tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: f96d4bcd-a032-4e4d-94e4-12d7013d5e3f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1120.890908] env[63028]: INFO nova.scheduler.client.report [None req-3666919c-7162-48c7-88b9-57d0a5663d42 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Deleted allocations for instance 1d008794-3c1a-46c6-b4eb-3d5441efdb22 [ 1120.956327] env[63028]: DEBUG nova.compute.manager [None req-adaba13b-512a-4572-b6cc-ff868e6f461c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 629a3b6f-a74b-4193-bcf4-fc67a1752d5b] Start spawning the instance on the hypervisor. {{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1120.986361] env[63028]: DEBUG nova.network.neutron [req-12fb82f5-0845-4d7b-95ea-993175c818a6 req-4406d632-591e-48fd-bc7a-66c3f9206939 service nova] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Updated VIF entry in instance network info cache for port f16f5758-9834-448c-8002-199fff053deb. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1120.986709] env[63028]: DEBUG nova.network.neutron [req-12fb82f5-0845-4d7b-95ea-993175c818a6 req-4406d632-591e-48fd-bc7a-66c3f9206939 service nova] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Updating instance_info_cache with network_info: [{"id": "f16f5758-9834-448c-8002-199fff053deb", "address": "fa:16:3e:95:4e:63", "network": {"id": "2b335013-49f6-4f84-b6b6-33d71818b2b9", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-2106845332-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.251", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "11332c2adbdc41928d4bf084435e2037", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f01bbee7-8b9a-46be-891e-59d8142fb359", "external-id": "nsx-vlan-transportzone-145", "segmentation_id": 145, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf16f5758-98", "ovs_interfaceid": "f16f5758-9834-448c-8002-199fff053deb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1120.989632] env[63028]: DEBUG nova.virt.hardware [None req-adaba13b-512a-4572-b6cc-ff868e6f461c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1120.989853] env[63028]: DEBUG nova.virt.hardware [None req-adaba13b-512a-4572-b6cc-ff868e6f461c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1120.990018] env[63028]: DEBUG nova.virt.hardware [None req-adaba13b-512a-4572-b6cc-ff868e6f461c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1120.990203] env[63028]: DEBUG nova.virt.hardware [None req-adaba13b-512a-4572-b6cc-ff868e6f461c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Flavor pref 0:0:0 
{{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1120.990347] env[63028]: DEBUG nova.virt.hardware [None req-adaba13b-512a-4572-b6cc-ff868e6f461c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1120.990492] env[63028]: DEBUG nova.virt.hardware [None req-adaba13b-512a-4572-b6cc-ff868e6f461c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1120.990694] env[63028]: DEBUG nova.virt.hardware [None req-adaba13b-512a-4572-b6cc-ff868e6f461c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1120.990851] env[63028]: DEBUG nova.virt.hardware [None req-adaba13b-512a-4572-b6cc-ff868e6f461c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1120.991021] env[63028]: DEBUG nova.virt.hardware [None req-adaba13b-512a-4572-b6cc-ff868e6f461c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1120.991188] env[63028]: DEBUG nova.virt.hardware [None req-adaba13b-512a-4572-b6cc-ff868e6f461c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1120.991361] env[63028]: DEBUG nova.virt.hardware [None req-adaba13b-512a-4572-b6cc-ff868e6f461c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1120.992252] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-747dd565-02c0-419a-81d3-811e9f133f7d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.000553] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6582d6dc-50f0-4940-9220-458092beba25 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.124987] env[63028]: DEBUG nova.objects.instance [None req-5fe89979-da5d-4c42-ab4e-ca05197b8da3 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Lazy-loading 'pci_requests' on Instance uuid 899496ae-8463-42e0-a287-b141d956fa0a {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1121.212168] env[63028]: DEBUG oslo_vmware.api [None req-99cedbf3-0a36-40f6-8b01-7499b21e23d7 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2736365, 
'name': ReconfigVM_Task} progress is 99%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.215340] env[63028]: DEBUG nova.network.neutron [req-6dd90386-2cba-43fe-9505-a58123acef37 req-046fa36f-0597-48ef-a408-34c38fe12c05 service nova] [instance: e048cadf-9dc1-4eb7-a825-422d0736231c] Updated VIF entry in instance network info cache for port 60891063-6c30-480a-8e2b-f3960496f2fd. {{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1121.215666] env[63028]: DEBUG nova.network.neutron [req-6dd90386-2cba-43fe-9505-a58123acef37 req-046fa36f-0597-48ef-a408-34c38fe12c05 service nova] [instance: e048cadf-9dc1-4eb7-a825-422d0736231c] Updating instance_info_cache with network_info: [{"id": "60891063-6c30-480a-8e2b-f3960496f2fd", "address": "fa:16:3e:84:9a:c5", "network": {"id": "c2f1496c-e3fd-43db-a032-12cdacdb4e46", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1287620143-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "25a6457f62d149629c09589feb1a550c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap60891063-6c", "ovs_interfaceid": "60891063-6c30-480a-8e2b-f3960496f2fd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1121.400126] env[63028]: DEBUG oslo_concurrency.lockutils [None req-3666919c-7162-48c7-88b9-57d0a5663d42 tempest-ServerRescueNegativeTestJSON-1840407136 tempest-ServerRescueNegativeTestJSON-1840407136-project-member] Lock "1d008794-3c1a-46c6-b4eb-3d5441efdb22" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.722s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1121.434224] env[63028]: DEBUG nova.network.neutron [None req-adaba13b-512a-4572-b6cc-ff868e6f461c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 629a3b6f-a74b-4193-bcf4-fc67a1752d5b] Successfully updated port: 54b45b5c-ea6a-4064-b224-29875b66f9a1 {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1121.496024] env[63028]: DEBUG oslo_concurrency.lockutils [req-12fb82f5-0845-4d7b-95ea-993175c818a6 req-4406d632-591e-48fd-bc7a-66c3f9206939 service nova] Releasing lock "refresh_cache-e5767896-8203-4b18-826f-dcb2fe02268e" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1121.629016] env[63028]: DEBUG nova.objects.base [None req-5fe89979-da5d-4c42-ab4e-ca05197b8da3 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Object Instance<899496ae-8463-42e0-a287-b141d956fa0a> lazy-loaded attributes: flavor,pci_requests {{(pid=63028) wrapper 
/opt/stack/nova/nova/objects/base.py:136}} [ 1121.629016] env[63028]: DEBUG nova.network.neutron [None req-5fe89979-da5d-4c42-ab4e-ca05197b8da3 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: 899496ae-8463-42e0-a287-b141d956fa0a] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1121.691535] env[63028]: DEBUG nova.policy [None req-5fe89979-da5d-4c42-ab4e-ca05197b8da3 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1b48f3f2a85945379bdb33bf153bde9c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '25a6457f62d149629c09589feb1a550c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 1121.712955] env[63028]: DEBUG oslo_vmware.api [None req-99cedbf3-0a36-40f6-8b01-7499b21e23d7 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2736365, 'name': ReconfigVM_Task, 'duration_secs': 1.126328} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1121.713331] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-99cedbf3-0a36-40f6-8b01-7499b21e23d7 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 092c7673-97fb-4085-852c-04a7c19a73e7] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-550871', 'volume_id': 'bc407df1-2bc3-4054-b5d6-b4b6863f2c75', 'name': 'volume-bc407df1-2bc3-4054-b5d6-b4b6863f2c75', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '092c7673-97fb-4085-852c-04a7c19a73e7', 'attached_at': '', 'detached_at': '', 'volume_id': 'bc407df1-2bc3-4054-b5d6-b4b6863f2c75', 'serial': 'bc407df1-2bc3-4054-b5d6-b4b6863f2c75'} {{(pid=63028) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1121.713918] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d742eb7c-b4d0-4cd5-8963-d604bdd083ff {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.718211] env[63028]: DEBUG oslo_concurrency.lockutils [req-6dd90386-2cba-43fe-9505-a58123acef37 req-046fa36f-0597-48ef-a408-34c38fe12c05 service nova] Releasing lock "refresh_cache-e048cadf-9dc1-4eb7-a825-422d0736231c" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1121.718502] env[63028]: DEBUG nova.compute.manager [req-6dd90386-2cba-43fe-9505-a58123acef37 req-046fa36f-0597-48ef-a408-34c38fe12c05 service nova] [instance: 899496ae-8463-42e0-a287-b141d956fa0a] Received event network-changed-197b3459-f9f1-4fe3-a9ad-169350b4d637 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1121.718717] env[63028]: DEBUG nova.compute.manager [req-6dd90386-2cba-43fe-9505-a58123acef37 req-046fa36f-0597-48ef-a408-34c38fe12c05 service nova] [instance: 
899496ae-8463-42e0-a287-b141d956fa0a] Refreshing instance network info cache due to event network-changed-197b3459-f9f1-4fe3-a9ad-169350b4d637. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1121.718960] env[63028]: DEBUG oslo_concurrency.lockutils [req-6dd90386-2cba-43fe-9505-a58123acef37 req-046fa36f-0597-48ef-a408-34c38fe12c05 service nova] Acquiring lock "refresh_cache-899496ae-8463-42e0-a287-b141d956fa0a" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1121.719153] env[63028]: DEBUG oslo_concurrency.lockutils [req-6dd90386-2cba-43fe-9505-a58123acef37 req-046fa36f-0597-48ef-a408-34c38fe12c05 service nova] Acquired lock "refresh_cache-899496ae-8463-42e0-a287-b141d956fa0a" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1121.719356] env[63028]: DEBUG nova.network.neutron [req-6dd90386-2cba-43fe-9505-a58123acef37 req-046fa36f-0597-48ef-a408-34c38fe12c05 service nova] [instance: 899496ae-8463-42e0-a287-b141d956fa0a] Refreshing network info cache for port 197b3459-f9f1-4fe3-a9ad-169350b4d637 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1121.723956] env[63028]: DEBUG oslo_vmware.api [None req-99cedbf3-0a36-40f6-8b01-7499b21e23d7 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Waiting for the task: (returnval){ [ 1121.723956] env[63028]: value = "task-2736366" [ 1121.723956] env[63028]: _type = "Task" [ 1121.723956] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1121.732712] env[63028]: DEBUG oslo_vmware.api [None req-99cedbf3-0a36-40f6-8b01-7499b21e23d7 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2736366, 'name': Rename_Task} progress is 6%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.937661] env[63028]: DEBUG oslo_concurrency.lockutils [None req-adaba13b-512a-4572-b6cc-ff868e6f461c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Acquiring lock "refresh_cache-629a3b6f-a74b-4193-bcf4-fc67a1752d5b" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1121.938081] env[63028]: DEBUG oslo_concurrency.lockutils [None req-adaba13b-512a-4572-b6cc-ff868e6f461c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Acquired lock "refresh_cache-629a3b6f-a74b-4193-bcf4-fc67a1752d5b" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1121.938409] env[63028]: DEBUG nova.network.neutron [None req-adaba13b-512a-4572-b6cc-ff868e6f461c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 629a3b6f-a74b-4193-bcf4-fc67a1752d5b] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1122.114071] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a6ab559-b2c4-4da8-a17b-9c61c51963af {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.119881] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02120a84-fb32-43b5-961f-d3e7f3774b35 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.153037] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab6e3376-47e6-4303-8815-d67d2e78b532 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.161256] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-234531dd-df4b-4526-8a56-0a355196b932 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.177419] env[63028]: DEBUG nova.compute.provider_tree [None req-7af5ed24-bb6e-4b2f-bc1a-f59b70216bda tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1122.236680] env[63028]: DEBUG oslo_vmware.api [None req-99cedbf3-0a36-40f6-8b01-7499b21e23d7 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2736366, 'name': Rename_Task, 'duration_secs': 0.144579} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1122.236950] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-99cedbf3-0a36-40f6-8b01-7499b21e23d7 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 092c7673-97fb-4085-852c-04a7c19a73e7] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1122.237282] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4c337fca-fabe-40eb-a1d1-46f4498b0ca9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.244863] env[63028]: DEBUG oslo_vmware.api [None req-99cedbf3-0a36-40f6-8b01-7499b21e23d7 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Waiting for the task: (returnval){ [ 1122.244863] env[63028]: value = "task-2736367" [ 1122.244863] env[63028]: _type = "Task" [ 1122.244863] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1122.253022] env[63028]: DEBUG oslo_vmware.api [None req-99cedbf3-0a36-40f6-8b01-7499b21e23d7 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2736367, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.443574] env[63028]: DEBUG nova.network.neutron [req-6dd90386-2cba-43fe-9505-a58123acef37 req-046fa36f-0597-48ef-a408-34c38fe12c05 service nova] [instance: 899496ae-8463-42e0-a287-b141d956fa0a] Updated VIF entry in instance network info cache for port 197b3459-f9f1-4fe3-a9ad-169350b4d637. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1122.444180] env[63028]: DEBUG nova.network.neutron [req-6dd90386-2cba-43fe-9505-a58123acef37 req-046fa36f-0597-48ef-a408-34c38fe12c05 service nova] [instance: 899496ae-8463-42e0-a287-b141d956fa0a] Updating instance_info_cache with network_info: [{"id": "197b3459-f9f1-4fe3-a9ad-169350b4d637", "address": "fa:16:3e:cb:8a:7c", "network": {"id": "c2f1496c-e3fd-43db-a032-12cdacdb4e46", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1287620143-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.172", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "25a6457f62d149629c09589feb1a550c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap197b3459-f9", "ovs_interfaceid": "197b3459-f9f1-4fe3-a9ad-169350b4d637", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1122.473988] env[63028]: DEBUG nova.network.neutron [None req-adaba13b-512a-4572-b6cc-ff868e6f461c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 629a3b6f-a74b-4193-bcf4-fc67a1752d5b] Instance cache missing network info. 
{{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1122.484147] env[63028]: DEBUG nova.compute.manager [req-fa65016a-59d3-46dc-bfcb-bdad55d510f0 req-fca935a5-4bc2-4759-b2bb-196da20c1ef9 service nova] [instance: 629a3b6f-a74b-4193-bcf4-fc67a1752d5b] Received event network-vif-plugged-54b45b5c-ea6a-4064-b224-29875b66f9a1 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1122.484390] env[63028]: DEBUG oslo_concurrency.lockutils [req-fa65016a-59d3-46dc-bfcb-bdad55d510f0 req-fca935a5-4bc2-4759-b2bb-196da20c1ef9 service nova] Acquiring lock "629a3b6f-a74b-4193-bcf4-fc67a1752d5b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1122.484643] env[63028]: DEBUG oslo_concurrency.lockutils [req-fa65016a-59d3-46dc-bfcb-bdad55d510f0 req-fca935a5-4bc2-4759-b2bb-196da20c1ef9 service nova] Lock "629a3b6f-a74b-4193-bcf4-fc67a1752d5b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1122.485343] env[63028]: DEBUG oslo_concurrency.lockutils [req-fa65016a-59d3-46dc-bfcb-bdad55d510f0 req-fca935a5-4bc2-4759-b2bb-196da20c1ef9 service nova] Lock "629a3b6f-a74b-4193-bcf4-fc67a1752d5b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1122.485343] env[63028]: DEBUG nova.compute.manager [req-fa65016a-59d3-46dc-bfcb-bdad55d510f0 req-fca935a5-4bc2-4759-b2bb-196da20c1ef9 service nova] [instance: 629a3b6f-a74b-4193-bcf4-fc67a1752d5b] No waiting events found dispatching network-vif-plugged-54b45b5c-ea6a-4064-b224-29875b66f9a1 {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1122.485343] env[63028]: WARNING nova.compute.manager [req-fa65016a-59d3-46dc-bfcb-bdad55d510f0 req-fca935a5-4bc2-4759-b2bb-196da20c1ef9 service nova] [instance: 629a3b6f-a74b-4193-bcf4-fc67a1752d5b] Received unexpected event network-vif-plugged-54b45b5c-ea6a-4064-b224-29875b66f9a1 for instance with vm_state building and task_state spawning. [ 1122.485476] env[63028]: DEBUG nova.compute.manager [req-fa65016a-59d3-46dc-bfcb-bdad55d510f0 req-fca935a5-4bc2-4759-b2bb-196da20c1ef9 service nova] [instance: 629a3b6f-a74b-4193-bcf4-fc67a1752d5b] Received event network-changed-54b45b5c-ea6a-4064-b224-29875b66f9a1 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1122.485606] env[63028]: DEBUG nova.compute.manager [req-fa65016a-59d3-46dc-bfcb-bdad55d510f0 req-fca935a5-4bc2-4759-b2bb-196da20c1ef9 service nova] [instance: 629a3b6f-a74b-4193-bcf4-fc67a1752d5b] Refreshing instance network info cache due to event network-changed-54b45b5c-ea6a-4064-b224-29875b66f9a1. 
{{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1122.485772] env[63028]: DEBUG oslo_concurrency.lockutils [req-fa65016a-59d3-46dc-bfcb-bdad55d510f0 req-fca935a5-4bc2-4759-b2bb-196da20c1ef9 service nova] Acquiring lock "refresh_cache-629a3b6f-a74b-4193-bcf4-fc67a1752d5b" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1122.602506] env[63028]: DEBUG nova.network.neutron [None req-adaba13b-512a-4572-b6cc-ff868e6f461c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 629a3b6f-a74b-4193-bcf4-fc67a1752d5b] Updating instance_info_cache with network_info: [{"id": "54b45b5c-ea6a-4064-b224-29875b66f9a1", "address": "fa:16:3e:fe:bb:a6", "network": {"id": "1fdce9cd-bf47-402a-8e16-4c01a25ad481", "bridge": "br-int", "label": "tempest-ServersTestJSON-1363423183-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ea26842446ec4691a6456a6659188704", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "706c9762-1cf8-4770-897d-377d0d927773", "external-id": "nsx-vlan-transportzone-402", "segmentation_id": 402, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap54b45b5c-ea", "ovs_interfaceid": "54b45b5c-ea6a-4064-b224-29875b66f9a1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1122.681829] env[63028]: DEBUG nova.scheduler.client.report [None req-7af5ed24-bb6e-4b2f-bc1a-f59b70216bda tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1122.755907] env[63028]: DEBUG oslo_vmware.api [None req-99cedbf3-0a36-40f6-8b01-7499b21e23d7 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2736367, 'name': PowerOnVM_Task, 'duration_secs': 0.489095} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1122.755907] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-99cedbf3-0a36-40f6-8b01-7499b21e23d7 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 092c7673-97fb-4085-852c-04a7c19a73e7] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1122.755907] env[63028]: INFO nova.compute.manager [None req-99cedbf3-0a36-40f6-8b01-7499b21e23d7 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 092c7673-97fb-4085-852c-04a7c19a73e7] Took 5.54 seconds to spawn the instance on the hypervisor. [ 1122.756086] env[63028]: DEBUG nova.compute.manager [None req-99cedbf3-0a36-40f6-8b01-7499b21e23d7 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 092c7673-97fb-4085-852c-04a7c19a73e7] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1122.757086] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a994a50c-208c-45f6-b0ae-efa794304965 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.948337] env[63028]: DEBUG oslo_concurrency.lockutils [req-6dd90386-2cba-43fe-9505-a58123acef37 req-046fa36f-0597-48ef-a408-34c38fe12c05 service nova] Releasing lock "refresh_cache-899496ae-8463-42e0-a287-b141d956fa0a" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1123.105565] env[63028]: DEBUG oslo_concurrency.lockutils [None req-adaba13b-512a-4572-b6cc-ff868e6f461c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Releasing lock "refresh_cache-629a3b6f-a74b-4193-bcf4-fc67a1752d5b" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1123.106021] env[63028]: DEBUG nova.compute.manager [None req-adaba13b-512a-4572-b6cc-ff868e6f461c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 629a3b6f-a74b-4193-bcf4-fc67a1752d5b] Instance network_info: |[{"id": "54b45b5c-ea6a-4064-b224-29875b66f9a1", "address": "fa:16:3e:fe:bb:a6", "network": {"id": "1fdce9cd-bf47-402a-8e16-4c01a25ad481", "bridge": "br-int", "label": "tempest-ServersTestJSON-1363423183-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ea26842446ec4691a6456a6659188704", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "706c9762-1cf8-4770-897d-377d0d927773", "external-id": "nsx-vlan-transportzone-402", "segmentation_id": 402, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap54b45b5c-ea", "ovs_interfaceid": "54b45b5c-ea6a-4064-b224-29875b66f9a1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2003}} [ 1123.107088] env[63028]: DEBUG oslo_concurrency.lockutils [req-fa65016a-59d3-46dc-bfcb-bdad55d510f0 req-fca935a5-4bc2-4759-b2bb-196da20c1ef9 service nova] Acquired lock "refresh_cache-629a3b6f-a74b-4193-bcf4-fc67a1752d5b" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1123.107088] env[63028]: DEBUG nova.network.neutron [req-fa65016a-59d3-46dc-bfcb-bdad55d510f0 req-fca935a5-4bc2-4759-b2bb-196da20c1ef9 service nova] [instance: 629a3b6f-a74b-4193-bcf4-fc67a1752d5b] Refreshing network info cache for port 54b45b5c-ea6a-4064-b224-29875b66f9a1 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1123.108019] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-adaba13b-512a-4572-b6cc-ff868e6f461c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 629a3b6f-a74b-4193-bcf4-fc67a1752d5b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fe:bb:a6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '706c9762-1cf8-4770-897d-377d0d927773', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '54b45b5c-ea6a-4064-b224-29875b66f9a1', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1123.117821] env[63028]: DEBUG oslo.service.loopingcall [None req-adaba13b-512a-4572-b6cc-ff868e6f461c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1123.118587] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 629a3b6f-a74b-4193-bcf4-fc67a1752d5b] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1123.119173] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8d1061cf-b308-48a6-8e82-102e7e854b97 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.141222] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1123.141222] env[63028]: value = "task-2736368" [ 1123.141222] env[63028]: _type = "Task" [ 1123.141222] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1123.149251] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2736368, 'name': CreateVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.190023] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7af5ed24-bb6e-4b2f-bc1a-f59b70216bda tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.319s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1123.190023] env[63028]: DEBUG nova.compute.manager [None req-7af5ed24-bb6e-4b2f-bc1a-f59b70216bda tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: f96d4bcd-a032-4e4d-94e4-12d7013d5e3f] Start building networks asynchronously for instance. 
{{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1123.190969] env[63028]: DEBUG oslo_concurrency.lockutils [None req-1488e619-bdb7-4b84-9595-e9372908e0d7 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.450s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1123.191363] env[63028]: DEBUG nova.objects.instance [None req-1488e619-bdb7-4b84-9595-e9372908e0d7 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Lazy-loading 'resources' on Instance uuid 3d91b47d-a5f7-4a10-aefb-1ad9c84c63e4 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1123.238939] env[63028]: DEBUG nova.network.neutron [None req-5fe89979-da5d-4c42-ab4e-ca05197b8da3 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: 899496ae-8463-42e0-a287-b141d956fa0a] Successfully updated port: dfcd47cc-53f0-4202-ba7f-b439ff3a8fe7 {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1123.273134] env[63028]: INFO nova.compute.manager [None req-99cedbf3-0a36-40f6-8b01-7499b21e23d7 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 092c7673-97fb-4085-852c-04a7c19a73e7] Took 23.42 seconds to build instance. [ 1123.651528] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2736368, 'name': CreateVM_Task, 'duration_secs': 0.298645} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1123.651802] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 629a3b6f-a74b-4193-bcf4-fc67a1752d5b] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1123.652322] env[63028]: DEBUG oslo_concurrency.lockutils [None req-adaba13b-512a-4572-b6cc-ff868e6f461c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1123.652498] env[63028]: DEBUG oslo_concurrency.lockutils [None req-adaba13b-512a-4572-b6cc-ff868e6f461c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1123.652806] env[63028]: DEBUG oslo_concurrency.lockutils [None req-adaba13b-512a-4572-b6cc-ff868e6f461c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1123.653066] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9a610213-801a-4f8d-aab9-4fa0263e7107 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.657079] env[63028]: DEBUG oslo_vmware.api [None 
req-adaba13b-512a-4572-b6cc-ff868e6f461c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Waiting for the task: (returnval){ [ 1123.657079] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]523512aa-f5c5-49ee-4b92-7ab03e6cea15" [ 1123.657079] env[63028]: _type = "Task" [ 1123.657079] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1123.664110] env[63028]: DEBUG oslo_vmware.api [None req-adaba13b-512a-4572-b6cc-ff868e6f461c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]523512aa-f5c5-49ee-4b92-7ab03e6cea15, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.695422] env[63028]: DEBUG nova.compute.utils [None req-7af5ed24-bb6e-4b2f-bc1a-f59b70216bda tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1123.696873] env[63028]: DEBUG nova.compute.manager [None req-7af5ed24-bb6e-4b2f-bc1a-f59b70216bda tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: f96d4bcd-a032-4e4d-94e4-12d7013d5e3f] Allocating IP information in the background. {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1123.697062] env[63028]: DEBUG nova.network.neutron [None req-7af5ed24-bb6e-4b2f-bc1a-f59b70216bda tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: f96d4bcd-a032-4e4d-94e4-12d7013d5e3f] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1123.743566] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5fe89979-da5d-4c42-ab4e-ca05197b8da3 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Acquiring lock "refresh_cache-899496ae-8463-42e0-a287-b141d956fa0a" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1123.743657] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5fe89979-da5d-4c42-ab4e-ca05197b8da3 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Acquired lock "refresh_cache-899496ae-8463-42e0-a287-b141d956fa0a" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1123.743787] env[63028]: DEBUG nova.network.neutron [None req-5fe89979-da5d-4c42-ab4e-ca05197b8da3 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: 899496ae-8463-42e0-a287-b141d956fa0a] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1123.766791] env[63028]: DEBUG nova.policy [None req-7af5ed24-bb6e-4b2f-bc1a-f59b70216bda tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '43ed2fb3f1a944fdac8ee7778f171cd0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8efc6d89903c454eb39136a76e0adef5', 'project_domain_id': 'default', 
'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 1123.774695] env[63028]: DEBUG oslo_concurrency.lockutils [None req-99cedbf3-0a36-40f6-8b01-7499b21e23d7 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Lock "092c7673-97fb-4085-852c-04a7c19a73e7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.932s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1123.881176] env[63028]: DEBUG nova.network.neutron [req-fa65016a-59d3-46dc-bfcb-bdad55d510f0 req-fca935a5-4bc2-4759-b2bb-196da20c1ef9 service nova] [instance: 629a3b6f-a74b-4193-bcf4-fc67a1752d5b] Updated VIF entry in instance network info cache for port 54b45b5c-ea6a-4064-b224-29875b66f9a1. {{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1123.881568] env[63028]: DEBUG nova.network.neutron [req-fa65016a-59d3-46dc-bfcb-bdad55d510f0 req-fca935a5-4bc2-4759-b2bb-196da20c1ef9 service nova] [instance: 629a3b6f-a74b-4193-bcf4-fc67a1752d5b] Updating instance_info_cache with network_info: [{"id": "54b45b5c-ea6a-4064-b224-29875b66f9a1", "address": "fa:16:3e:fe:bb:a6", "network": {"id": "1fdce9cd-bf47-402a-8e16-4c01a25ad481", "bridge": "br-int", "label": "tempest-ServersTestJSON-1363423183-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ea26842446ec4691a6456a6659188704", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "706c9762-1cf8-4770-897d-377d0d927773", "external-id": "nsx-vlan-transportzone-402", "segmentation_id": 402, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap54b45b5c-ea", "ovs_interfaceid": "54b45b5c-ea6a-4064-b224-29875b66f9a1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1123.915188] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b4b01f5-dccf-4e75-96f9-4c7ff5f74e79 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.922892] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a54a30e-c7d2-433e-82a4-442c68452c92 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.959501] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fd4aa74-7cfc-479d-9625-3d8d71e97a54 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.968388] env[63028]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7abe0590-acec-439f-a50e-8ef7c13a76a5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.985357] env[63028]: DEBUG nova.compute.provider_tree [None req-1488e619-bdb7-4b84-9595-e9372908e0d7 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1124.114928] env[63028]: DEBUG nova.network.neutron [None req-7af5ed24-bb6e-4b2f-bc1a-f59b70216bda tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: f96d4bcd-a032-4e4d-94e4-12d7013d5e3f] Successfully created port: f5759890-6244-4b8a-9a03-6d628f2441b7 {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1124.167813] env[63028]: DEBUG oslo_vmware.api [None req-adaba13b-512a-4572-b6cc-ff868e6f461c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]523512aa-f5c5-49ee-4b92-7ab03e6cea15, 'name': SearchDatastore_Task, 'duration_secs': 0.022022} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1124.168116] env[63028]: DEBUG oslo_concurrency.lockutils [None req-adaba13b-512a-4572-b6cc-ff868e6f461c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1124.168346] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-adaba13b-512a-4572-b6cc-ff868e6f461c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 629a3b6f-a74b-4193-bcf4-fc67a1752d5b] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1124.168578] env[63028]: DEBUG oslo_concurrency.lockutils [None req-adaba13b-512a-4572-b6cc-ff868e6f461c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1124.169024] env[63028]: DEBUG oslo_concurrency.lockutils [None req-adaba13b-512a-4572-b6cc-ff868e6f461c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1124.169024] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-adaba13b-512a-4572-b6cc-ff868e6f461c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1124.169170] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9fb69e60-0282-4938-a5d0-7ae49c5fc866 
{{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.182160] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-adaba13b-512a-4572-b6cc-ff868e6f461c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1124.182360] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-adaba13b-512a-4572-b6cc-ff868e6f461c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1124.183102] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f19b1449-f6ae-4476-9e8d-c99249c4b983 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.188513] env[63028]: DEBUG oslo_vmware.api [None req-adaba13b-512a-4572-b6cc-ff868e6f461c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Waiting for the task: (returnval){ [ 1124.188513] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52bf5cea-5afc-3d7c-ee70-2d7effe75272" [ 1124.188513] env[63028]: _type = "Task" [ 1124.188513] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1124.195431] env[63028]: DEBUG oslo_vmware.api [None req-adaba13b-512a-4572-b6cc-ff868e6f461c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52bf5cea-5afc-3d7c-ee70-2d7effe75272, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1124.202836] env[63028]: DEBUG nova.compute.manager [None req-7af5ed24-bb6e-4b2f-bc1a-f59b70216bda tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: f96d4bcd-a032-4e4d-94e4-12d7013d5e3f] Start building block device mappings for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1124.290075] env[63028]: WARNING nova.network.neutron [None req-5fe89979-da5d-4c42-ab4e-ca05197b8da3 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: 899496ae-8463-42e0-a287-b141d956fa0a] c2f1496c-e3fd-43db-a032-12cdacdb4e46 already exists in list: networks containing: ['c2f1496c-e3fd-43db-a032-12cdacdb4e46']. 
ignoring it [ 1124.384703] env[63028]: DEBUG oslo_concurrency.lockutils [req-fa65016a-59d3-46dc-bfcb-bdad55d510f0 req-fca935a5-4bc2-4759-b2bb-196da20c1ef9 service nova] Releasing lock "refresh_cache-629a3b6f-a74b-4193-bcf4-fc67a1752d5b" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1124.488230] env[63028]: DEBUG nova.scheduler.client.report [None req-1488e619-bdb7-4b84-9595-e9372908e0d7 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1124.538543] env[63028]: DEBUG nova.compute.manager [req-89f1322f-1009-4a59-a4c6-d2e63e0e1fd3 req-2fa7a851-0828-4426-b43b-c659f62d0307 service nova] [instance: 899496ae-8463-42e0-a287-b141d956fa0a] Received event network-vif-plugged-dfcd47cc-53f0-4202-ba7f-b439ff3a8fe7 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1124.538777] env[63028]: DEBUG oslo_concurrency.lockutils [req-89f1322f-1009-4a59-a4c6-d2e63e0e1fd3 req-2fa7a851-0828-4426-b43b-c659f62d0307 service nova] Acquiring lock "899496ae-8463-42e0-a287-b141d956fa0a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1124.538986] env[63028]: DEBUG oslo_concurrency.lockutils [req-89f1322f-1009-4a59-a4c6-d2e63e0e1fd3 req-2fa7a851-0828-4426-b43b-c659f62d0307 service nova] Lock "899496ae-8463-42e0-a287-b141d956fa0a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1124.539175] env[63028]: DEBUG oslo_concurrency.lockutils [req-89f1322f-1009-4a59-a4c6-d2e63e0e1fd3 req-2fa7a851-0828-4426-b43b-c659f62d0307 service nova] Lock "899496ae-8463-42e0-a287-b141d956fa0a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1124.539438] env[63028]: DEBUG nova.compute.manager [req-89f1322f-1009-4a59-a4c6-d2e63e0e1fd3 req-2fa7a851-0828-4426-b43b-c659f62d0307 service nova] [instance: 899496ae-8463-42e0-a287-b141d956fa0a] No waiting events found dispatching network-vif-plugged-dfcd47cc-53f0-4202-ba7f-b439ff3a8fe7 {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1124.539647] env[63028]: WARNING nova.compute.manager [req-89f1322f-1009-4a59-a4c6-d2e63e0e1fd3 req-2fa7a851-0828-4426-b43b-c659f62d0307 service nova] [instance: 899496ae-8463-42e0-a287-b141d956fa0a] Received unexpected event network-vif-plugged-dfcd47cc-53f0-4202-ba7f-b439ff3a8fe7 for instance with vm_state active and task_state None. 
[ 1124.539817] env[63028]: DEBUG nova.compute.manager [req-89f1322f-1009-4a59-a4c6-d2e63e0e1fd3 req-2fa7a851-0828-4426-b43b-c659f62d0307 service nova] [instance: 899496ae-8463-42e0-a287-b141d956fa0a] Received event network-changed-dfcd47cc-53f0-4202-ba7f-b439ff3a8fe7 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1124.539972] env[63028]: DEBUG nova.compute.manager [req-89f1322f-1009-4a59-a4c6-d2e63e0e1fd3 req-2fa7a851-0828-4426-b43b-c659f62d0307 service nova] [instance: 899496ae-8463-42e0-a287-b141d956fa0a] Refreshing instance network info cache due to event network-changed-dfcd47cc-53f0-4202-ba7f-b439ff3a8fe7. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1124.540160] env[63028]: DEBUG oslo_concurrency.lockutils [req-89f1322f-1009-4a59-a4c6-d2e63e0e1fd3 req-2fa7a851-0828-4426-b43b-c659f62d0307 service nova] Acquiring lock "refresh_cache-899496ae-8463-42e0-a287-b141d956fa0a" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1124.623790] env[63028]: DEBUG nova.network.neutron [None req-5fe89979-da5d-4c42-ab4e-ca05197b8da3 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: 899496ae-8463-42e0-a287-b141d956fa0a] Updating instance_info_cache with network_info: [{"id": "197b3459-f9f1-4fe3-a9ad-169350b4d637", "address": "fa:16:3e:cb:8a:7c", "network": {"id": "c2f1496c-e3fd-43db-a032-12cdacdb4e46", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1287620143-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.172", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "25a6457f62d149629c09589feb1a550c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap197b3459-f9", "ovs_interfaceid": "197b3459-f9f1-4fe3-a9ad-169350b4d637", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "dfcd47cc-53f0-4202-ba7f-b439ff3a8fe7", "address": "fa:16:3e:b4:a8:3d", "network": {"id": "c2f1496c-e3fd-43db-a032-12cdacdb4e46", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1287620143-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "25a6457f62d149629c09589feb1a550c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": 
{"0": "nsxv3"}}, "devname": "tapdfcd47cc-53", "ovs_interfaceid": "dfcd47cc-53f0-4202-ba7f-b439ff3a8fe7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1124.699844] env[63028]: DEBUG oslo_vmware.api [None req-adaba13b-512a-4572-b6cc-ff868e6f461c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52bf5cea-5afc-3d7c-ee70-2d7effe75272, 'name': SearchDatastore_Task, 'duration_secs': 0.024865} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1124.703167] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e4fb5938-0a2c-4fc5-a65e-4e91a02076f4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.717859] env[63028]: DEBUG oslo_vmware.api [None req-adaba13b-512a-4572-b6cc-ff868e6f461c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Waiting for the task: (returnval){ [ 1124.717859] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52e75239-b2e0-1142-62e0-95c444b59de6" [ 1124.717859] env[63028]: _type = "Task" [ 1124.717859] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1124.725262] env[63028]: DEBUG oslo_vmware.api [None req-adaba13b-512a-4572-b6cc-ff868e6f461c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52e75239-b2e0-1142-62e0-95c444b59de6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1124.754196] env[63028]: DEBUG nova.compute.manager [req-cb877f16-98f9-4db8-a159-c6196f155a0e req-88effc60-a988-424d-bcd5-1ca4eb198588 service nova] [instance: d6137c80-0c09-4655-b264-472753b4fa9c] Received event network-changed-7d007428-6d28-49a8-aa26-6b6ec99613c2 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1124.754196] env[63028]: DEBUG nova.compute.manager [req-cb877f16-98f9-4db8-a159-c6196f155a0e req-88effc60-a988-424d-bcd5-1ca4eb198588 service nova] [instance: d6137c80-0c09-4655-b264-472753b4fa9c] Refreshing instance network info cache due to event network-changed-7d007428-6d28-49a8-aa26-6b6ec99613c2. 
{{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1124.754196] env[63028]: DEBUG oslo_concurrency.lockutils [req-cb877f16-98f9-4db8-a159-c6196f155a0e req-88effc60-a988-424d-bcd5-1ca4eb198588 service nova] Acquiring lock "refresh_cache-d6137c80-0c09-4655-b264-472753b4fa9c" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1124.754196] env[63028]: DEBUG oslo_concurrency.lockutils [req-cb877f16-98f9-4db8-a159-c6196f155a0e req-88effc60-a988-424d-bcd5-1ca4eb198588 service nova] Acquired lock "refresh_cache-d6137c80-0c09-4655-b264-472753b4fa9c" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1124.754749] env[63028]: DEBUG nova.network.neutron [req-cb877f16-98f9-4db8-a159-c6196f155a0e req-88effc60-a988-424d-bcd5-1ca4eb198588 service nova] [instance: d6137c80-0c09-4655-b264-472753b4fa9c] Refreshing network info cache for port 7d007428-6d28-49a8-aa26-6b6ec99613c2 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1124.993591] env[63028]: DEBUG oslo_concurrency.lockutils [None req-1488e619-bdb7-4b84-9595-e9372908e0d7 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.802s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1124.996697] env[63028]: DEBUG oslo_concurrency.lockutils [None req-b27f49ac-bd48-4aa8-9a8a-4f2ef75e4656 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.128s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1124.996973] env[63028]: DEBUG nova.objects.instance [None req-b27f49ac-bd48-4aa8-9a8a-4f2ef75e4656 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Lazy-loading 'resources' on Instance uuid 719e014f-0544-4832-81ae-26b028b17be0 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1125.015968] env[63028]: INFO nova.scheduler.client.report [None req-1488e619-bdb7-4b84-9595-e9372908e0d7 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Deleted allocations for instance 3d91b47d-a5f7-4a10-aefb-1ad9c84c63e4 [ 1125.126655] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5fe89979-da5d-4c42-ab4e-ca05197b8da3 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Releasing lock "refresh_cache-899496ae-8463-42e0-a287-b141d956fa0a" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1125.127374] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5fe89979-da5d-4c42-ab4e-ca05197b8da3 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Acquiring lock "899496ae-8463-42e0-a287-b141d956fa0a" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1125.127533] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5fe89979-da5d-4c42-ab4e-ca05197b8da3 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] 
Acquired lock "899496ae-8463-42e0-a287-b141d956fa0a" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1125.127817] env[63028]: DEBUG oslo_concurrency.lockutils [req-89f1322f-1009-4a59-a4c6-d2e63e0e1fd3 req-2fa7a851-0828-4426-b43b-c659f62d0307 service nova] Acquired lock "refresh_cache-899496ae-8463-42e0-a287-b141d956fa0a" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1125.127997] env[63028]: DEBUG nova.network.neutron [req-89f1322f-1009-4a59-a4c6-d2e63e0e1fd3 req-2fa7a851-0828-4426-b43b-c659f62d0307 service nova] [instance: 899496ae-8463-42e0-a287-b141d956fa0a] Refreshing network info cache for port dfcd47cc-53f0-4202-ba7f-b439ff3a8fe7 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1125.129700] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c26b4835-2cee-45df-927d-63238a964f97 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.148295] env[63028]: DEBUG nova.virt.hardware [None req-5fe89979-da5d-4c42-ab4e-ca05197b8da3 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1125.148539] env[63028]: DEBUG nova.virt.hardware [None req-5fe89979-da5d-4c42-ab4e-ca05197b8da3 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1125.148695] env[63028]: DEBUG nova.virt.hardware [None req-5fe89979-da5d-4c42-ab4e-ca05197b8da3 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1125.148873] env[63028]: DEBUG nova.virt.hardware [None req-5fe89979-da5d-4c42-ab4e-ca05197b8da3 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1125.149028] env[63028]: DEBUG nova.virt.hardware [None req-5fe89979-da5d-4c42-ab4e-ca05197b8da3 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1125.149179] env[63028]: DEBUG nova.virt.hardware [None req-5fe89979-da5d-4c42-ab4e-ca05197b8da3 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1125.149385] env[63028]: DEBUG nova.virt.hardware [None req-5fe89979-da5d-4c42-ab4e-ca05197b8da3 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1125.149541] env[63028]: DEBUG nova.virt.hardware [None req-5fe89979-da5d-4c42-ab4e-ca05197b8da3 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1125.149733] env[63028]: DEBUG nova.virt.hardware [None req-5fe89979-da5d-4c42-ab4e-ca05197b8da3 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1125.149906] env[63028]: DEBUG nova.virt.hardware [None req-5fe89979-da5d-4c42-ab4e-ca05197b8da3 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1125.150093] env[63028]: DEBUG nova.virt.hardware [None req-5fe89979-da5d-4c42-ab4e-ca05197b8da3 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1125.156680] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-5fe89979-da5d-4c42-ab4e-ca05197b8da3 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: 899496ae-8463-42e0-a287-b141d956fa0a] Reconfiguring VM to attach interface {{(pid=63028) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1125.156866] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bb60a62a-8f89-43ab-845d-5a550eeea62a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.175033] env[63028]: DEBUG oslo_vmware.api [None req-5fe89979-da5d-4c42-ab4e-ca05197b8da3 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Waiting for the task: (returnval){ [ 1125.175033] env[63028]: value = "task-2736369" [ 1125.175033] env[63028]: _type = "Task" [ 1125.175033] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1125.183606] env[63028]: DEBUG oslo_vmware.api [None req-5fe89979-da5d-4c42-ab4e-ca05197b8da3 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2736369, 'name': ReconfigVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1125.216659] env[63028]: DEBUG nova.compute.manager [None req-7af5ed24-bb6e-4b2f-bc1a-f59b70216bda tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: f96d4bcd-a032-4e4d-94e4-12d7013d5e3f] Start spawning the instance on the hypervisor. {{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1125.229528] env[63028]: DEBUG oslo_vmware.api [None req-adaba13b-512a-4572-b6cc-ff868e6f461c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52e75239-b2e0-1142-62e0-95c444b59de6, 'name': SearchDatastore_Task, 'duration_secs': 0.041638} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1125.229645] env[63028]: DEBUG oslo_concurrency.lockutils [None req-adaba13b-512a-4572-b6cc-ff868e6f461c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1125.229945] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-adaba13b-512a-4572-b6cc-ff868e6f461c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] 629a3b6f-a74b-4193-bcf4-fc67a1752d5b/629a3b6f-a74b-4193-bcf4-fc67a1752d5b.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1125.230265] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b9ea62e3-51a8-46ac-9047-a46ab1beec61 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.244780] env[63028]: DEBUG oslo_vmware.api [None req-adaba13b-512a-4572-b6cc-ff868e6f461c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Waiting for the task: (returnval){ [ 1125.244780] env[63028]: value = "task-2736370" [ 1125.244780] env[63028]: _type = "Task" [ 1125.244780] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1125.250477] env[63028]: DEBUG nova.virt.hardware [None req-7af5ed24-bb6e-4b2f-bc1a-f59b70216bda tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1125.250702] env[63028]: DEBUG nova.virt.hardware [None req-7af5ed24-bb6e-4b2f-bc1a-f59b70216bda tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1125.250848] env[63028]: DEBUG nova.virt.hardware [None req-7af5ed24-bb6e-4b2f-bc1a-f59b70216bda tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1125.251038] env[63028]: DEBUG nova.virt.hardware [None req-7af5ed24-bb6e-4b2f-bc1a-f59b70216bda tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1125.251190] env[63028]: DEBUG nova.virt.hardware [None req-7af5ed24-bb6e-4b2f-bc1a-f59b70216bda tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1125.251341] env[63028]: DEBUG nova.virt.hardware [None req-7af5ed24-bb6e-4b2f-bc1a-f59b70216bda tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1125.251551] env[63028]: DEBUG nova.virt.hardware [None req-7af5ed24-bb6e-4b2f-bc1a-f59b70216bda tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1125.251709] env[63028]: DEBUG nova.virt.hardware [None req-7af5ed24-bb6e-4b2f-bc1a-f59b70216bda tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1125.251870] env[63028]: DEBUG 
nova.virt.hardware [None req-7af5ed24-bb6e-4b2f-bc1a-f59b70216bda tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1125.252035] env[63028]: DEBUG nova.virt.hardware [None req-7af5ed24-bb6e-4b2f-bc1a-f59b70216bda tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1125.252239] env[63028]: DEBUG nova.virt.hardware [None req-7af5ed24-bb6e-4b2f-bc1a-f59b70216bda tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1125.253032] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab9cefcc-1983-405a-a4f3-1aad7303f93c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.261667] env[63028]: DEBUG oslo_vmware.api [None req-adaba13b-512a-4572-b6cc-ff868e6f461c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2736370, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1125.265132] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fddb6ff-d92c-4b1c-a346-f67914a1537e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.526048] env[63028]: DEBUG oslo_concurrency.lockutils [None req-1488e619-bdb7-4b84-9595-e9372908e0d7 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Lock "3d91b47d-a5f7-4a10-aefb-1ad9c84c63e4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.630s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1125.571596] env[63028]: DEBUG nova.network.neutron [req-cb877f16-98f9-4db8-a159-c6196f155a0e req-88effc60-a988-424d-bcd5-1ca4eb198588 service nova] [instance: d6137c80-0c09-4655-b264-472753b4fa9c] Updated VIF entry in instance network info cache for port 7d007428-6d28-49a8-aa26-6b6ec99613c2. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1125.571955] env[63028]: DEBUG nova.network.neutron [req-cb877f16-98f9-4db8-a159-c6196f155a0e req-88effc60-a988-424d-bcd5-1ca4eb198588 service nova] [instance: d6137c80-0c09-4655-b264-472753b4fa9c] Updating instance_info_cache with network_info: [{"id": "7d007428-6d28-49a8-aa26-6b6ec99613c2", "address": "fa:16:3e:11:ca:39", "network": {"id": "a3d9dede-4b44-4172-9541-24cc42ad633f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-595575190-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "847e89af959a4266ab55c1d2106ba8fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8459aaf-d6a8-46fb-ad14-464ac3104695", "external-id": "nsx-vlan-transportzone-46", "segmentation_id": 46, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7d007428-6d", "ovs_interfaceid": "7d007428-6d28-49a8-aa26-6b6ec99613c2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1125.693529] env[63028]: DEBUG oslo_vmware.api [None req-5fe89979-da5d-4c42-ab4e-ca05197b8da3 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2736369, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1125.750946] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88a09fde-eaa7-46fd-82ff-789c7de789bf {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.763562] env[63028]: DEBUG oslo_vmware.api [None req-adaba13b-512a-4572-b6cc-ff868e6f461c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2736370, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1125.764672] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75a7a26e-48ae-49ec-9344-8dfbc12a5c55 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.798965] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0161d486-48a9-4818-a097-440b0284d0c8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.806574] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d380c18-89d3-4c62-a458-b880a02f989f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.819962] env[63028]: DEBUG nova.compute.provider_tree [None req-b27f49ac-bd48-4aa8-9a8a-4f2ef75e4656 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1125.893233] env[63028]: DEBUG nova.network.neutron [None req-7af5ed24-bb6e-4b2f-bc1a-f59b70216bda tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: f96d4bcd-a032-4e4d-94e4-12d7013d5e3f] Successfully updated port: f5759890-6244-4b8a-9a03-6d628f2441b7 {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1125.959990] env[63028]: DEBUG nova.network.neutron [req-89f1322f-1009-4a59-a4c6-d2e63e0e1fd3 req-2fa7a851-0828-4426-b43b-c659f62d0307 service nova] [instance: 899496ae-8463-42e0-a287-b141d956fa0a] Updated VIF entry in instance network info cache for port dfcd47cc-53f0-4202-ba7f-b439ff3a8fe7. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1125.959990] env[63028]: DEBUG nova.network.neutron [req-89f1322f-1009-4a59-a4c6-d2e63e0e1fd3 req-2fa7a851-0828-4426-b43b-c659f62d0307 service nova] [instance: 899496ae-8463-42e0-a287-b141d956fa0a] Updating instance_info_cache with network_info: [{"id": "197b3459-f9f1-4fe3-a9ad-169350b4d637", "address": "fa:16:3e:cb:8a:7c", "network": {"id": "c2f1496c-e3fd-43db-a032-12cdacdb4e46", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1287620143-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.172", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "25a6457f62d149629c09589feb1a550c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap197b3459-f9", "ovs_interfaceid": "197b3459-f9f1-4fe3-a9ad-169350b4d637", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "dfcd47cc-53f0-4202-ba7f-b439ff3a8fe7", "address": "fa:16:3e:b4:a8:3d", "network": {"id": "c2f1496c-e3fd-43db-a032-12cdacdb4e46", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1287620143-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "25a6457f62d149629c09589feb1a550c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdfcd47cc-53", "ovs_interfaceid": "dfcd47cc-53f0-4202-ba7f-b439ff3a8fe7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1126.075387] env[63028]: DEBUG oslo_concurrency.lockutils [req-cb877f16-98f9-4db8-a159-c6196f155a0e req-88effc60-a988-424d-bcd5-1ca4eb198588 service nova] Releasing lock "refresh_cache-d6137c80-0c09-4655-b264-472753b4fa9c" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1126.187505] env[63028]: DEBUG oslo_vmware.api [None req-5fe89979-da5d-4c42-ab4e-ca05197b8da3 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2736369, 'name': ReconfigVM_Task, 'duration_secs': 0.651016} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1126.188015] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5fe89979-da5d-4c42-ab4e-ca05197b8da3 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Releasing lock "899496ae-8463-42e0-a287-b141d956fa0a" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1126.188230] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-5fe89979-da5d-4c42-ab4e-ca05197b8da3 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: 899496ae-8463-42e0-a287-b141d956fa0a] Reconfigured VM to attach interface {{(pid=63028) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1126.255108] env[63028]: DEBUG oslo_vmware.api [None req-adaba13b-512a-4572-b6cc-ff868e6f461c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2736370, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.535175} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1126.255389] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-adaba13b-512a-4572-b6cc-ff868e6f461c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] 629a3b6f-a74b-4193-bcf4-fc67a1752d5b/629a3b6f-a74b-4193-bcf4-fc67a1752d5b.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1126.255606] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-adaba13b-512a-4572-b6cc-ff868e6f461c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 629a3b6f-a74b-4193-bcf4-fc67a1752d5b] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1126.255857] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2bf8fde2-fc7f-4057-9b00-8a8505d4a4c3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.261802] env[63028]: DEBUG oslo_vmware.api [None req-adaba13b-512a-4572-b6cc-ff868e6f461c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Waiting for the task: (returnval){ [ 1126.261802] env[63028]: value = "task-2736371" [ 1126.261802] env[63028]: _type = "Task" [ 1126.261802] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1126.270446] env[63028]: DEBUG oslo_vmware.api [None req-adaba13b-512a-4572-b6cc-ff868e6f461c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2736371, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1126.323842] env[63028]: DEBUG nova.scheduler.client.report [None req-b27f49ac-bd48-4aa8-9a8a-4f2ef75e4656 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1126.399032] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7af5ed24-bb6e-4b2f-bc1a-f59b70216bda tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Acquiring lock "refresh_cache-f96d4bcd-a032-4e4d-94e4-12d7013d5e3f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1126.399180] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7af5ed24-bb6e-4b2f-bc1a-f59b70216bda tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Acquired lock "refresh_cache-f96d4bcd-a032-4e4d-94e4-12d7013d5e3f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1126.399344] env[63028]: DEBUG nova.network.neutron [None req-7af5ed24-bb6e-4b2f-bc1a-f59b70216bda tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: f96d4bcd-a032-4e4d-94e4-12d7013d5e3f] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1126.418895] env[63028]: DEBUG nova.compute.manager [None req-f30745a5-feb4-4f16-9a6c-560730babfce tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 092c7673-97fb-4085-852c-04a7c19a73e7] Stashing vm_state: active {{(pid=63028) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1126.461403] env[63028]: DEBUG oslo_concurrency.lockutils [req-89f1322f-1009-4a59-a4c6-d2e63e0e1fd3 req-2fa7a851-0828-4426-b43b-c659f62d0307 service nova] Releasing lock "refresh_cache-899496ae-8463-42e0-a287-b141d956fa0a" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1126.677351] env[63028]: DEBUG nova.compute.manager [req-5fc3a508-81c6-45d1-b656-cfd6b126c623 req-5a0b45b5-4044-45a6-ae2d-4aa9094b8f39 service nova] [instance: f96d4bcd-a032-4e4d-94e4-12d7013d5e3f] Received event network-vif-plugged-f5759890-6244-4b8a-9a03-6d628f2441b7 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1126.677612] env[63028]: DEBUG oslo_concurrency.lockutils [req-5fc3a508-81c6-45d1-b656-cfd6b126c623 req-5a0b45b5-4044-45a6-ae2d-4aa9094b8f39 service nova] Acquiring lock "f96d4bcd-a032-4e4d-94e4-12d7013d5e3f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1126.677792] env[63028]: DEBUG oslo_concurrency.lockutils [req-5fc3a508-81c6-45d1-b656-cfd6b126c623 
req-5a0b45b5-4044-45a6-ae2d-4aa9094b8f39 service nova] Lock "f96d4bcd-a032-4e4d-94e4-12d7013d5e3f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1126.677960] env[63028]: DEBUG oslo_concurrency.lockutils [req-5fc3a508-81c6-45d1-b656-cfd6b126c623 req-5a0b45b5-4044-45a6-ae2d-4aa9094b8f39 service nova] Lock "f96d4bcd-a032-4e4d-94e4-12d7013d5e3f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1126.678224] env[63028]: DEBUG nova.compute.manager [req-5fc3a508-81c6-45d1-b656-cfd6b126c623 req-5a0b45b5-4044-45a6-ae2d-4aa9094b8f39 service nova] [instance: f96d4bcd-a032-4e4d-94e4-12d7013d5e3f] No waiting events found dispatching network-vif-plugged-f5759890-6244-4b8a-9a03-6d628f2441b7 {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1126.678509] env[63028]: WARNING nova.compute.manager [req-5fc3a508-81c6-45d1-b656-cfd6b126c623 req-5a0b45b5-4044-45a6-ae2d-4aa9094b8f39 service nova] [instance: f96d4bcd-a032-4e4d-94e4-12d7013d5e3f] Received unexpected event network-vif-plugged-f5759890-6244-4b8a-9a03-6d628f2441b7 for instance with vm_state building and task_state spawning. [ 1126.678594] env[63028]: DEBUG nova.compute.manager [req-5fc3a508-81c6-45d1-b656-cfd6b126c623 req-5a0b45b5-4044-45a6-ae2d-4aa9094b8f39 service nova] [instance: f96d4bcd-a032-4e4d-94e4-12d7013d5e3f] Received event network-changed-f5759890-6244-4b8a-9a03-6d628f2441b7 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1126.678754] env[63028]: DEBUG nova.compute.manager [req-5fc3a508-81c6-45d1-b656-cfd6b126c623 req-5a0b45b5-4044-45a6-ae2d-4aa9094b8f39 service nova] [instance: f96d4bcd-a032-4e4d-94e4-12d7013d5e3f] Refreshing instance network info cache due to event network-changed-f5759890-6244-4b8a-9a03-6d628f2441b7. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1126.678946] env[63028]: DEBUG oslo_concurrency.lockutils [req-5fc3a508-81c6-45d1-b656-cfd6b126c623 req-5a0b45b5-4044-45a6-ae2d-4aa9094b8f39 service nova] Acquiring lock "refresh_cache-f96d4bcd-a032-4e4d-94e4-12d7013d5e3f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1126.692414] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5fe89979-da5d-4c42-ab4e-ca05197b8da3 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Lock "interface-899496ae-8463-42e0-a287-b141d956fa0a-dfcd47cc-53f0-4202-ba7f-b439ff3a8fe7" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.216s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1126.771678] env[63028]: DEBUG oslo_vmware.api [None req-adaba13b-512a-4572-b6cc-ff868e6f461c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2736371, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066241} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1126.771942] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-adaba13b-512a-4572-b6cc-ff868e6f461c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 629a3b6f-a74b-4193-bcf4-fc67a1752d5b] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1126.772727] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a41efe65-9bd2-442c-b6fd-92e7975df0a4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.797457] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-adaba13b-512a-4572-b6cc-ff868e6f461c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 629a3b6f-a74b-4193-bcf4-fc67a1752d5b] Reconfiguring VM instance instance-00000072 to attach disk [datastore2] 629a3b6f-a74b-4193-bcf4-fc67a1752d5b/629a3b6f-a74b-4193-bcf4-fc67a1752d5b.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1126.797782] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7cddb58a-9d46-4f13-85b8-9b16e4f94fb9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.813795] env[63028]: DEBUG nova.compute.manager [req-5961b093-05b9-4549-af96-9de54e1ffcf5 req-d6bde873-fa05-4ca5-8efe-9b92a1738679 service nova] [instance: 092c7673-97fb-4085-852c-04a7c19a73e7] Received event network-changed-eda2613a-55b1-4516-80ce-192d52a6abe6 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1126.813989] env[63028]: DEBUG nova.compute.manager [req-5961b093-05b9-4549-af96-9de54e1ffcf5 req-d6bde873-fa05-4ca5-8efe-9b92a1738679 service nova] [instance: 092c7673-97fb-4085-852c-04a7c19a73e7] Refreshing instance network info cache due to event network-changed-eda2613a-55b1-4516-80ce-192d52a6abe6. 
{{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1126.814219] env[63028]: DEBUG oslo_concurrency.lockutils [req-5961b093-05b9-4549-af96-9de54e1ffcf5 req-d6bde873-fa05-4ca5-8efe-9b92a1738679 service nova] Acquiring lock "refresh_cache-092c7673-97fb-4085-852c-04a7c19a73e7" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1126.814374] env[63028]: DEBUG oslo_concurrency.lockutils [req-5961b093-05b9-4549-af96-9de54e1ffcf5 req-d6bde873-fa05-4ca5-8efe-9b92a1738679 service nova] Acquired lock "refresh_cache-092c7673-97fb-4085-852c-04a7c19a73e7" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1126.814530] env[63028]: DEBUG nova.network.neutron [req-5961b093-05b9-4549-af96-9de54e1ffcf5 req-d6bde873-fa05-4ca5-8efe-9b92a1738679 service nova] [instance: 092c7673-97fb-4085-852c-04a7c19a73e7] Refreshing network info cache for port eda2613a-55b1-4516-80ce-192d52a6abe6 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1126.821334] env[63028]: DEBUG oslo_vmware.api [None req-adaba13b-512a-4572-b6cc-ff868e6f461c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Waiting for the task: (returnval){ [ 1126.821334] env[63028]: value = "task-2736372" [ 1126.821334] env[63028]: _type = "Task" [ 1126.821334] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1126.828344] env[63028]: DEBUG oslo_concurrency.lockutils [None req-b27f49ac-bd48-4aa8-9a8a-4f2ef75e4656 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.832s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1126.833189] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d1ed4a58-e6cc-4d78-8183-95b663141949 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.522s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1126.833413] env[63028]: DEBUG nova.objects.instance [None req-d1ed4a58-e6cc-4d78-8183-95b663141949 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Lazy-loading 'resources' on Instance uuid f804ec95-0b97-4960-844d-b678b97fc401 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1126.834757] env[63028]: DEBUG oslo_vmware.api [None req-adaba13b-512a-4572-b6cc-ff868e6f461c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2736372, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1126.849008] env[63028]: INFO nova.scheduler.client.report [None req-b27f49ac-bd48-4aa8-9a8a-4f2ef75e4656 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Deleted allocations for instance 719e014f-0544-4832-81ae-26b028b17be0 [ 1126.932220] env[63028]: DEBUG nova.network.neutron [None req-7af5ed24-bb6e-4b2f-bc1a-f59b70216bda tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: f96d4bcd-a032-4e4d-94e4-12d7013d5e3f] Instance cache missing network info. {{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1126.937370] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f30745a5-feb4-4f16-9a6c-560730babfce tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1127.076308] env[63028]: DEBUG nova.network.neutron [None req-7af5ed24-bb6e-4b2f-bc1a-f59b70216bda tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: f96d4bcd-a032-4e4d-94e4-12d7013d5e3f] Updating instance_info_cache with network_info: [{"id": "f5759890-6244-4b8a-9a03-6d628f2441b7", "address": "fa:16:3e:61:25:8b", "network": {"id": "9c32515c-66bf-4a5f-9011-975fe2f6b264", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1101331936-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8efc6d89903c454eb39136a76e0adef5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92fe29b3-0907-453d-aabb-5559c4bd7c0f", "external-id": "nsx-vlan-transportzone-482", "segmentation_id": 482, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf5759890-62", "ovs_interfaceid": "f5759890-6244-4b8a-9a03-6d628f2441b7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1127.331028] env[63028]: DEBUG oslo_vmware.api [None req-adaba13b-512a-4572-b6cc-ff868e6f461c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2736372, 'name': ReconfigVM_Task, 'duration_secs': 0.283878} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1127.331702] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-adaba13b-512a-4572-b6cc-ff868e6f461c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 629a3b6f-a74b-4193-bcf4-fc67a1752d5b] Reconfigured VM instance instance-00000072 to attach disk [datastore2] 629a3b6f-a74b-4193-bcf4-fc67a1752d5b/629a3b6f-a74b-4193-bcf4-fc67a1752d5b.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1127.331925] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-eccf5f7e-4c76-4566-af10-4e411ee36a25 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.338725] env[63028]: DEBUG oslo_vmware.api [None req-adaba13b-512a-4572-b6cc-ff868e6f461c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Waiting for the task: (returnval){ [ 1127.338725] env[63028]: value = "task-2736373" [ 1127.338725] env[63028]: _type = "Task" [ 1127.338725] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1127.349347] env[63028]: DEBUG oslo_vmware.api [None req-adaba13b-512a-4572-b6cc-ff868e6f461c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2736373, 'name': Rename_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1127.358855] env[63028]: DEBUG oslo_concurrency.lockutils [None req-b27f49ac-bd48-4aa8-9a8a-4f2ef75e4656 tempest-MultipleCreateTestJSON-393833464 tempest-MultipleCreateTestJSON-393833464-project-member] Lock "719e014f-0544-4832-81ae-26b028b17be0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.363s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1127.545545] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c3f2393-7752-4ba1-a3e6-cbd854e40f26 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.553015] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58864b99-1b86-4408-9a4b-11e030b03cf2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.582386] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7af5ed24-bb6e-4b2f-bc1a-f59b70216bda tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Releasing lock "refresh_cache-f96d4bcd-a032-4e4d-94e4-12d7013d5e3f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1127.582721] env[63028]: DEBUG nova.compute.manager [None req-7af5ed24-bb6e-4b2f-bc1a-f59b70216bda tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: f96d4bcd-a032-4e4d-94e4-12d7013d5e3f] Instance network_info: |[{"id": "f5759890-6244-4b8a-9a03-6d628f2441b7", "address": "fa:16:3e:61:25:8b", "network": {"id": "9c32515c-66bf-4a5f-9011-975fe2f6b264", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1101331936-network", 
"subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8efc6d89903c454eb39136a76e0adef5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92fe29b3-0907-453d-aabb-5559c4bd7c0f", "external-id": "nsx-vlan-transportzone-482", "segmentation_id": 482, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf5759890-62", "ovs_interfaceid": "f5759890-6244-4b8a-9a03-6d628f2441b7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1127.583225] env[63028]: DEBUG oslo_concurrency.lockutils [req-5fc3a508-81c6-45d1-b656-cfd6b126c623 req-5a0b45b5-4044-45a6-ae2d-4aa9094b8f39 service nova] Acquired lock "refresh_cache-f96d4bcd-a032-4e4d-94e4-12d7013d5e3f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1127.583412] env[63028]: DEBUG nova.network.neutron [req-5fc3a508-81c6-45d1-b656-cfd6b126c623 req-5a0b45b5-4044-45a6-ae2d-4aa9094b8f39 service nova] [instance: f96d4bcd-a032-4e4d-94e4-12d7013d5e3f] Refreshing network info cache for port f5759890-6244-4b8a-9a03-6d628f2441b7 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1127.587602] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-7af5ed24-bb6e-4b2f-bc1a-f59b70216bda tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: f96d4bcd-a032-4e4d-94e4-12d7013d5e3f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:61:25:8b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '92fe29b3-0907-453d-aabb-5559c4bd7c0f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f5759890-6244-4b8a-9a03-6d628f2441b7', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1127.592789] env[63028]: DEBUG oslo.service.loopingcall [None req-7af5ed24-bb6e-4b2f-bc1a-f59b70216bda tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1127.593610] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df49253f-c8d9-46e1-b21f-d0a36d441629 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.596824] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f96d4bcd-a032-4e4d-94e4-12d7013d5e3f] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1127.597058] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0e90d80b-cbf2-4af4-ae82-91c49d0ed5ab {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.619997] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5f94450-94d1-47c3-9989-12da4e0114f7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.623969] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1127.623969] env[63028]: value = "task-2736374" [ 1127.623969] env[63028]: _type = "Task" [ 1127.623969] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1127.638820] env[63028]: DEBUG nova.compute.provider_tree [None req-d1ed4a58-e6cc-4d78-8183-95b663141949 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1127.643200] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2736374, 'name': CreateVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1127.809322] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9aceeeef-96bb-4ae1-89d6-443af598b240 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Acquiring lock "6865f832-d409-4b9b-8b6c-33b0bf07d2b2" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1127.809322] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9aceeeef-96bb-4ae1-89d6-443af598b240 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Lock "6865f832-d409-4b9b-8b6c-33b0bf07d2b2" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1127.837866] env[63028]: DEBUG nova.network.neutron [req-5961b093-05b9-4549-af96-9de54e1ffcf5 req-d6bde873-fa05-4ca5-8efe-9b92a1738679 service nova] [instance: 092c7673-97fb-4085-852c-04a7c19a73e7] Updated VIF entry in instance network info cache for port eda2613a-55b1-4516-80ce-192d52a6abe6. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1127.837866] env[63028]: DEBUG nova.network.neutron [req-5961b093-05b9-4549-af96-9de54e1ffcf5 req-d6bde873-fa05-4ca5-8efe-9b92a1738679 service nova] [instance: 092c7673-97fb-4085-852c-04a7c19a73e7] Updating instance_info_cache with network_info: [{"id": "eda2613a-55b1-4516-80ce-192d52a6abe6", "address": "fa:16:3e:43:ad:63", "network": {"id": "a3d9dede-4b44-4172-9541-24cc42ad633f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-595575190-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.168", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "847e89af959a4266ab55c1d2106ba8fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8459aaf-d6a8-46fb-ad14-464ac3104695", "external-id": "nsx-vlan-transportzone-46", "segmentation_id": 46, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeda2613a-55", "ovs_interfaceid": "eda2613a-55b1-4516-80ce-192d52a6abe6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1127.851267] env[63028]: DEBUG oslo_vmware.api [None req-adaba13b-512a-4572-b6cc-ff868e6f461c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2736373, 'name': Rename_Task, 'duration_secs': 0.136764} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1127.851382] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-adaba13b-512a-4572-b6cc-ff868e6f461c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 629a3b6f-a74b-4193-bcf4-fc67a1752d5b] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1127.851637] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b5c717ff-51e2-4057-83e3-c51ef6fa8ce5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.858210] env[63028]: DEBUG oslo_vmware.api [None req-adaba13b-512a-4572-b6cc-ff868e6f461c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Waiting for the task: (returnval){ [ 1127.858210] env[63028]: value = "task-2736375" [ 1127.858210] env[63028]: _type = "Task" [ 1127.858210] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1127.867581] env[63028]: DEBUG oslo_vmware.api [None req-adaba13b-512a-4572-b6cc-ff868e6f461c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2736375, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1128.134210] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2736374, 'name': CreateVM_Task, 'duration_secs': 0.356553} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1128.134415] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f96d4bcd-a032-4e4d-94e4-12d7013d5e3f] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1128.135098] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7af5ed24-bb6e-4b2f-bc1a-f59b70216bda tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1128.135268] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7af5ed24-bb6e-4b2f-bc1a-f59b70216bda tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1128.135681] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7af5ed24-bb6e-4b2f-bc1a-f59b70216bda tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1128.136157] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9a12a642-4c20-4f1f-9aca-e70d2a062fa6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.141207] env[63028]: DEBUG oslo_vmware.api [None req-7af5ed24-bb6e-4b2f-bc1a-f59b70216bda tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Waiting for the task: (returnval){ [ 1128.141207] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52a394b2-0f1e-621a-6fb9-b0a74e6d8fc8" [ 1128.141207] env[63028]: _type = "Task" [ 1128.141207] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1128.145195] env[63028]: DEBUG nova.scheduler.client.report [None req-d1ed4a58-e6cc-4d78-8183-95b663141949 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1128.152936] env[63028]: DEBUG oslo_vmware.api [None req-7af5ed24-bb6e-4b2f-bc1a-f59b70216bda tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52a394b2-0f1e-621a-6fb9-b0a74e6d8fc8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1128.312313] env[63028]: DEBUG nova.compute.utils [None req-9aceeeef-96bb-4ae1-89d6-443af598b240 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1128.324110] env[63028]: DEBUG nova.network.neutron [req-5fc3a508-81c6-45d1-b656-cfd6b126c623 req-5a0b45b5-4044-45a6-ae2d-4aa9094b8f39 service nova] [instance: f96d4bcd-a032-4e4d-94e4-12d7013d5e3f] Updated VIF entry in instance network info cache for port f5759890-6244-4b8a-9a03-6d628f2441b7. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1128.324498] env[63028]: DEBUG nova.network.neutron [req-5fc3a508-81c6-45d1-b656-cfd6b126c623 req-5a0b45b5-4044-45a6-ae2d-4aa9094b8f39 service nova] [instance: f96d4bcd-a032-4e4d-94e4-12d7013d5e3f] Updating instance_info_cache with network_info: [{"id": "f5759890-6244-4b8a-9a03-6d628f2441b7", "address": "fa:16:3e:61:25:8b", "network": {"id": "9c32515c-66bf-4a5f-9011-975fe2f6b264", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1101331936-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8efc6d89903c454eb39136a76e0adef5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92fe29b3-0907-453d-aabb-5559c4bd7c0f", "external-id": "nsx-vlan-transportzone-482", "segmentation_id": 482, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf5759890-62", "ovs_interfaceid": "f5759890-6244-4b8a-9a03-6d628f2441b7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1128.340514] env[63028]: DEBUG oslo_concurrency.lockutils [req-5961b093-05b9-4549-af96-9de54e1ffcf5 req-d6bde873-fa05-4ca5-8efe-9b92a1738679 service nova] Releasing lock "refresh_cache-092c7673-97fb-4085-852c-04a7c19a73e7" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1128.369747] env[63028]: DEBUG oslo_vmware.api [None req-adaba13b-512a-4572-b6cc-ff868e6f461c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2736375, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1128.497742] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8164c458-87d5-478e-907b-466193b3ffdd tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Acquiring lock "interface-899496ae-8463-42e0-a287-b141d956fa0a-dfcd47cc-53f0-4202-ba7f-b439ff3a8fe7" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1128.498072] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8164c458-87d5-478e-907b-466193b3ffdd tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Lock "interface-899496ae-8463-42e0-a287-b141d956fa0a-dfcd47cc-53f0-4202-ba7f-b439ff3a8fe7" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1128.651680] env[63028]: DEBUG oslo_vmware.api [None req-7af5ed24-bb6e-4b2f-bc1a-f59b70216bda tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52a394b2-0f1e-621a-6fb9-b0a74e6d8fc8, 'name': SearchDatastore_Task, 'duration_secs': 0.058039} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1128.652145] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7af5ed24-bb6e-4b2f-bc1a-f59b70216bda tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1128.652265] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-7af5ed24-bb6e-4b2f-bc1a-f59b70216bda tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: f96d4bcd-a032-4e4d-94e4-12d7013d5e3f] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1128.652529] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7af5ed24-bb6e-4b2f-bc1a-f59b70216bda tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1128.652678] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7af5ed24-bb6e-4b2f-bc1a-f59b70216bda tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1128.652861] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-7af5ed24-bb6e-4b2f-bc1a-f59b70216bda tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Creating directory with path [datastore2] devstack-image-cache_base 
{{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1128.653131] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7253bcc8-2306-4dc8-9deb-40ee9f2b1425 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.655750] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d1ed4a58-e6cc-4d78-8183-95b663141949 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.822s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1128.657758] env[63028]: DEBUG oslo_concurrency.lockutils [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 11.997s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1128.665747] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-7af5ed24-bb6e-4b2f-bc1a-f59b70216bda tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1128.665934] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-7af5ed24-bb6e-4b2f-bc1a-f59b70216bda tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1128.666707] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-82bb183b-337b-49bd-8fe0-87519caa7534 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.671738] env[63028]: DEBUG oslo_vmware.api [None req-7af5ed24-bb6e-4b2f-bc1a-f59b70216bda tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Waiting for the task: (returnval){ [ 1128.671738] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52af8de4-bca7-1ef5-4941-69359dce61b4" [ 1128.671738] env[63028]: _type = "Task" [ 1128.671738] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1128.681390] env[63028]: DEBUG oslo_vmware.api [None req-7af5ed24-bb6e-4b2f-bc1a-f59b70216bda tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52af8de4-bca7-1ef5-4941-69359dce61b4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1128.682427] env[63028]: INFO nova.scheduler.client.report [None req-d1ed4a58-e6cc-4d78-8183-95b663141949 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Deleted allocations for instance f804ec95-0b97-4960-844d-b678b97fc401 [ 1128.816250] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9aceeeef-96bb-4ae1-89d6-443af598b240 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Lock "6865f832-d409-4b9b-8b6c-33b0bf07d2b2" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1128.827115] env[63028]: DEBUG oslo_concurrency.lockutils [req-5fc3a508-81c6-45d1-b656-cfd6b126c623 req-5a0b45b5-4044-45a6-ae2d-4aa9094b8f39 service nova] Releasing lock "refresh_cache-f96d4bcd-a032-4e4d-94e4-12d7013d5e3f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1128.873779] env[63028]: DEBUG oslo_vmware.api [None req-adaba13b-512a-4572-b6cc-ff868e6f461c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2736375, 'name': PowerOnVM_Task} progress is 94%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.001385] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8164c458-87d5-478e-907b-466193b3ffdd tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Acquiring lock "899496ae-8463-42e0-a287-b141d956fa0a" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1129.001385] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8164c458-87d5-478e-907b-466193b3ffdd tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Acquired lock "899496ae-8463-42e0-a287-b141d956fa0a" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1129.002683] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cba2abf0-8abd-4a5d-bfb5-3aa874766e00 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.020528] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0304e0f-d521-43dc-b795-bab70bd20c70 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.046727] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-8164c458-87d5-478e-907b-466193b3ffdd tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: 899496ae-8463-42e0-a287-b141d956fa0a] Reconfiguring VM to detach interface {{(pid=63028) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1129.047292] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f72fea1d-b22c-4c86-aa4a-a56a571d786f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.067114] env[63028]: DEBUG oslo_vmware.api [None req-8164c458-87d5-478e-907b-466193b3ffdd 
tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Waiting for the task: (returnval){ [ 1129.067114] env[63028]: value = "task-2736376" [ 1129.067114] env[63028]: _type = "Task" [ 1129.067114] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1129.075810] env[63028]: DEBUG oslo_vmware.api [None req-8164c458-87d5-478e-907b-466193b3ffdd tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2736376, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.182058] env[63028]: DEBUG oslo_vmware.api [None req-7af5ed24-bb6e-4b2f-bc1a-f59b70216bda tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52af8de4-bca7-1ef5-4941-69359dce61b4, 'name': SearchDatastore_Task, 'duration_secs': 0.008877} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1129.183673] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-789612d9-7147-4b70-ae75-c1e2efaef084 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.190496] env[63028]: DEBUG oslo_vmware.api [None req-7af5ed24-bb6e-4b2f-bc1a-f59b70216bda tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Waiting for the task: (returnval){ [ 1129.190496] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5260a6cc-c892-68d1-bcaa-6b203a8c5397" [ 1129.190496] env[63028]: _type = "Task" [ 1129.190496] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1129.190749] env[63028]: DEBUG oslo_concurrency.lockutils [None req-d1ed4a58-e6cc-4d78-8183-95b663141949 tempest-AttachVolumeTestJSON-1048691326 tempest-AttachVolumeTestJSON-1048691326-project-member] Lock "f804ec95-0b97-4960-844d-b678b97fc401" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.993s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1129.199012] env[63028]: DEBUG oslo_vmware.api [None req-7af5ed24-bb6e-4b2f-bc1a-f59b70216bda tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5260a6cc-c892-68d1-bcaa-6b203a8c5397, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.370929] env[63028]: DEBUG oslo_vmware.api [None req-adaba13b-512a-4572-b6cc-ff868e6f461c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2736375, 'name': PowerOnVM_Task, 'duration_secs': 1.081298} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1129.371656] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-adaba13b-512a-4572-b6cc-ff868e6f461c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 629a3b6f-a74b-4193-bcf4-fc67a1752d5b] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1129.371656] env[63028]: INFO nova.compute.manager [None req-adaba13b-512a-4572-b6cc-ff868e6f461c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 629a3b6f-a74b-4193-bcf4-fc67a1752d5b] Took 8.41 seconds to spawn the instance on the hypervisor. [ 1129.371656] env[63028]: DEBUG nova.compute.manager [None req-adaba13b-512a-4572-b6cc-ff868e6f461c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 629a3b6f-a74b-4193-bcf4-fc67a1752d5b] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1129.372471] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24617eec-a18d-40bf-9f32-9474f03afd88 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.578088] env[63028]: DEBUG oslo_vmware.api [None req-8164c458-87d5-478e-907b-466193b3ffdd tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2736376, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.670091] env[63028]: INFO nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: 092c7673-97fb-4085-852c-04a7c19a73e7] Updating resource usage from migration 580a6c87-377c-4c7e-ada0-3e213c6f38f8 [ 1129.692535] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance d6137c80-0c09-4655-b264-472753b4fa9c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1129.692535] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1129.692535] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance 79f4ef22-a589-4d5c-8832-5d5dcdd55561 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1129.692819] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance 60d18f14-536a-4b0f-912b-21f3f5a30d28 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1129.692819] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance e048cadf-9dc1-4eb7-a825-422d0736231c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1129.692896] env[63028]: WARNING nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance d41a1eae-bb89-4222-9466-d86af891c654 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1129.693020] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance 4ec96b68-2fdb-4150-8d26-53fdf79c8e26 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1129.693146] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance 899496ae-8463-42e0-a287-b141d956fa0a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1129.693255] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance 6865f832-d409-4b9b-8b6c-33b0bf07d2b2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1129.693364] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance e5767896-8203-4b18-826f-dcb2fe02268e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1129.693476] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance 629a3b6f-a74b-4193-bcf4-fc67a1752d5b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1129.693585] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance f96d4bcd-a032-4e4d-94e4-12d7013d5e3f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1129.693708] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Migration 580a6c87-377c-4c7e-ada0-3e213c6f38f8 is active on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1129.693879] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance 092c7673-97fb-4085-852c-04a7c19a73e7 actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1129.694110] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Total usable vcpus: 48, total allocated vcpus: 12 {{(pid=63028) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1129.694248] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2816MB phys_disk=200GB used_disk=11GB total_vcpus=48 used_vcpus=12 pci_stats=[] {{(pid=63028) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1129.705505] env[63028]: DEBUG oslo_vmware.api [None req-7af5ed24-bb6e-4b2f-bc1a-f59b70216bda tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5260a6cc-c892-68d1-bcaa-6b203a8c5397, 'name': SearchDatastore_Task, 'duration_secs': 0.010218} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1129.705765] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7af5ed24-bb6e-4b2f-bc1a-f59b70216bda tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1129.706015] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-7af5ed24-bb6e-4b2f-bc1a-f59b70216bda tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] f96d4bcd-a032-4e4d-94e4-12d7013d5e3f/f96d4bcd-a032-4e4d-94e4-12d7013d5e3f.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1129.706275] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-969cc807-7af5-4dce-8f0e-9e6066befd7b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.712486] env[63028]: DEBUG oslo_vmware.api [None req-7af5ed24-bb6e-4b2f-bc1a-f59b70216bda tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Waiting for the task: (returnval){ [ 1129.712486] env[63028]: value = "task-2736377" [ 1129.712486] env[63028]: _type = "Task" [ 1129.712486] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1129.720122] env[63028]: DEBUG oslo_vmware.api [None req-7af5ed24-bb6e-4b2f-bc1a-f59b70216bda tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2736377, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.857930] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebebdb9c-985c-4f83-b10e-df826cfe747a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.865463] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8db9de79-75b6-4ca9-9c1c-ea8c20d1596d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.898548] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9aceeeef-96bb-4ae1-89d6-443af598b240 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Acquiring lock "6865f832-d409-4b9b-8b6c-33b0bf07d2b2" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1129.898787] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9aceeeef-96bb-4ae1-89d6-443af598b240 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Lock "6865f832-d409-4b9b-8b6c-33b0bf07d2b2" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1129.899013] env[63028]: INFO nova.compute.manager [None req-9aceeeef-96bb-4ae1-89d6-443af598b240 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: 6865f832-d409-4b9b-8b6c-33b0bf07d2b2] Attaching volume a3edae11-41bd-48cc-9a17-b3d918c822c4 to /dev/sdb [ 1129.902944] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1a43069-548d-4c28-87ef-245ac0a4e9c4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.906008] env[63028]: INFO nova.compute.manager [None req-adaba13b-512a-4572-b6cc-ff868e6f461c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 629a3b6f-a74b-4193-bcf4-fc67a1752d5b] Took 23.16 seconds to build instance. 
[ 1129.914885] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c5c6275-f155-448e-a704-2d8f37183ed3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.930342] env[63028]: DEBUG nova.compute.provider_tree [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1129.938165] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b9b70ea-ea46-4272-a5a0-67f81eda9577 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.945940] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8d9bd93-56db-4748-bd25-6435437dfc27 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.960264] env[63028]: DEBUG nova.virt.block_device [None req-9aceeeef-96bb-4ae1-89d6-443af598b240 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: 6865f832-d409-4b9b-8b6c-33b0bf07d2b2] Updating existing volume attachment record: 75f212b5-bf0e-4b5c-bb23-95506f098b8a {{(pid=63028) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1130.079022] env[63028]: DEBUG oslo_vmware.api [None req-8164c458-87d5-478e-907b-466193b3ffdd tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2736376, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1130.225170] env[63028]: DEBUG oslo_vmware.api [None req-7af5ed24-bb6e-4b2f-bc1a-f59b70216bda tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2736377, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1130.410429] env[63028]: DEBUG oslo_concurrency.lockutils [None req-adaba13b-512a-4572-b6cc-ff868e6f461c tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Lock "629a3b6f-a74b-4193-bcf4-fc67a1752d5b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.666s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1130.434373] env[63028]: DEBUG nova.scheduler.client.report [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1130.579247] env[63028]: DEBUG oslo_vmware.api [None req-8164c458-87d5-478e-907b-466193b3ffdd tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2736376, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1130.723656] env[63028]: DEBUG oslo_vmware.api [None req-7af5ed24-bb6e-4b2f-bc1a-f59b70216bda tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2736377, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.697126} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1130.723919] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-7af5ed24-bb6e-4b2f-bc1a-f59b70216bda tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] f96d4bcd-a032-4e4d-94e4-12d7013d5e3f/f96d4bcd-a032-4e4d-94e4-12d7013d5e3f.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1130.724163] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-7af5ed24-bb6e-4b2f-bc1a-f59b70216bda tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: f96d4bcd-a032-4e4d-94e4-12d7013d5e3f] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1130.724505] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-34071855-f722-4182-9d71-1f37dcc40b71 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.731572] env[63028]: DEBUG oslo_vmware.api [None req-7af5ed24-bb6e-4b2f-bc1a-f59b70216bda tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Waiting for the task: (returnval){ [ 1130.731572] env[63028]: value = "task-2736380" [ 1130.731572] env[63028]: _type = "Task" [ 1130.731572] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1130.739256] env[63028]: DEBUG oslo_vmware.api [None req-7af5ed24-bb6e-4b2f-bc1a-f59b70216bda tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2736380, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1130.940704] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63028) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1130.941113] env[63028]: DEBUG oslo_concurrency.lockutils [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.283s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1130.941318] env[63028]: DEBUG oslo_concurrency.lockutils [None req-1ec95544-745f-4244-8290-be64012a93cf tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.261s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1130.941525] env[63028]: DEBUG oslo_concurrency.lockutils [None req-1ec95544-745f-4244-8290-be64012a93cf tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1130.943923] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f30745a5-feb4-4f16-9a6c-560730babfce tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 4.007s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1130.945569] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1130.945766] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Cleaning up deleted instances {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11745}} [ 1130.975274] env[63028]: INFO nova.scheduler.client.report [None req-1ec95544-745f-4244-8290-be64012a93cf tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Deleted allocations for instance d41a1eae-bb89-4222-9466-d86af891c654 [ 1131.082953] env[63028]: DEBUG oslo_vmware.api [None req-8164c458-87d5-478e-907b-466193b3ffdd tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2736376, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1131.084580] env[63028]: DEBUG oslo_concurrency.lockutils [None req-136b605e-0b95-410e-8489-43ca99815561 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Acquiring lock "629a3b6f-a74b-4193-bcf4-fc67a1752d5b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1131.084901] env[63028]: DEBUG oslo_concurrency.lockutils [None req-136b605e-0b95-410e-8489-43ca99815561 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Lock "629a3b6f-a74b-4193-bcf4-fc67a1752d5b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1131.085226] env[63028]: DEBUG oslo_concurrency.lockutils [None req-136b605e-0b95-410e-8489-43ca99815561 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Acquiring lock "629a3b6f-a74b-4193-bcf4-fc67a1752d5b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1131.085510] env[63028]: DEBUG oslo_concurrency.lockutils [None req-136b605e-0b95-410e-8489-43ca99815561 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Lock "629a3b6f-a74b-4193-bcf4-fc67a1752d5b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1131.085767] env[63028]: DEBUG oslo_concurrency.lockutils [None req-136b605e-0b95-410e-8489-43ca99815561 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Lock "629a3b6f-a74b-4193-bcf4-fc67a1752d5b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1131.088467] env[63028]: INFO nova.compute.manager [None req-136b605e-0b95-410e-8489-43ca99815561 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 629a3b6f-a74b-4193-bcf4-fc67a1752d5b] Terminating instance [ 1131.243911] env[63028]: DEBUG oslo_vmware.api [None req-7af5ed24-bb6e-4b2f-bc1a-f59b70216bda tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2736380, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.163786} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1131.244231] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-7af5ed24-bb6e-4b2f-bc1a-f59b70216bda tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: f96d4bcd-a032-4e4d-94e4-12d7013d5e3f] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1131.245064] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c2038c5-659f-49e9-967f-89be4bf6509e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.268321] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-7af5ed24-bb6e-4b2f-bc1a-f59b70216bda tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: f96d4bcd-a032-4e4d-94e4-12d7013d5e3f] Reconfiguring VM instance instance-00000073 to attach disk [datastore2] f96d4bcd-a032-4e4d-94e4-12d7013d5e3f/f96d4bcd-a032-4e4d-94e4-12d7013d5e3f.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1131.268611] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-162fb75b-0066-49fa-b895-d0758d00cc7b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.289144] env[63028]: DEBUG oslo_vmware.api [None req-7af5ed24-bb6e-4b2f-bc1a-f59b70216bda tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Waiting for the task: (returnval){ [ 1131.289144] env[63028]: value = "task-2736381" [ 1131.289144] env[63028]: _type = "Task" [ 1131.289144] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1131.297161] env[63028]: DEBUG oslo_vmware.api [None req-7af5ed24-bb6e-4b2f-bc1a-f59b70216bda tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2736381, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1131.451462] env[63028]: INFO nova.compute.claims [None req-f30745a5-feb4-4f16-9a6c-560730babfce tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 092c7673-97fb-4085-852c-04a7c19a73e7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1131.469061] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] There are 63 instances to clean {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11754}} [ 1131.469240] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: 719e014f-0544-4832-81ae-26b028b17be0] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1131.485030] env[63028]: DEBUG oslo_concurrency.lockutils [None req-1ec95544-745f-4244-8290-be64012a93cf tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Lock "d41a1eae-bb89-4222-9466-d86af891c654" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 20.598s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1131.581216] env[63028]: DEBUG oslo_vmware.api [None req-8164c458-87d5-478e-907b-466193b3ffdd tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2736376, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1131.595534] env[63028]: DEBUG nova.compute.manager [None req-136b605e-0b95-410e-8489-43ca99815561 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 629a3b6f-a74b-4193-bcf4-fc67a1752d5b] Start destroying the instance on the hypervisor. 
{{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1131.595534] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-136b605e-0b95-410e-8489-43ca99815561 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 629a3b6f-a74b-4193-bcf4-fc67a1752d5b] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1131.595534] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f13cbd72-7411-42ec-9bb7-fbd35bad3418 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.602862] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-136b605e-0b95-410e-8489-43ca99815561 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 629a3b6f-a74b-4193-bcf4-fc67a1752d5b] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1131.603155] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6c13d255-5c29-4bb3-9894-116412dc9186 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.609150] env[63028]: DEBUG oslo_vmware.api [None req-136b605e-0b95-410e-8489-43ca99815561 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Waiting for the task: (returnval){ [ 1131.609150] env[63028]: value = "task-2736382" [ 1131.609150] env[63028]: _type = "Task" [ 1131.609150] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1131.616905] env[63028]: DEBUG oslo_vmware.api [None req-136b605e-0b95-410e-8489-43ca99815561 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2736382, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1131.800059] env[63028]: DEBUG oslo_vmware.api [None req-7af5ed24-bb6e-4b2f-bc1a-f59b70216bda tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2736381, 'name': ReconfigVM_Task, 'duration_secs': 0.283617} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1131.800059] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-7af5ed24-bb6e-4b2f-bc1a-f59b70216bda tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: f96d4bcd-a032-4e4d-94e4-12d7013d5e3f] Reconfigured VM instance instance-00000073 to attach disk [datastore2] f96d4bcd-a032-4e4d-94e4-12d7013d5e3f/f96d4bcd-a032-4e4d-94e4-12d7013d5e3f.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1131.800300] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3bcef83c-57a8-415f-868a-c6bac3e5335b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.805843] env[63028]: DEBUG oslo_vmware.api [None req-7af5ed24-bb6e-4b2f-bc1a-f59b70216bda tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Waiting for the task: (returnval){ [ 1131.805843] env[63028]: value = "task-2736383" [ 1131.805843] env[63028]: _type = "Task" [ 1131.805843] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1131.813795] env[63028]: DEBUG oslo_vmware.api [None req-7af5ed24-bb6e-4b2f-bc1a-f59b70216bda tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2736383, 'name': Rename_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1131.973301] env[63028]: INFO nova.compute.resource_tracker [None req-f30745a5-feb4-4f16-9a6c-560730babfce tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 092c7673-97fb-4085-852c-04a7c19a73e7] Updating resource usage from migration 580a6c87-377c-4c7e-ada0-3e213c6f38f8 [ 1131.976702] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: 3d91b47d-a5f7-4a10-aefb-1ad9c84c63e4] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1132.082953] env[63028]: DEBUG oslo_vmware.api [None req-8164c458-87d5-478e-907b-466193b3ffdd tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2736376, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1132.118797] env[63028]: DEBUG oslo_vmware.api [None req-136b605e-0b95-410e-8489-43ca99815561 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2736382, 'name': PowerOffVM_Task, 'duration_secs': 0.199251} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1132.121724] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-136b605e-0b95-410e-8489-43ca99815561 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 629a3b6f-a74b-4193-bcf4-fc67a1752d5b] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1132.121905] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-136b605e-0b95-410e-8489-43ca99815561 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 629a3b6f-a74b-4193-bcf4-fc67a1752d5b] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1132.122372] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e69b1ee8-bf05-42bb-8edb-14f02de70d08 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.142734] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e32faac-06e4-4704-a75a-8e8c2529eb9d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.151476] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ff29da8-7fbd-4479-b1ed-f67b045651f6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.183633] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11b2c979-7356-4417-af7b-9f7e723955da {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.186374] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-136b605e-0b95-410e-8489-43ca99815561 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 629a3b6f-a74b-4193-bcf4-fc67a1752d5b] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1132.186591] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-136b605e-0b95-410e-8489-43ca99815561 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 629a3b6f-a74b-4193-bcf4-fc67a1752d5b] Deleting contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1132.186871] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-136b605e-0b95-410e-8489-43ca99815561 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Deleting the datastore file [datastore2] 629a3b6f-a74b-4193-bcf4-fc67a1752d5b {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1132.187111] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f1400b2b-bbb0-4369-b7f2-7838c4054598 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.194788] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7408d67d-ef94-4bad-a07c-95debd9badf3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.198675] env[63028]: DEBUG oslo_vmware.api [None 
req-136b605e-0b95-410e-8489-43ca99815561 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Waiting for the task: (returnval){ [ 1132.198675] env[63028]: value = "task-2736386" [ 1132.198675] env[63028]: _type = "Task" [ 1132.198675] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1132.209605] env[63028]: DEBUG nova.compute.provider_tree [None req-f30745a5-feb4-4f16-9a6c-560730babfce tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1132.216105] env[63028]: DEBUG oslo_vmware.api [None req-136b605e-0b95-410e-8489-43ca99815561 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2736386, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1132.315043] env[63028]: DEBUG oslo_vmware.api [None req-7af5ed24-bb6e-4b2f-bc1a-f59b70216bda tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2736383, 'name': Rename_Task, 'duration_secs': 0.130279} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1132.315337] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-7af5ed24-bb6e-4b2f-bc1a-f59b70216bda tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: f96d4bcd-a032-4e4d-94e4-12d7013d5e3f] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1132.315581] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-23ccdda6-e094-4158-ad21-295e6cd3defc {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.321753] env[63028]: DEBUG oslo_vmware.api [None req-7af5ed24-bb6e-4b2f-bc1a-f59b70216bda tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Waiting for the task: (returnval){ [ 1132.321753] env[63028]: value = "task-2736387" [ 1132.321753] env[63028]: _type = "Task" [ 1132.321753] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1132.329642] env[63028]: DEBUG oslo_vmware.api [None req-7af5ed24-bb6e-4b2f-bc1a-f59b70216bda tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2736387, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1132.480894] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: a7ff444e-43bc-4925-9754-86ff30de6751] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1132.582328] env[63028]: DEBUG oslo_vmware.api [None req-8164c458-87d5-478e-907b-466193b3ffdd tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2736376, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1132.677395] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2e044ff6-e42e-46f5-a0a3-f833f4cd7cda tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Acquiring lock "b438b12e-874a-4883-b606-c28258e5a01a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1132.677669] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2e044ff6-e42e-46f5-a0a3-f833f4cd7cda tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Lock "b438b12e-874a-4883-b606-c28258e5a01a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1132.708497] env[63028]: DEBUG oslo_vmware.api [None req-136b605e-0b95-410e-8489-43ca99815561 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2736386, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.315024} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1132.708744] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-136b605e-0b95-410e-8489-43ca99815561 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1132.708926] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-136b605e-0b95-410e-8489-43ca99815561 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 629a3b6f-a74b-4193-bcf4-fc67a1752d5b] Deleted contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1132.709114] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-136b605e-0b95-410e-8489-43ca99815561 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 629a3b6f-a74b-4193-bcf4-fc67a1752d5b] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1132.709282] env[63028]: INFO nova.compute.manager [None req-136b605e-0b95-410e-8489-43ca99815561 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 629a3b6f-a74b-4193-bcf4-fc67a1752d5b] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1132.709506] env[63028]: DEBUG oslo.service.loopingcall [None req-136b605e-0b95-410e-8489-43ca99815561 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1132.709687] env[63028]: DEBUG nova.compute.manager [-] [instance: 629a3b6f-a74b-4193-bcf4-fc67a1752d5b] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1132.709774] env[63028]: DEBUG nova.network.neutron [-] [instance: 629a3b6f-a74b-4193-bcf4-fc67a1752d5b] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1132.712404] env[63028]: DEBUG nova.scheduler.client.report [None req-f30745a5-feb4-4f16-9a6c-560730babfce tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1132.832074] env[63028]: DEBUG oslo_vmware.api [None req-7af5ed24-bb6e-4b2f-bc1a-f59b70216bda tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2736387, 'name': PowerOnVM_Task} progress is 33%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1132.984361] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: 2add1602-122e-41d7-af83-b71d8dab9288] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1132.999235] env[63028]: DEBUG nova.compute.manager [req-716934bb-d845-4e16-8281-c1f3c7f0cb0c req-1723a910-22c8-4182-8614-9d69a9d83507 service nova] [instance: 629a3b6f-a74b-4193-bcf4-fc67a1752d5b] Received event network-vif-deleted-54b45b5c-ea6a-4064-b224-29875b66f9a1 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1132.999337] env[63028]: INFO nova.compute.manager [req-716934bb-d845-4e16-8281-c1f3c7f0cb0c req-1723a910-22c8-4182-8614-9d69a9d83507 service nova] [instance: 629a3b6f-a74b-4193-bcf4-fc67a1752d5b] Neutron deleted interface 54b45b5c-ea6a-4064-b224-29875b66f9a1; detaching it from the instance and deleting it from the info cache [ 1132.999859] env[63028]: DEBUG nova.network.neutron [req-716934bb-d845-4e16-8281-c1f3c7f0cb0c req-1723a910-22c8-4182-8614-9d69a9d83507 service nova] [instance: 629a3b6f-a74b-4193-bcf4-fc67a1752d5b] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1133.083926] env[63028]: DEBUG oslo_vmware.api [None req-8164c458-87d5-478e-907b-466193b3ffdd tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2736376, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.179597] env[63028]: DEBUG nova.compute.manager [None req-2e044ff6-e42e-46f5-a0a3-f833f4cd7cda tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Starting instance... {{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1133.216775] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f30745a5-feb4-4f16-9a6c-560730babfce tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.273s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1133.216991] env[63028]: INFO nova.compute.manager [None req-f30745a5-feb4-4f16-9a6c-560730babfce tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 092c7673-97fb-4085-852c-04a7c19a73e7] Migrating [ 1133.333145] env[63028]: DEBUG oslo_vmware.api [None req-7af5ed24-bb6e-4b2f-bc1a-f59b70216bda tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2736387, 'name': PowerOnVM_Task} progress is 33%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.476129] env[63028]: DEBUG nova.network.neutron [-] [instance: 629a3b6f-a74b-4193-bcf4-fc67a1752d5b] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1133.487435] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: 1f8415cc-f544-4c89-9863-43d5ae9144e8] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1133.502242] env[63028]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-01f0e417-1eae-41ca-a710-d6a4212216a2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.511966] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-915b1089-f7f5-4772-b68b-cf9e93f77c45 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.547145] env[63028]: DEBUG nova.compute.manager [req-716934bb-d845-4e16-8281-c1f3c7f0cb0c req-1723a910-22c8-4182-8614-9d69a9d83507 service nova] [instance: 629a3b6f-a74b-4193-bcf4-fc67a1752d5b] Detach interface failed, port_id=54b45b5c-ea6a-4064-b224-29875b66f9a1, reason: Instance 629a3b6f-a74b-4193-bcf4-fc67a1752d5b could not be found. {{(pid=63028) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1133.583854] env[63028]: DEBUG oslo_vmware.api [None req-8164c458-87d5-478e-907b-466193b3ffdd tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2736376, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.700642] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2e044ff6-e42e-46f5-a0a3-f833f4cd7cda tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1133.700896] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2e044ff6-e42e-46f5-a0a3-f833f4cd7cda tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1133.702381] env[63028]: INFO nova.compute.claims [None req-2e044ff6-e42e-46f5-a0a3-f833f4cd7cda tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1133.731421] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f30745a5-feb4-4f16-9a6c-560730babfce tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Acquiring lock "refresh_cache-092c7673-97fb-4085-852c-04a7c19a73e7" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1133.731421] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f30745a5-feb4-4f16-9a6c-560730babfce tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Acquired lock "refresh_cache-092c7673-97fb-4085-852c-04a7c19a73e7" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1133.731581] env[63028]: DEBUG nova.network.neutron [None req-f30745a5-feb4-4f16-9a6c-560730babfce tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 092c7673-97fb-4085-852c-04a7c19a73e7] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1133.833177] env[63028]: DEBUG oslo_vmware.api [None req-7af5ed24-bb6e-4b2f-bc1a-f59b70216bda tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2736387, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.979554] env[63028]: INFO nova.compute.manager [-] [instance: 629a3b6f-a74b-4193-bcf4-fc67a1752d5b] Took 1.27 seconds to deallocate network for instance. [ 1133.990850] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: 2c2fb165-8906-4d42-a839-89ea6c8814ab] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1134.085598] env[63028]: DEBUG oslo_vmware.api [None req-8164c458-87d5-478e-907b-466193b3ffdd tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2736376, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.095150] env[63028]: DEBUG oslo_concurrency.lockutils [None req-b6027d2b-205b-45b7-8174-642af8052177 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Acquiring lock "60d18f14-536a-4b0f-912b-21f3f5a30d28" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1134.095403] env[63028]: DEBUG oslo_concurrency.lockutils [None req-b6027d2b-205b-45b7-8174-642af8052177 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Lock "60d18f14-536a-4b0f-912b-21f3f5a30d28" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1134.095597] env[63028]: DEBUG oslo_concurrency.lockutils [None req-b6027d2b-205b-45b7-8174-642af8052177 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Acquiring lock "60d18f14-536a-4b0f-912b-21f3f5a30d28-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1134.095777] env[63028]: DEBUG oslo_concurrency.lockutils [None req-b6027d2b-205b-45b7-8174-642af8052177 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Lock "60d18f14-536a-4b0f-912b-21f3f5a30d28-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1134.095950] env[63028]: DEBUG oslo_concurrency.lockutils [None req-b6027d2b-205b-45b7-8174-642af8052177 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Lock "60d18f14-536a-4b0f-912b-21f3f5a30d28-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1134.098047] env[63028]: INFO nova.compute.manager [None req-b6027d2b-205b-45b7-8174-642af8052177 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 60d18f14-536a-4b0f-912b-21f3f5a30d28] Terminating instance [ 1134.333254] env[63028]: DEBUG oslo_vmware.api [None req-7af5ed24-bb6e-4b2f-bc1a-f59b70216bda tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2736387, 'name': PowerOnVM_Task, 'duration_secs': 1.582796} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1134.333519] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-7af5ed24-bb6e-4b2f-bc1a-f59b70216bda tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: f96d4bcd-a032-4e4d-94e4-12d7013d5e3f] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1134.333726] env[63028]: INFO nova.compute.manager [None req-7af5ed24-bb6e-4b2f-bc1a-f59b70216bda tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: f96d4bcd-a032-4e4d-94e4-12d7013d5e3f] Took 9.12 seconds to spawn the instance on the hypervisor. [ 1134.333907] env[63028]: DEBUG nova.compute.manager [None req-7af5ed24-bb6e-4b2f-bc1a-f59b70216bda tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: f96d4bcd-a032-4e4d-94e4-12d7013d5e3f] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1134.334708] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-595d3a75-2f9d-4379-b0d8-8976f2632caf {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.460632] env[63028]: DEBUG nova.network.neutron [None req-f30745a5-feb4-4f16-9a6c-560730babfce tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 092c7673-97fb-4085-852c-04a7c19a73e7] Updating instance_info_cache with network_info: [{"id": "eda2613a-55b1-4516-80ce-192d52a6abe6", "address": "fa:16:3e:43:ad:63", "network": {"id": "a3d9dede-4b44-4172-9541-24cc42ad633f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-595575190-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.168", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "847e89af959a4266ab55c1d2106ba8fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8459aaf-d6a8-46fb-ad14-464ac3104695", "external-id": "nsx-vlan-transportzone-46", "segmentation_id": 46, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeda2613a-55", "ovs_interfaceid": "eda2613a-55b1-4516-80ce-192d52a6abe6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1134.485917] env[63028]: DEBUG oslo_concurrency.lockutils [None req-136b605e-0b95-410e-8489-43ca99815561 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1134.493697] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: 
1cf111f2-df5e-48a6-905a-bc2d3ea45202] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1134.514033] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-9aceeeef-96bb-4ae1-89d6-443af598b240 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: 6865f832-d409-4b9b-8b6c-33b0bf07d2b2] Volume attach. Driver type: vmdk {{(pid=63028) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1134.514294] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-9aceeeef-96bb-4ae1-89d6-443af598b240 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: 6865f832-d409-4b9b-8b6c-33b0bf07d2b2] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-550882', 'volume_id': 'a3edae11-41bd-48cc-9a17-b3d918c822c4', 'name': 'volume-a3edae11-41bd-48cc-9a17-b3d918c822c4', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '6865f832-d409-4b9b-8b6c-33b0bf07d2b2', 'attached_at': '', 'detached_at': '', 'volume_id': 'a3edae11-41bd-48cc-9a17-b3d918c822c4', 'serial': 'a3edae11-41bd-48cc-9a17-b3d918c822c4'} {{(pid=63028) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1134.515297] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6c0904a-9052-4233-b863-a456be6c3703 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.533941] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbe48ac4-58e5-4dd5-a2cd-7a7514e960a4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.557870] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-9aceeeef-96bb-4ae1-89d6-443af598b240 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: 6865f832-d409-4b9b-8b6c-33b0bf07d2b2] Reconfiguring VM instance instance-0000006c to attach disk [datastore2] volume-a3edae11-41bd-48cc-9a17-b3d918c822c4/volume-a3edae11-41bd-48cc-9a17-b3d918c822c4.vmdk or device None with type thin {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1134.558121] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a18ac120-0b73-470f-b9d6-82580e2af8e5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.576472] env[63028]: DEBUG oslo_vmware.api [None req-9aceeeef-96bb-4ae1-89d6-443af598b240 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Waiting for the task: (returnval){ [ 1134.576472] env[63028]: value = "task-2736389" [ 1134.576472] env[63028]: _type = "Task" [ 1134.576472] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1134.589432] env[63028]: DEBUG oslo_vmware.api [None req-8164c458-87d5-478e-907b-466193b3ffdd tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2736376, 'name': ReconfigVM_Task} progress is 18%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.592480] env[63028]: DEBUG oslo_vmware.api [None req-9aceeeef-96bb-4ae1-89d6-443af598b240 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': task-2736389, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.601220] env[63028]: DEBUG nova.compute.manager [None req-b6027d2b-205b-45b7-8174-642af8052177 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 60d18f14-536a-4b0f-912b-21f3f5a30d28] Start destroying the instance on the hypervisor. {{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1134.601435] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-b6027d2b-205b-45b7-8174-642af8052177 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 60d18f14-536a-4b0f-912b-21f3f5a30d28] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1134.602353] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd6489ec-5440-4b52-816a-3f404411f438 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.609842] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6027d2b-205b-45b7-8174-642af8052177 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 60d18f14-536a-4b0f-912b-21f3f5a30d28] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1134.610112] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ad1e753d-707f-4fe6-a5b2-f9dd6772d0b6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.615597] env[63028]: DEBUG oslo_vmware.api [None req-b6027d2b-205b-45b7-8174-642af8052177 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Waiting for the task: (returnval){ [ 1134.615597] env[63028]: value = "task-2736390" [ 1134.615597] env[63028]: _type = "Task" [ 1134.615597] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1134.623091] env[63028]: DEBUG oslo_vmware.api [None req-b6027d2b-205b-45b7-8174-642af8052177 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736390, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.851319] env[63028]: INFO nova.compute.manager [None req-7af5ed24-bb6e-4b2f-bc1a-f59b70216bda tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: f96d4bcd-a032-4e4d-94e4-12d7013d5e3f] Took 25.89 seconds to build instance. 
[ 1134.919381] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-428c72fb-a0b1-4b9c-8c84-8dd908c5e11a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.927466] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ec2ee9c-6e65-4fbe-923c-497ccadb7ce6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.961149] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8853d5ea-a453-4849-9707-a56282e25b55 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.964191] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f30745a5-feb4-4f16-9a6c-560730babfce tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Releasing lock "refresh_cache-092c7673-97fb-4085-852c-04a7c19a73e7" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1134.971583] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77aacce3-fde2-4b28-8d89-5a900b1da735 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.689140] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: 3566ab6f-1f8a-472d-9efb-47fa2520a215] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1135.691073] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7af5ed24-bb6e-4b2f-bc1a-f59b70216bda tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Lock "f96d4bcd-a032-4e4d-94e4-12d7013d5e3f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.743s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1135.716222] env[63028]: DEBUG oslo_vmware.api [None req-9aceeeef-96bb-4ae1-89d6-443af598b240 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': task-2736389, 'name': ReconfigVM_Task, 'duration_secs': 0.344338} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1135.716706] env[63028]: DEBUG nova.compute.provider_tree [None req-2e044ff6-e42e-46f5-a0a3-f833f4cd7cda tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1135.723312] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-9aceeeef-96bb-4ae1-89d6-443af598b240 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: 6865f832-d409-4b9b-8b6c-33b0bf07d2b2] Reconfigured VM instance instance-0000006c to attach disk [datastore2] volume-a3edae11-41bd-48cc-9a17-b3d918c822c4/volume-a3edae11-41bd-48cc-9a17-b3d918c822c4.vmdk or device None with type thin {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1135.728072] env[63028]: DEBUG oslo_vmware.api [None req-8164c458-87d5-478e-907b-466193b3ffdd tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2736376, 'name': ReconfigVM_Task, 'duration_secs': 5.738751} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1135.728311] env[63028]: DEBUG oslo_vmware.api [None req-b6027d2b-205b-45b7-8174-642af8052177 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736390, 'name': PowerOffVM_Task, 'duration_secs': 0.186207} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1135.730037] env[63028]: DEBUG nova.scheduler.client.report [None req-2e044ff6-e42e-46f5-a0a3-f833f4cd7cda tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1135.732958] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e44f5c0e-e35c-4071-9b55-cf7607104816 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.746020] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8164c458-87d5-478e-907b-466193b3ffdd tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Releasing lock "899496ae-8463-42e0-a287-b141d956fa0a" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1135.746020] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-8164c458-87d5-478e-907b-466193b3ffdd tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: 899496ae-8463-42e0-a287-b141d956fa0a] Reconfigured VM to detach interface {{(pid=63028) detach_interface 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1135.746020] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6027d2b-205b-45b7-8174-642af8052177 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 60d18f14-536a-4b0f-912b-21f3f5a30d28] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1135.746020] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-b6027d2b-205b-45b7-8174-642af8052177 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 60d18f14-536a-4b0f-912b-21f3f5a30d28] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1135.746508] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-afbc9663-2e5c-4277-a2a4-79a4025e7cc4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.752739] env[63028]: DEBUG oslo_vmware.api [None req-9aceeeef-96bb-4ae1-89d6-443af598b240 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Waiting for the task: (returnval){ [ 1135.752739] env[63028]: value = "task-2736391" [ 1135.752739] env[63028]: _type = "Task" [ 1135.752739] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1135.761239] env[63028]: DEBUG oslo_vmware.api [None req-9aceeeef-96bb-4ae1-89d6-443af598b240 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': task-2736391, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.807620] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-b6027d2b-205b-45b7-8174-642af8052177 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 60d18f14-536a-4b0f-912b-21f3f5a30d28] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1135.808145] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-b6027d2b-205b-45b7-8174-642af8052177 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 60d18f14-536a-4b0f-912b-21f3f5a30d28] Deleting contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1135.808145] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-b6027d2b-205b-45b7-8174-642af8052177 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Deleting the datastore file [datastore1] 60d18f14-536a-4b0f-912b-21f3f5a30d28 {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1135.808399] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-571c71c2-405e-43a1-98ac-b5ed4942c398 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.816348] env[63028]: DEBUG oslo_vmware.api [None req-b6027d2b-205b-45b7-8174-642af8052177 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Waiting for the task: (returnval){ [ 1135.816348] env[63028]: value = "task-2736393" [ 1135.816348] env[63028]: _type = "Task" [ 
1135.816348] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1135.825043] env[63028]: DEBUG oslo_vmware.api [None req-b6027d2b-205b-45b7-8174-642af8052177 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736393, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.198244] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: f804ec95-0b97-4960-844d-b678b97fc401] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1136.247653] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2e044ff6-e42e-46f5-a0a3-f833f4cd7cda tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.547s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1136.248264] env[63028]: DEBUG nova.compute.manager [None req-2e044ff6-e42e-46f5-a0a3-f833f4cd7cda tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Start building networks asynchronously for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1136.250964] env[63028]: DEBUG oslo_concurrency.lockutils [None req-136b605e-0b95-410e-8489-43ca99815561 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.765s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1136.251233] env[63028]: DEBUG nova.objects.instance [None req-136b605e-0b95-410e-8489-43ca99815561 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Lazy-loading 'resources' on Instance uuid 629a3b6f-a74b-4193-bcf4-fc67a1752d5b {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1136.262808] env[63028]: DEBUG oslo_vmware.api [None req-9aceeeef-96bb-4ae1-89d6-443af598b240 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': task-2736391, 'name': ReconfigVM_Task, 'duration_secs': 0.170874} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1136.263008] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-9aceeeef-96bb-4ae1-89d6-443af598b240 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: 6865f832-d409-4b9b-8b6c-33b0bf07d2b2] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-550882', 'volume_id': 'a3edae11-41bd-48cc-9a17-b3d918c822c4', 'name': 'volume-a3edae11-41bd-48cc-9a17-b3d918c822c4', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '6865f832-d409-4b9b-8b6c-33b0bf07d2b2', 'attached_at': '', 'detached_at': '', 'volume_id': 'a3edae11-41bd-48cc-9a17-b3d918c822c4', 'serial': 'a3edae11-41bd-48cc-9a17-b3d918c822c4'} {{(pid=63028) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1136.306529] env[63028]: DEBUG nova.compute.manager [None req-ed9a1c85-e0e1-4383-bc2f-0e3ab08d3b0d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: f96d4bcd-a032-4e4d-94e4-12d7013d5e3f] Stashing vm_state: active {{(pid=63028) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1136.325761] env[63028]: DEBUG oslo_vmware.api [None req-b6027d2b-205b-45b7-8174-642af8052177 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736393, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.150689} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1136.326497] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-b6027d2b-205b-45b7-8174-642af8052177 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1136.326585] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-b6027d2b-205b-45b7-8174-642af8052177 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 60d18f14-536a-4b0f-912b-21f3f5a30d28] Deleted contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1136.326740] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-b6027d2b-205b-45b7-8174-642af8052177 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 60d18f14-536a-4b0f-912b-21f3f5a30d28] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1136.326916] env[63028]: INFO nova.compute.manager [None req-b6027d2b-205b-45b7-8174-642af8052177 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 60d18f14-536a-4b0f-912b-21f3f5a30d28] Took 1.73 seconds to destroy the instance on the hypervisor. [ 1136.327173] env[63028]: DEBUG oslo.service.loopingcall [None req-b6027d2b-205b-45b7-8174-642af8052177 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1136.327368] env[63028]: DEBUG nova.compute.manager [-] [instance: 60d18f14-536a-4b0f-912b-21f3f5a30d28] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1136.327478] env[63028]: DEBUG nova.network.neutron [-] [instance: 60d18f14-536a-4b0f-912b-21f3f5a30d28] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1136.702709] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: b3930760-1888-4f80-85d8-65120a25f275] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1136.706010] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dc2a158-00e3-430e-8533-40f0139cf61a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.725423] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-f30745a5-feb4-4f16-9a6c-560730babfce tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 092c7673-97fb-4085-852c-04a7c19a73e7] Updating instance '092c7673-97fb-4085-852c-04a7c19a73e7' progress to 0 {{(pid=63028) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1136.754574] env[63028]: DEBUG nova.compute.utils [None req-2e044ff6-e42e-46f5-a0a3-f833f4cd7cda tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1136.759112] env[63028]: DEBUG nova.compute.manager [None req-2e044ff6-e42e-46f5-a0a3-f833f4cd7cda tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Allocating IP information in the background. 
{{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1136.759112] env[63028]: DEBUG nova.network.neutron [None req-2e044ff6-e42e-46f5-a0a3-f833f4cd7cda tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: b438b12e-874a-4883-b606-c28258e5a01a] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1136.780379] env[63028]: DEBUG nova.compute.manager [req-6c29d4bc-b5cc-4708-9f3f-4abacb8ca5b4 req-25ac399e-eb54-44cc-b2ea-54fda848b87d service nova] [instance: 60d18f14-536a-4b0f-912b-21f3f5a30d28] Received event network-vif-deleted-2e1ed2b6-3ab4-47dc-9247-d7c30d3bc291 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1136.780641] env[63028]: INFO nova.compute.manager [req-6c29d4bc-b5cc-4708-9f3f-4abacb8ca5b4 req-25ac399e-eb54-44cc-b2ea-54fda848b87d service nova] [instance: 60d18f14-536a-4b0f-912b-21f3f5a30d28] Neutron deleted interface 2e1ed2b6-3ab4-47dc-9247-d7c30d3bc291; detaching it from the instance and deleting it from the info cache [ 1136.780820] env[63028]: DEBUG nova.network.neutron [req-6c29d4bc-b5cc-4708-9f3f-4abacb8ca5b4 req-25ac399e-eb54-44cc-b2ea-54fda848b87d service nova] [instance: 60d18f14-536a-4b0f-912b-21f3f5a30d28] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1136.816384] env[63028]: DEBUG nova.policy [None req-2e044ff6-e42e-46f5-a0a3-f833f4cd7cda tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8b3c3b29bb4d4c23a09527bcda019773', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b4dcaef840f940bda057d0371cdc5adb', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 1136.834375] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ed9a1c85-e0e1-4383-bc2f-0e3ab08d3b0d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1136.983129] env[63028]: DEBUG oslo_concurrency.lockutils [None req-805a773c-4318-4883-8862-160c0c057105 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Acquiring lock "899496ae-8463-42e0-a287-b141d956fa0a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1136.983438] env[63028]: DEBUG oslo_concurrency.lockutils [None req-805a773c-4318-4883-8862-160c0c057105 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Lock "899496ae-8463-42e0-a287-b141d956fa0a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 
1136.983664] env[63028]: DEBUG oslo_concurrency.lockutils [None req-805a773c-4318-4883-8862-160c0c057105 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Acquiring lock "899496ae-8463-42e0-a287-b141d956fa0a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1136.983854] env[63028]: DEBUG oslo_concurrency.lockutils [None req-805a773c-4318-4883-8862-160c0c057105 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Lock "899496ae-8463-42e0-a287-b141d956fa0a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1136.984045] env[63028]: DEBUG oslo_concurrency.lockutils [None req-805a773c-4318-4883-8862-160c0c057105 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Lock "899496ae-8463-42e0-a287-b141d956fa0a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1136.990740] env[63028]: INFO nova.compute.manager [None req-805a773c-4318-4883-8862-160c0c057105 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: 899496ae-8463-42e0-a287-b141d956fa0a] Terminating instance [ 1137.056388] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18cded1c-ae76-48ac-9115-432edef0b222 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.068241] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d614a28a-0447-4702-989c-c007693de258 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.100789] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8164c458-87d5-478e-907b-466193b3ffdd tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Acquiring lock "refresh_cache-899496ae-8463-42e0-a287-b141d956fa0a" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1137.101159] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8164c458-87d5-478e-907b-466193b3ffdd tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Acquired lock "refresh_cache-899496ae-8463-42e0-a287-b141d956fa0a" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1137.101476] env[63028]: DEBUG nova.network.neutron [None req-8164c458-87d5-478e-907b-466193b3ffdd tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: 899496ae-8463-42e0-a287-b141d956fa0a] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1137.103730] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5782dfc1-c790-404e-be25-8afc61185300 {{(pid=63028) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.112307] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-359660e8-fc1c-4844-94d7-cb780bd8382d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.126833] env[63028]: DEBUG nova.compute.provider_tree [None req-136b605e-0b95-410e-8489-43ca99815561 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1137.175856] env[63028]: DEBUG nova.network.neutron [None req-2e044ff6-e42e-46f5-a0a3-f833f4cd7cda tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Successfully created port: 51c6d9df-5b87-40a0-8c2b-5586869a3c0f {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1137.210131] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: da23282a-bbda-47bf-9d9c-337ee9996779] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1137.233464] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-f30745a5-feb4-4f16-9a6c-560730babfce tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 092c7673-97fb-4085-852c-04a7c19a73e7] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1137.234204] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-62d6df9e-06c2-4fa1-8f72-f190a8f05861 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.243244] env[63028]: DEBUG oslo_vmware.api [None req-f30745a5-feb4-4f16-9a6c-560730babfce tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Waiting for the task: (returnval){ [ 1137.243244] env[63028]: value = "task-2736394" [ 1137.243244] env[63028]: _type = "Task" [ 1137.243244] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1137.251711] env[63028]: DEBUG nova.network.neutron [-] [instance: 60d18f14-536a-4b0f-912b-21f3f5a30d28] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1137.252871] env[63028]: DEBUG oslo_vmware.api [None req-f30745a5-feb4-4f16-9a6c-560730babfce tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2736394, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.259154] env[63028]: DEBUG nova.compute.manager [None req-2e044ff6-e42e-46f5-a0a3-f833f4cd7cda tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Start building block device mappings for instance. 
{{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1137.284236] env[63028]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-11852afb-f53c-4420-9776-c00dab0a45e7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.294239] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21095566-5c6d-46fa-9b72-2de42692ed09 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.333310] env[63028]: DEBUG nova.objects.instance [None req-9aceeeef-96bb-4ae1-89d6-443af598b240 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Lazy-loading 'flavor' on Instance uuid 6865f832-d409-4b9b-8b6c-33b0bf07d2b2 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1137.335093] env[63028]: DEBUG nova.compute.manager [req-6c29d4bc-b5cc-4708-9f3f-4abacb8ca5b4 req-25ac399e-eb54-44cc-b2ea-54fda848b87d service nova] [instance: 60d18f14-536a-4b0f-912b-21f3f5a30d28] Detach interface failed, port_id=2e1ed2b6-3ab4-47dc-9247-d7c30d3bc291, reason: Instance 60d18f14-536a-4b0f-912b-21f3f5a30d28 could not be found. {{(pid=63028) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1137.499215] env[63028]: DEBUG nova.compute.manager [None req-805a773c-4318-4883-8862-160c0c057105 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: 899496ae-8463-42e0-a287-b141d956fa0a] Start destroying the instance on the hypervisor. {{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1137.499457] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-805a773c-4318-4883-8862-160c0c057105 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: 899496ae-8463-42e0-a287-b141d956fa0a] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1137.500837] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efc14c33-dd3c-44c8-afc9-3af74e1c8726 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.509760] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-805a773c-4318-4883-8862-160c0c057105 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: 899496ae-8463-42e0-a287-b141d956fa0a] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1137.509956] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fff33d4a-9d4b-41d9-af00-3c3370254434 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.516081] env[63028]: DEBUG oslo_vmware.api [None req-805a773c-4318-4883-8862-160c0c057105 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Waiting for the task: (returnval){ [ 1137.516081] env[63028]: value = "task-2736395" [ 1137.516081] env[63028]: _type = "Task" [ 1137.516081] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1137.524188] env[63028]: DEBUG oslo_vmware.api [None req-805a773c-4318-4883-8862-160c0c057105 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2736395, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.630551] env[63028]: DEBUG nova.scheduler.client.report [None req-136b605e-0b95-410e-8489-43ca99815561 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1137.713262] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: c492dea4-9779-4460-a559-5b82fb0643f0] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1137.753636] env[63028]: INFO nova.compute.manager [-] [instance: 60d18f14-536a-4b0f-912b-21f3f5a30d28] Took 1.43 seconds to deallocate network for instance. [ 1137.760367] env[63028]: DEBUG oslo_vmware.api [None req-f30745a5-feb4-4f16-9a6c-560730babfce tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2736394, 'name': PowerOffVM_Task, 'duration_secs': 0.185185} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1137.770021] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-f30745a5-feb4-4f16-9a6c-560730babfce tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 092c7673-97fb-4085-852c-04a7c19a73e7] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1137.770021] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-f30745a5-feb4-4f16-9a6c-560730babfce tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 092c7673-97fb-4085-852c-04a7c19a73e7] Updating instance '092c7673-97fb-4085-852c-04a7c19a73e7' progress to 17 {{(pid=63028) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1137.839785] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9aceeeef-96bb-4ae1-89d6-443af598b240 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Lock "6865f832-d409-4b9b-8b6c-33b0bf07d2b2" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.940s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1138.023281] env[63028]: INFO nova.network.neutron [None req-8164c458-87d5-478e-907b-466193b3ffdd tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: 899496ae-8463-42e0-a287-b141d956fa0a] Port dfcd47cc-53f0-4202-ba7f-b439ff3a8fe7 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. [ 1138.024429] env[63028]: DEBUG nova.network.neutron [None req-8164c458-87d5-478e-907b-466193b3ffdd tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: 899496ae-8463-42e0-a287-b141d956fa0a] Updating instance_info_cache with network_info: [{"id": "197b3459-f9f1-4fe3-a9ad-169350b4d637", "address": "fa:16:3e:cb:8a:7c", "network": {"id": "c2f1496c-e3fd-43db-a032-12cdacdb4e46", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1287620143-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.172", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "25a6457f62d149629c09589feb1a550c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap197b3459-f9", "ovs_interfaceid": "197b3459-f9f1-4fe3-a9ad-169350b4d637", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1138.036618] env[63028]: DEBUG oslo_vmware.api [None req-805a773c-4318-4883-8862-160c0c057105 tempest-AttachInterfacesTestJSON-1120072222 
tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2736395, 'name': PowerOffVM_Task, 'duration_secs': 0.49906} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1138.037595] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-805a773c-4318-4883-8862-160c0c057105 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: 899496ae-8463-42e0-a287-b141d956fa0a] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1138.037860] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-805a773c-4318-4883-8862-160c0c057105 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: 899496ae-8463-42e0-a287-b141d956fa0a] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1138.038643] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-56a6955e-b9e2-4ef4-87a2-4e7065ad37a3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.111711] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-805a773c-4318-4883-8862-160c0c057105 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: 899496ae-8463-42e0-a287-b141d956fa0a] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1138.115017] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-805a773c-4318-4883-8862-160c0c057105 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: 899496ae-8463-42e0-a287-b141d956fa0a] Deleting contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1138.115017] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-805a773c-4318-4883-8862-160c0c057105 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Deleting the datastore file [datastore1] 899496ae-8463-42e0-a287-b141d956fa0a {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1138.115017] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-194fd764-4ad9-4fb8-acb9-6bc6c1faf0ea {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.119335] env[63028]: DEBUG oslo_vmware.api [None req-805a773c-4318-4883-8862-160c0c057105 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Waiting for the task: (returnval){ [ 1138.119335] env[63028]: value = "task-2736397" [ 1138.119335] env[63028]: _type = "Task" [ 1138.119335] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1138.129966] env[63028]: DEBUG oslo_vmware.api [None req-805a773c-4318-4883-8862-160c0c057105 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2736397, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1138.137350] env[63028]: DEBUG oslo_concurrency.lockutils [None req-136b605e-0b95-410e-8489-43ca99815561 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.885s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1138.138645] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ed9a1c85-e0e1-4383-bc2f-0e3ab08d3b0d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 1.304s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1138.158372] env[63028]: INFO nova.scheduler.client.report [None req-136b605e-0b95-410e-8489-43ca99815561 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Deleted allocations for instance 629a3b6f-a74b-4193-bcf4-fc67a1752d5b [ 1138.216944] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: b16d85d7-13f3-4be0-8495-2fd2c1476f01] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1138.282953] env[63028]: DEBUG oslo_concurrency.lockutils [None req-b6027d2b-205b-45b7-8174-642af8052177 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1138.284525] env[63028]: DEBUG nova.virt.hardware [None req-f30745a5-feb4-4f16-9a6c-560730babfce tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1138.285024] env[63028]: DEBUG nova.virt.hardware [None req-f30745a5-feb4-4f16-9a6c-560730babfce tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1138.285399] env[63028]: DEBUG nova.virt.hardware [None req-f30745a5-feb4-4f16-9a6c-560730babfce tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1138.285750] env[63028]: DEBUG nova.virt.hardware [None req-f30745a5-feb4-4f16-9a6c-560730babfce tempest-ServerActionsTestOtherA-431602134 
tempest-ServerActionsTestOtherA-431602134-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1138.286070] env[63028]: DEBUG nova.virt.hardware [None req-f30745a5-feb4-4f16-9a6c-560730babfce tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1138.286401] env[63028]: DEBUG nova.virt.hardware [None req-f30745a5-feb4-4f16-9a6c-560730babfce tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1138.286771] env[63028]: DEBUG nova.virt.hardware [None req-f30745a5-feb4-4f16-9a6c-560730babfce tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1138.288427] env[63028]: DEBUG nova.virt.hardware [None req-f30745a5-feb4-4f16-9a6c-560730babfce tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1138.288776] env[63028]: DEBUG nova.virt.hardware [None req-f30745a5-feb4-4f16-9a6c-560730babfce tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1138.289127] env[63028]: DEBUG nova.virt.hardware [None req-f30745a5-feb4-4f16-9a6c-560730babfce tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1138.289649] env[63028]: DEBUG nova.virt.hardware [None req-f30745a5-feb4-4f16-9a6c-560730babfce tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1138.296831] env[63028]: DEBUG nova.compute.manager [None req-2e044ff6-e42e-46f5-a0a3-f833f4cd7cda tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Start spawning the instance on the hypervisor. 
{{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1138.299593] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-548882e7-fc1a-4913-896c-4700983fd99d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.320048] env[63028]: DEBUG oslo_vmware.api [None req-f30745a5-feb4-4f16-9a6c-560730babfce tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Waiting for the task: (returnval){ [ 1138.320048] env[63028]: value = "task-2736398" [ 1138.320048] env[63028]: _type = "Task" [ 1138.320048] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1138.329868] env[63028]: DEBUG oslo_vmware.api [None req-f30745a5-feb4-4f16-9a6c-560730babfce tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2736398, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1138.344190] env[63028]: DEBUG nova.virt.hardware [None req-2e044ff6-e42e-46f5-a0a3-f833f4cd7cda tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1138.344362] env[63028]: DEBUG nova.virt.hardware [None req-2e044ff6-e42e-46f5-a0a3-f833f4cd7cda tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1138.344543] env[63028]: DEBUG nova.virt.hardware [None req-2e044ff6-e42e-46f5-a0a3-f833f4cd7cda tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1138.344727] env[63028]: DEBUG nova.virt.hardware [None req-2e044ff6-e42e-46f5-a0a3-f833f4cd7cda tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1138.344872] env[63028]: DEBUG nova.virt.hardware [None req-2e044ff6-e42e-46f5-a0a3-f833f4cd7cda tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1138.345102] env[63028]: DEBUG nova.virt.hardware [None 
req-2e044ff6-e42e-46f5-a0a3-f833f4cd7cda tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1138.345362] env[63028]: DEBUG nova.virt.hardware [None req-2e044ff6-e42e-46f5-a0a3-f833f4cd7cda tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1138.345552] env[63028]: DEBUG nova.virt.hardware [None req-2e044ff6-e42e-46f5-a0a3-f833f4cd7cda tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1138.345728] env[63028]: DEBUG nova.virt.hardware [None req-2e044ff6-e42e-46f5-a0a3-f833f4cd7cda tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1138.345891] env[63028]: DEBUG nova.virt.hardware [None req-2e044ff6-e42e-46f5-a0a3-f833f4cd7cda tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1138.346080] env[63028]: DEBUG nova.virt.hardware [None req-2e044ff6-e42e-46f5-a0a3-f833f4cd7cda tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1138.347801] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe09955b-230c-44fd-b3bf-c59badf6c99a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.356454] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f70dd4a0-0641-4f63-a308-9d3fbd459b28 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.531939] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8164c458-87d5-478e-907b-466193b3ffdd tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Releasing lock "refresh_cache-899496ae-8463-42e0-a287-b141d956fa0a" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1138.537814] env[63028]: DEBUG oslo_concurrency.lockutils [None req-eb1339d2-6ecb-478a-81a4-e07c347d959a tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Acquiring lock "6865f832-d409-4b9b-8b6c-33b0bf07d2b2" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1138.538082] env[63028]: DEBUG oslo_concurrency.lockutils [None 
req-eb1339d2-6ecb-478a-81a4-e07c347d959a tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Lock "6865f832-d409-4b9b-8b6c-33b0bf07d2b2" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1138.629663] env[63028]: DEBUG oslo_vmware.api [None req-805a773c-4318-4883-8862-160c0c057105 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2736397, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.26061} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1138.629917] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-805a773c-4318-4883-8862-160c0c057105 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1138.630109] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-805a773c-4318-4883-8862-160c0c057105 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: 899496ae-8463-42e0-a287-b141d956fa0a] Deleted contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1138.630279] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-805a773c-4318-4883-8862-160c0c057105 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: 899496ae-8463-42e0-a287-b141d956fa0a] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1138.630473] env[63028]: INFO nova.compute.manager [None req-805a773c-4318-4883-8862-160c0c057105 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: 899496ae-8463-42e0-a287-b141d956fa0a] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1138.630716] env[63028]: DEBUG oslo.service.loopingcall [None req-805a773c-4318-4883-8862-160c0c057105 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
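Note: the oslo_concurrency.lockutils lines throughout this trace follow one pattern: "Acquiring lock X by Y", "acquired :: waited N s", "released :: held N s", with lock names such as "compute_resources", a bare instance UUID (as in the do_detach_volume lock just above), or "refresh_cache-<uuid>". The snippet below is a minimal, hypothetical illustration of that pattern; only the lockutils.synchronized decorator usage reflects the real library, the ToyResourceTracker class is invented for this example.

```python
from oslo_concurrency import lockutils

# Name of the shared critical section; concurrent claims/updates serialise on it,
# which is what produces the "waited ... / held ..." timings in the log.
COMPUTE_RESOURCE_SEMAPHORE = "compute_resources"


class ToyResourceTracker:
    """Hypothetical stand-in for an object that serialises on a named lock."""

    def __init__(self):
        self.claimed_vcpus = 0

    @lockutils.synchronized(COMPUTE_RESOURCE_SEMAPHORE)
    def instance_claim(self, vcpus):
        # Runs with the named lock held; a concurrent resize_claim or
        # update_usage blocks here until release.
        self.claimed_vcpus += vcpus

    @lockutils.synchronized(COMPUTE_RESOURCE_SEMAPHORE)
    def update_usage(self, delta_vcpus):
        self.claimed_vcpus += delta_vcpus


if __name__ == "__main__":
    rt = ToyResourceTracker()
    rt.instance_claim(1)
    rt.update_usage(-1)
    print(rt.claimed_vcpus)  # 0
```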
{{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1138.630897] env[63028]: DEBUG nova.compute.manager [-] [instance: 899496ae-8463-42e0-a287-b141d956fa0a] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1138.630985] env[63028]: DEBUG nova.network.neutron [-] [instance: 899496ae-8463-42e0-a287-b141d956fa0a] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1138.644184] env[63028]: INFO nova.compute.claims [None req-ed9a1c85-e0e1-4383-bc2f-0e3ab08d3b0d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: f96d4bcd-a032-4e4d-94e4-12d7013d5e3f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1138.670738] env[63028]: DEBUG oslo_concurrency.lockutils [None req-136b605e-0b95-410e-8489-43ca99815561 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Lock "629a3b6f-a74b-4193-bcf4-fc67a1752d5b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.585s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1138.722078] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: 56e6ade9-893b-4c85-b0b8-e9f7b12cbad6] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1138.812055] env[63028]: DEBUG nova.compute.manager [req-d48209c6-4aef-48f7-9a19-3119b40750ff req-dfb192da-97d5-4ac3-ad68-88c4f81f24a3 service nova] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Received event network-vif-plugged-51c6d9df-5b87-40a0-8c2b-5586869a3c0f {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1138.812315] env[63028]: DEBUG oslo_concurrency.lockutils [req-d48209c6-4aef-48f7-9a19-3119b40750ff req-dfb192da-97d5-4ac3-ad68-88c4f81f24a3 service nova] Acquiring lock "b438b12e-874a-4883-b606-c28258e5a01a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1138.812532] env[63028]: DEBUG oslo_concurrency.lockutils [req-d48209c6-4aef-48f7-9a19-3119b40750ff req-dfb192da-97d5-4ac3-ad68-88c4f81f24a3 service nova] Lock "b438b12e-874a-4883-b606-c28258e5a01a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1138.812885] env[63028]: DEBUG oslo_concurrency.lockutils [req-d48209c6-4aef-48f7-9a19-3119b40750ff req-dfb192da-97d5-4ac3-ad68-88c4f81f24a3 service nova] Lock "b438b12e-874a-4883-b606-c28258e5a01a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1138.813158] env[63028]: DEBUG nova.compute.manager [req-d48209c6-4aef-48f7-9a19-3119b40750ff req-dfb192da-97d5-4ac3-ad68-88c4f81f24a3 service nova] [instance: b438b12e-874a-4883-b606-c28258e5a01a] No waiting events found dispatching network-vif-plugged-51c6d9df-5b87-40a0-8c2b-5586869a3c0f {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 
1138.813272] env[63028]: WARNING nova.compute.manager [req-d48209c6-4aef-48f7-9a19-3119b40750ff req-dfb192da-97d5-4ac3-ad68-88c4f81f24a3 service nova] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Received unexpected event network-vif-plugged-51c6d9df-5b87-40a0-8c2b-5586869a3c0f for instance with vm_state building and task_state spawning. [ 1138.828430] env[63028]: DEBUG oslo_vmware.api [None req-f30745a5-feb4-4f16-9a6c-560730babfce tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2736398, 'name': ReconfigVM_Task, 'duration_secs': 0.15744} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1138.828719] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-f30745a5-feb4-4f16-9a6c-560730babfce tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 092c7673-97fb-4085-852c-04a7c19a73e7] Updating instance '092c7673-97fb-4085-852c-04a7c19a73e7' progress to 33 {{(pid=63028) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1138.981041] env[63028]: DEBUG nova.network.neutron [None req-2e044ff6-e42e-46f5-a0a3-f833f4cd7cda tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Successfully updated port: 51c6d9df-5b87-40a0-8c2b-5586869a3c0f {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1139.036038] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8164c458-87d5-478e-907b-466193b3ffdd tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Lock "interface-899496ae-8463-42e0-a287-b141d956fa0a-dfcd47cc-53f0-4202-ba7f-b439ff3a8fe7" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 10.538s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1139.041771] env[63028]: INFO nova.compute.manager [None req-eb1339d2-6ecb-478a-81a4-e07c347d959a tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: 6865f832-d409-4b9b-8b6c-33b0bf07d2b2] Detaching volume a3edae11-41bd-48cc-9a17-b3d918c822c4 [ 1139.077285] env[63028]: INFO nova.virt.block_device [None req-eb1339d2-6ecb-478a-81a4-e07c347d959a tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: 6865f832-d409-4b9b-8b6c-33b0bf07d2b2] Attempting to driver detach volume a3edae11-41bd-48cc-9a17-b3d918c822c4 from mountpoint /dev/sdb [ 1139.077573] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-eb1339d2-6ecb-478a-81a4-e07c347d959a tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: 6865f832-d409-4b9b-8b6c-33b0bf07d2b2] Volume detach. 
Driver type: vmdk {{(pid=63028) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1139.077776] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-eb1339d2-6ecb-478a-81a4-e07c347d959a tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: 6865f832-d409-4b9b-8b6c-33b0bf07d2b2] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-550882', 'volume_id': 'a3edae11-41bd-48cc-9a17-b3d918c822c4', 'name': 'volume-a3edae11-41bd-48cc-9a17-b3d918c822c4', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '6865f832-d409-4b9b-8b6c-33b0bf07d2b2', 'attached_at': '', 'detached_at': '', 'volume_id': 'a3edae11-41bd-48cc-9a17-b3d918c822c4', 'serial': 'a3edae11-41bd-48cc-9a17-b3d918c822c4'} {{(pid=63028) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1139.078631] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-487a6589-96e6-433d-a59c-5c9b8c6d356c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.102072] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-366f6882-3085-49d3-8404-904b208f093f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.109066] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ca79b40-00fb-471a-9819-151d37d8224c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.131570] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcb0a332-42fe-41d0-8a38-56316557a055 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.147158] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-eb1339d2-6ecb-478a-81a4-e07c347d959a tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] The volume has not been displaced from its original location: [datastore2] volume-a3edae11-41bd-48cc-9a17-b3d918c822c4/volume-a3edae11-41bd-48cc-9a17-b3d918c822c4.vmdk. No consolidation needed. 
{{(pid=63028) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1139.152438] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-eb1339d2-6ecb-478a-81a4-e07c347d959a tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: 6865f832-d409-4b9b-8b6c-33b0bf07d2b2] Reconfiguring VM instance instance-0000006c to detach disk 2001 {{(pid=63028) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1139.154039] env[63028]: INFO nova.compute.resource_tracker [None req-ed9a1c85-e0e1-4383-bc2f-0e3ab08d3b0d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: f96d4bcd-a032-4e4d-94e4-12d7013d5e3f] Updating resource usage from migration 6dd553ad-526a-4c74-9b06-c8c1cfe1fe09 [ 1139.156527] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-db55c906-4674-4317-9179-42a24ec68c63 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.177066] env[63028]: DEBUG oslo_vmware.api [None req-eb1339d2-6ecb-478a-81a4-e07c347d959a tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Waiting for the task: (returnval){ [ 1139.177066] env[63028]: value = "task-2736399" [ 1139.177066] env[63028]: _type = "Task" [ 1139.177066] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1139.185153] env[63028]: DEBUG oslo_vmware.api [None req-eb1339d2-6ecb-478a-81a4-e07c347d959a tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': task-2736399, 'name': ReconfigVM_Task} progress is 5%. 
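Note: each oslo_vmware.api block above shows the same task lifecycle: a vCenter task (here ReconfigVM_Task) is created, the caller logs "Waiting for the task ... to complete", progress is polled ("progress is 5%"), and completion is reported with a duration_secs value. The loop below sketches that poll-until-done idea generically; get_task_state() is a hypothetical callable standing in for the vCenter task query, not an oslo.vmware API.

```python
import time


class TaskFailed(Exception):
    pass


def wait_for_task(get_task_state, poll_interval=0.5, timeout=300.0):
    """Poll a task until it finishes, mimicking the 'progress is N%' lines.

    get_task_state is a hypothetical callable returning a dict such as
    {'state': 'running', 'progress': 5}, {'state': 'success'}, or
    {'state': 'error', 'error': '...'}; the real driver asks vCenter instead.
    """
    start = time.monotonic()
    while True:
        info = get_task_state()
        state = info.get("state")
        if state == "success":
            return time.monotonic() - start  # analogous to duration_secs in the log
        if state == "error":
            raise TaskFailed(info.get("error", "unknown error"))
        if time.monotonic() - start > timeout:
            raise TaskFailed("timed out waiting for task")
        # Counterpart of "Task: {'id': ..., 'name': ReconfigVM_Task} progress is 5%."
        print("progress is %s%%" % info.get("progress", 0))
        time.sleep(poll_interval)


if __name__ == "__main__":
    # Toy task that finishes after three polls.
    states = iter([{"state": "running", "progress": 5},
                   {"state": "running", "progress": 60},
                   {"state": "success"}])
    print("duration_secs: %.3f" % wait_for_task(lambda: next(states), poll_interval=0.01))
```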
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.225095] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: 53b0cf02-f1c8-4219-a58c-f7b5ffbb1ae7] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1139.243451] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8095c027-a15e-4bdc-a3e5-37b8018eaa83 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Acquiring lock "feb8d206-718d-423a-afff-76c6975934e6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1139.243649] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8095c027-a15e-4bdc-a3e5-37b8018eaa83 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Lock "feb8d206-718d-423a-afff-76c6975934e6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1139.335302] env[63028]: DEBUG nova.virt.hardware [None req-f30745a5-feb4-4f16-9a6c-560730babfce tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1139.335635] env[63028]: DEBUG nova.virt.hardware [None req-f30745a5-feb4-4f16-9a6c-560730babfce tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1139.335855] env[63028]: DEBUG nova.virt.hardware [None req-f30745a5-feb4-4f16-9a6c-560730babfce tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1139.336116] env[63028]: DEBUG nova.virt.hardware [None req-f30745a5-feb4-4f16-9a6c-560730babfce tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1139.336309] env[63028]: DEBUG nova.virt.hardware [None req-f30745a5-feb4-4f16-9a6c-560730babfce tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1139.336479] env[63028]: DEBUG nova.virt.hardware [None req-f30745a5-feb4-4f16-9a6c-560730babfce tempest-ServerActionsTestOtherA-431602134 
tempest-ServerActionsTestOtherA-431602134-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1139.336740] env[63028]: DEBUG nova.virt.hardware [None req-f30745a5-feb4-4f16-9a6c-560730babfce tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1139.336955] env[63028]: DEBUG nova.virt.hardware [None req-f30745a5-feb4-4f16-9a6c-560730babfce tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1139.337298] env[63028]: DEBUG nova.virt.hardware [None req-f30745a5-feb4-4f16-9a6c-560730babfce tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1139.337375] env[63028]: DEBUG nova.virt.hardware [None req-f30745a5-feb4-4f16-9a6c-560730babfce tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1139.337597] env[63028]: DEBUG nova.virt.hardware [None req-f30745a5-feb4-4f16-9a6c-560730babfce tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1139.344424] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-f30745a5-feb4-4f16-9a6c-560730babfce tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 092c7673-97fb-4085-852c-04a7c19a73e7] Reconfiguring VM instance instance-00000071 to detach disk 2000 {{(pid=63028) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1139.347420] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cbe49b44-b79c-4556-9216-77f66a678389 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.366301] env[63028]: DEBUG oslo_vmware.api [None req-f30745a5-feb4-4f16-9a6c-560730babfce tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Waiting for the task: (returnval){ [ 1139.366301] env[63028]: value = "task-2736400" [ 1139.366301] env[63028]: _type = "Task" [ 1139.366301] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1139.375840] env[63028]: DEBUG oslo_vmware.api [None req-f30745a5-feb4-4f16-9a6c-560730babfce tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2736400, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.412058] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6974e69-bdae-4680-bc41-970e36ea7108 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.419442] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-953bf04d-ede9-4885-b933-c0e290536d02 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.451577] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b555dca-8868-4a19-b0ee-2b9f0a1c0e49 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.459233] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1261af0-4c3d-49c2-aaf3-83d750b792f4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.473175] env[63028]: DEBUG nova.compute.provider_tree [None req-ed9a1c85-e0e1-4383-bc2f-0e3ab08d3b0d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Updating inventory in ProviderTree for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1139.483021] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2e044ff6-e42e-46f5-a0a3-f833f4cd7cda tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Acquiring lock "refresh_cache-b438b12e-874a-4883-b606-c28258e5a01a" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1139.483191] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2e044ff6-e42e-46f5-a0a3-f833f4cd7cda tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Acquired lock "refresh_cache-b438b12e-874a-4883-b606-c28258e5a01a" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1139.483323] env[63028]: DEBUG nova.network.neutron [None req-2e044ff6-e42e-46f5-a0a3-f833f4cd7cda tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1139.687446] env[63028]: DEBUG oslo_vmware.api [None req-eb1339d2-6ecb-478a-81a4-e07c347d959a tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': task-2736399, 'name': ReconfigVM_Task, 'duration_secs': 0.337862} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1139.687739] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-eb1339d2-6ecb-478a-81a4-e07c347d959a tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: 6865f832-d409-4b9b-8b6c-33b0bf07d2b2] Reconfigured VM instance instance-0000006c to detach disk 2001 {{(pid=63028) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1139.692356] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-10332e4f-16a1-4ff3-81df-d58662523feb {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.707010] env[63028]: DEBUG oslo_vmware.api [None req-eb1339d2-6ecb-478a-81a4-e07c347d959a tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Waiting for the task: (returnval){ [ 1139.707010] env[63028]: value = "task-2736401" [ 1139.707010] env[63028]: _type = "Task" [ 1139.707010] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1139.715171] env[63028]: DEBUG oslo_vmware.api [None req-eb1339d2-6ecb-478a-81a4-e07c347d959a tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': task-2736401, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.728864] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: 3fb46d02-7914-4d08-b63b-f3447ba1b81a] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1139.749956] env[63028]: DEBUG nova.compute.manager [None req-8095c027-a15e-4bdc-a3e5-37b8018eaa83 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: feb8d206-718d-423a-afff-76c6975934e6] Starting instance... {{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1139.876326] env[63028]: DEBUG oslo_vmware.api [None req-f30745a5-feb4-4f16-9a6c-560730babfce tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2736400, 'name': ReconfigVM_Task, 'duration_secs': 0.147948} completed successfully. 
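Note: tasks task-2736399, task-2736400 and task-2736402 above are all ReconfigVM_Task calls that either remove a disk device from a VM (detach disk 2001 / 2000) or attach a volume's VMDK back "with type thin". The dictionaries below are a plain-Python schematic of the config spec such a reconfigure carries (device operation add/remove, file backing, thin flag); they are stand-ins for the SOAP-backed VirtualMachineConfigSpec objects the driver really builds, not real pyVmomi/oslo.vmware structures.

```python
# Schematic reconfigure specs, mirroring what the ReconfigVM_Task entries above do.
# Plain dicts for illustration only.

def detach_disk_spec(device_key):
    """Spec that removes an existing virtual disk (e.g. disk 2001) from the VM."""
    return {
        "deviceChange": [{
            "operation": "remove",
            "device": {"key": device_key},  # device key reported by vCenter
        }]
    }


def attach_vmdk_spec(vmdk_path, controller_key, unit_number, thin=True):
    """Spec that attaches an existing VMDK (a Cinder volume) as a new disk."""
    return {
        "deviceChange": [{
            "operation": "add",
            "device": {
                "backing": {
                    "fileName": vmdk_path,    # e.g. "[datastore2] volume-.../volume-....vmdk"
                    "diskMode": "persistent",
                    "thinProvisioned": thin,  # "with type thin" in the log
                },
                "controllerKey": controller_key,
                "unitNumber": unit_number,
            },
        }]
    }
```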
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1139.876605] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-f30745a5-feb4-4f16-9a6c-560730babfce tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 092c7673-97fb-4085-852c-04a7c19a73e7] Reconfigured VM instance instance-00000071 to detach disk 2000 {{(pid=63028) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1139.877421] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a1438d8-d404-4218-b4ec-75327cdbbb8c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.900712] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-f30745a5-feb4-4f16-9a6c-560730babfce tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 092c7673-97fb-4085-852c-04a7c19a73e7] Reconfiguring VM instance instance-00000071 to attach disk [datastore2] volume-bc407df1-2bc3-4054-b5d6-b4b6863f2c75/volume-bc407df1-2bc3-4054-b5d6-b4b6863f2c75.vmdk or device None with type thin {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1139.901051] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-29bf0cd2-61c0-44a6-9f33-db7889010a2e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.919247] env[63028]: DEBUG oslo_vmware.api [None req-f30745a5-feb4-4f16-9a6c-560730babfce tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Waiting for the task: (returnval){ [ 1139.919247] env[63028]: value = "task-2736402" [ 1139.919247] env[63028]: _type = "Task" [ 1139.919247] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1139.926953] env[63028]: DEBUG oslo_vmware.api [None req-f30745a5-feb4-4f16-9a6c-560730babfce tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2736402, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.994613] env[63028]: ERROR nova.scheduler.client.report [None req-ed9a1c85-e0e1-4383-bc2f-0e3ab08d3b0d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [req-4f169a47-02e9-451e-ad95-e135bc1c6c8d] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-4f169a47-02e9-451e-ad95-e135bc1c6c8d"}]} [ 1140.012911] env[63028]: DEBUG nova.scheduler.client.report [None req-ed9a1c85-e0e1-4383-bc2f-0e3ab08d3b0d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Refreshing inventories for resource provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1140.014954] env[63028]: DEBUG nova.network.neutron [-] [instance: 899496ae-8463-42e0-a287-b141d956fa0a] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1140.020170] env[63028]: DEBUG nova.network.neutron [None req-2e044ff6-e42e-46f5-a0a3-f833f4cd7cda tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Instance cache missing network info. {{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1140.028876] env[63028]: DEBUG nova.scheduler.client.report [None req-ed9a1c85-e0e1-4383-bc2f-0e3ab08d3b0d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Updating ProviderTree inventory for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1140.029049] env[63028]: DEBUG nova.compute.provider_tree [None req-ed9a1c85-e0e1-4383-bc2f-0e3ab08d3b0d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Updating inventory in ProviderTree for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 110, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1140.041601] env[63028]: DEBUG nova.scheduler.client.report [None req-ed9a1c85-e0e1-4383-bc2f-0e3ab08d3b0d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Refreshing aggregate associations for resource provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2, aggregates: None {{(pid=63028) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1140.062739] env[63028]: DEBUG nova.scheduler.client.report [None req-ed9a1c85-e0e1-4383-bc2f-0e3ab08d3b0d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Refreshing trait associations for resource provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2, traits: 
HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=63028) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1140.166835] env[63028]: DEBUG nova.network.neutron [None req-2e044ff6-e42e-46f5-a0a3-f833f4cd7cda tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Updating instance_info_cache with network_info: [{"id": "51c6d9df-5b87-40a0-8c2b-5586869a3c0f", "address": "fa:16:3e:32:0e:40", "network": {"id": "49670a42-3caa-4492-8b32-f16f3fe77f4b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1158317332-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b4dcaef840f940bda057d0371cdc5adb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4df917f7-847a-4c0e-b0e3-69a52e4a1554", "external-id": "cl2-zone-457", "segmentation_id": 457, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap51c6d9df-5b", "ovs_interfaceid": "51c6d9df-5b87-40a0-8c2b-5586869a3c0f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1140.220613] env[63028]: DEBUG oslo_vmware.api [None req-eb1339d2-6ecb-478a-81a4-e07c347d959a tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': task-2736401, 'name': ReconfigVM_Task, 'duration_secs': 0.226023} completed successfully. 
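Note: the ERROR followed by the "Refreshing inventories / aggregate associations / trait associations" lines above is the normal Placement concurrency dance: an inventory PUT carries the resource provider generation, a stale generation is rejected with 409 placement.concurrent_update, and the client refreshes its view of the provider and retries (the retry later succeeds and bumps the generation from 160 to 161). The sketch below shows that compare-and-swap loop; the URL path and payload shape follow the Placement API as seen in the log, but get_json()/put_json() are assumed helper callables, not a real client.

```python
# Generation-guarded inventory update, as illustrated by the 409 + retry above.
# get_json()/put_json() are hypothetical helpers (e.g. thin wrappers around a
# keystoneauth session returning (status, body)).

def set_inventory(rp_uuid, inventories, get_json, put_json, max_retries=3):
    for _ in range(max_retries):
        # Fetch the provider's current generation before writing.
        current = get_json("/resource_providers/%s/inventories" % rp_uuid)
        payload = {
            "resource_provider_generation": current["resource_provider_generation"],
            "inventories": inventories,
        }
        status, body = put_json(
            "/resource_providers/%s/inventories" % rp_uuid, payload)
        if status == 200:
            return body  # carries the new generation (160 -> 161 in the log)
        if status == 409 and "placement.concurrent_update" in str(body):
            # Another writer bumped the generation first; refresh and retry.
            continue
        raise RuntimeError("inventory update failed: %s %s" % (status, body))
    raise RuntimeError("gave up after repeated generation conflicts")
```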
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1140.223005] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-eb1339d2-6ecb-478a-81a4-e07c347d959a tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: 6865f832-d409-4b9b-8b6c-33b0bf07d2b2] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-550882', 'volume_id': 'a3edae11-41bd-48cc-9a17-b3d918c822c4', 'name': 'volume-a3edae11-41bd-48cc-9a17-b3d918c822c4', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '6865f832-d409-4b9b-8b6c-33b0bf07d2b2', 'attached_at': '', 'detached_at': '', 'volume_id': 'a3edae11-41bd-48cc-9a17-b3d918c822c4', 'serial': 'a3edae11-41bd-48cc-9a17-b3d918c822c4'} {{(pid=63028) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1140.232395] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: c0693e4c-30b2-4eda-be1e-f6186d78038b] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1140.260460] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c551f991-e8c2-4894-8834-de2721c45b47 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.268972] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d633f80f-04ec-44f7-bf7d-524af5e01cae {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.272566] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8095c027-a15e-4bdc-a3e5-37b8018eaa83 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1140.299287] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4310310c-2755-424a-9727-058a0401751f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.306052] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-020c41fc-7fd1-4453-ac86-13b9990aa069 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.320090] env[63028]: DEBUG nova.compute.provider_tree [None req-ed9a1c85-e0e1-4383-bc2f-0e3ab08d3b0d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Updating inventory in ProviderTree for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1140.428961] env[63028]: DEBUG oslo_vmware.api [None 
req-f30745a5-feb4-4f16-9a6c-560730babfce tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2736402, 'name': ReconfigVM_Task, 'duration_secs': 0.307884} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1140.429226] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-f30745a5-feb4-4f16-9a6c-560730babfce tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 092c7673-97fb-4085-852c-04a7c19a73e7] Reconfigured VM instance instance-00000071 to attach disk [datastore2] volume-bc407df1-2bc3-4054-b5d6-b4b6863f2c75/volume-bc407df1-2bc3-4054-b5d6-b4b6863f2c75.vmdk or device None with type thin {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1140.429493] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-f30745a5-feb4-4f16-9a6c-560730babfce tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 092c7673-97fb-4085-852c-04a7c19a73e7] Updating instance '092c7673-97fb-4085-852c-04a7c19a73e7' progress to 50 {{(pid=63028) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1140.518142] env[63028]: INFO nova.compute.manager [-] [instance: 899496ae-8463-42e0-a287-b141d956fa0a] Took 1.89 seconds to deallocate network for instance. [ 1140.670404] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2e044ff6-e42e-46f5-a0a3-f833f4cd7cda tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Releasing lock "refresh_cache-b438b12e-874a-4883-b606-c28258e5a01a" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1140.670748] env[63028]: DEBUG nova.compute.manager [None req-2e044ff6-e42e-46f5-a0a3-f833f4cd7cda tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Instance network_info: |[{"id": "51c6d9df-5b87-40a0-8c2b-5586869a3c0f", "address": "fa:16:3e:32:0e:40", "network": {"id": "49670a42-3caa-4492-8b32-f16f3fe77f4b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1158317332-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b4dcaef840f940bda057d0371cdc5adb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4df917f7-847a-4c0e-b0e3-69a52e4a1554", "external-id": "cl2-zone-457", "segmentation_id": 457, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap51c6d9df-5b", "ovs_interfaceid": "51c6d9df-5b87-40a0-8c2b-5586869a3c0f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1140.671184] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-2e044ff6-e42e-46f5-a0a3-f833f4cd7cda tempest-ServerActionsTestOtherB-816222261 
tempest-ServerActionsTestOtherB-816222261-project-member] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:32:0e:40', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4df917f7-847a-4c0e-b0e3-69a52e4a1554', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '51c6d9df-5b87-40a0-8c2b-5586869a3c0f', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1140.678515] env[63028]: DEBUG oslo.service.loopingcall [None req-2e044ff6-e42e-46f5-a0a3-f833f4cd7cda tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1140.678723] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1140.678939] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9a7cffe4-a0e5-4afa-ae8e-85b992a8fa71 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.698794] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1140.698794] env[63028]: value = "task-2736403" [ 1140.698794] env[63028]: _type = "Task" [ 1140.698794] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1140.707100] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2736403, 'name': CreateVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.736504] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: c386c117-e255-4c3b-9a37-011e517277de] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1140.769623] env[63028]: DEBUG nova.objects.instance [None req-eb1339d2-6ecb-478a-81a4-e07c347d959a tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Lazy-loading 'flavor' on Instance uuid 6865f832-d409-4b9b-8b6c-33b0bf07d2b2 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1140.842087] env[63028]: DEBUG nova.compute.manager [req-04ab5ac8-d9aa-4d69-be41-7958b99f426f req-5bfc058d-3929-468b-a1a2-6abcabac6584 service nova] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Received event network-changed-51c6d9df-5b87-40a0-8c2b-5586869a3c0f {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1140.842317] env[63028]: DEBUG nova.compute.manager [req-04ab5ac8-d9aa-4d69-be41-7958b99f426f req-5bfc058d-3929-468b-a1a2-6abcabac6584 service nova] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Refreshing instance network info cache due to event network-changed-51c6d9df-5b87-40a0-8c2b-5586869a3c0f. 
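Note: the "Instance network_info" and "Instance VIF info" entries above show the translation the VMware driver performs at spawn time: the Neutron network_info element (port id, MAC address, nsx-logical-switch-id) is reduced to a small VIF dict whose network_ref points at the NSX OpaqueNetwork, with a vmxnet3 model. A simplified version of that mapping is sketched below; the function name and signature are illustrative only and do not match the driver's real code.

```python
def vif_info_from_network_info(vif, vif_model="vmxnet3"):
    """Reduce one Neutron network_info entry to a driver-style VIF dict.

    `vif` is expected to look like the JSON in the log: it has "id",
    "address", "network" -> "bridge", and details["nsx-logical-switch-id"].
    Illustrative only; the vmwareapi driver works with richer objects.
    """
    details = vif.get("details", {})
    switch_id = details.get("nsx-logical-switch-id")
    if switch_id:
        # NSX-backed port: reference the logical switch as an opaque network.
        network_ref = {
            "type": "OpaqueNetwork",
            "network-id": switch_id,
            "network-type": "nsx.LogicalSwitch",
            "use-external-id": True,
        }
    else:
        # Fallback: reference a port group by the bridge name.
        network_ref = {"type": "Network", "name": vif["network"]["bridge"]}
    return {
        "network_name": vif["network"]["bridge"],
        "mac_address": vif["address"],
        "network_ref": network_ref,
        "iface_id": vif["id"],
        "vif_model": vif_model,
    }


if __name__ == "__main__":
    sample = {
        "id": "51c6d9df-5b87-40a0-8c2b-5586869a3c0f",
        "address": "fa:16:3e:32:0e:40",
        "network": {"bridge": "br-int"},
        "details": {"nsx-logical-switch-id": "4df917f7-847a-4c0e-b0e3-69a52e4a1554"},
    }
    print(vif_info_from_network_info(sample))
```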
{{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1140.842533] env[63028]: DEBUG oslo_concurrency.lockutils [req-04ab5ac8-d9aa-4d69-be41-7958b99f426f req-5bfc058d-3929-468b-a1a2-6abcabac6584 service nova] Acquiring lock "refresh_cache-b438b12e-874a-4883-b606-c28258e5a01a" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1140.842678] env[63028]: DEBUG oslo_concurrency.lockutils [req-04ab5ac8-d9aa-4d69-be41-7958b99f426f req-5bfc058d-3929-468b-a1a2-6abcabac6584 service nova] Acquired lock "refresh_cache-b438b12e-874a-4883-b606-c28258e5a01a" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1140.842838] env[63028]: DEBUG nova.network.neutron [req-04ab5ac8-d9aa-4d69-be41-7958b99f426f req-5bfc058d-3929-468b-a1a2-6abcabac6584 service nova] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Refreshing network info cache for port 51c6d9df-5b87-40a0-8c2b-5586869a3c0f {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1140.849750] env[63028]: DEBUG nova.scheduler.client.report [None req-ed9a1c85-e0e1-4383-bc2f-0e3ab08d3b0d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Updated inventory for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 with generation 160 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1140.849994] env[63028]: DEBUG nova.compute.provider_tree [None req-ed9a1c85-e0e1-4383-bc2f-0e3ab08d3b0d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Updating resource provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 generation from 160 to 161 during operation: update_inventory {{(pid=63028) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1140.850195] env[63028]: DEBUG nova.compute.provider_tree [None req-ed9a1c85-e0e1-4383-bc2f-0e3ab08d3b0d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Updating inventory in ProviderTree for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1140.935393] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-648ddb98-6d41-4b3e-a601-15125fc3538c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.958918] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ed0001f-bac3-44b1-ac3a-d0cc643c9f67 {{(pid=63028) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.977691] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-f30745a5-feb4-4f16-9a6c-560730babfce tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 092c7673-97fb-4085-852c-04a7c19a73e7] Updating instance '092c7673-97fb-4085-852c-04a7c19a73e7' progress to 67 {{(pid=63028) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1141.024000] env[63028]: DEBUG oslo_concurrency.lockutils [None req-805a773c-4318-4883-8862-160c0c057105 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1141.210505] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2736403, 'name': CreateVM_Task, 'duration_secs': 0.303735} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1141.213039] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1141.213039] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2e044ff6-e42e-46f5-a0a3-f833f4cd7cda tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1141.213039] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2e044ff6-e42e-46f5-a0a3-f833f4cd7cda tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1141.213039] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2e044ff6-e42e-46f5-a0a3-f833f4cd7cda tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1141.213039] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-04c46d8c-13c1-49cc-b722-0cbc886ac04f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.216585] env[63028]: DEBUG oslo_vmware.api [None req-2e044ff6-e42e-46f5-a0a3-f833f4cd7cda tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Waiting for the task: (returnval){ [ 1141.216585] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52b9a80a-adac-7a7a-be77-95b63ab640e7" [ 1141.216585] env[63028]: _type = "Task" [ 1141.216585] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1141.223717] env[63028]: DEBUG oslo_vmware.api [None req-2e044ff6-e42e-46f5-a0a3-f833f4cd7cda tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52b9a80a-adac-7a7a-be77-95b63ab640e7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1141.239230] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: 56d6982d-9f76-4952-8c8b-f64b3c8d02fe] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1141.355435] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ed9a1c85-e0e1-4383-bc2f-0e3ab08d3b0d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 3.217s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1141.355571] env[63028]: INFO nova.compute.manager [None req-ed9a1c85-e0e1-4383-bc2f-0e3ab08d3b0d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: f96d4bcd-a032-4e4d-94e4-12d7013d5e3f] Migrating [ 1141.361876] env[63028]: DEBUG oslo_concurrency.lockutils [None req-b6027d2b-205b-45b7-8174-642af8052177 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.081s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1141.362109] env[63028]: DEBUG nova.objects.instance [None req-b6027d2b-205b-45b7-8174-642af8052177 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Lazy-loading 'resources' on Instance uuid 60d18f14-536a-4b0f-912b-21f3f5a30d28 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1141.548014] env[63028]: DEBUG nova.network.neutron [req-04ab5ac8-d9aa-4d69-be41-7958b99f426f req-5bfc058d-3929-468b-a1a2-6abcabac6584 service nova] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Updated VIF entry in instance network info cache for port 51c6d9df-5b87-40a0-8c2b-5586869a3c0f. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1141.548401] env[63028]: DEBUG nova.network.neutron [req-04ab5ac8-d9aa-4d69-be41-7958b99f426f req-5bfc058d-3929-468b-a1a2-6abcabac6584 service nova] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Updating instance_info_cache with network_info: [{"id": "51c6d9df-5b87-40a0-8c2b-5586869a3c0f", "address": "fa:16:3e:32:0e:40", "network": {"id": "49670a42-3caa-4492-8b32-f16f3fe77f4b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1158317332-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b4dcaef840f940bda057d0371cdc5adb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4df917f7-847a-4c0e-b0e3-69a52e4a1554", "external-id": "cl2-zone-457", "segmentation_id": 457, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap51c6d9df-5b", "ovs_interfaceid": "51c6d9df-5b87-40a0-8c2b-5586869a3c0f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1141.726676] env[63028]: DEBUG oslo_vmware.api [None req-2e044ff6-e42e-46f5-a0a3-f833f4cd7cda tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52b9a80a-adac-7a7a-be77-95b63ab640e7, 'name': SearchDatastore_Task, 'duration_secs': 0.009539} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1141.726934] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2e044ff6-e42e-46f5-a0a3-f833f4cd7cda tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1141.727190] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-2e044ff6-e42e-46f5-a0a3-f833f4cd7cda tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1141.727433] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2e044ff6-e42e-46f5-a0a3-f833f4cd7cda tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1141.727582] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2e044ff6-e42e-46f5-a0a3-f833f4cd7cda tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1141.727761] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-2e044ff6-e42e-46f5-a0a3-f833f4cd7cda tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1141.728036] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7ab1bd5d-3a9f-43fb-9f4e-c155846714b9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.736066] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-2e044ff6-e42e-46f5-a0a3-f833f4cd7cda tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1141.736241] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-2e044ff6-e42e-46f5-a0a3-f833f4cd7cda tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1141.736912] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0efd2a7f-2478-4553-b828-b659588049ef {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.741804] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: 455578fa-7468-40dc-8c0a-37ac35e5c0a0] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1141.743604] env[63028]: DEBUG oslo_vmware.api [None req-2e044ff6-e42e-46f5-a0a3-f833f4cd7cda tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Waiting for the task: (returnval){ [ 1141.743604] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52af3e69-500f-c4ba-689d-004f48f6fbcb" [ 1141.743604] env[63028]: _type = "Task" [ 1141.743604] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1141.751021] env[63028]: DEBUG oslo_vmware.api [None req-2e044ff6-e42e-46f5-a0a3-f833f4cd7cda tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52af3e69-500f-c4ba-689d-004f48f6fbcb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1141.778065] env[63028]: DEBUG oslo_concurrency.lockutils [None req-eb1339d2-6ecb-478a-81a4-e07c347d959a tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Lock "6865f832-d409-4b9b-8b6c-33b0bf07d2b2" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.239s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1141.873393] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ed9a1c85-e0e1-4383-bc2f-0e3ab08d3b0d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Acquiring lock "refresh_cache-f96d4bcd-a032-4e4d-94e4-12d7013d5e3f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1141.873571] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ed9a1c85-e0e1-4383-bc2f-0e3ab08d3b0d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Acquired lock "refresh_cache-f96d4bcd-a032-4e4d-94e4-12d7013d5e3f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1141.873747] env[63028]: DEBUG nova.network.neutron [None req-ed9a1c85-e0e1-4383-bc2f-0e3ab08d3b0d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: f96d4bcd-a032-4e4d-94e4-12d7013d5e3f] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1142.051501] env[63028]: DEBUG oslo_concurrency.lockutils [req-04ab5ac8-d9aa-4d69-be41-7958b99f426f req-5bfc058d-3929-468b-a1a2-6abcabac6584 service nova] Releasing lock "refresh_cache-b438b12e-874a-4883-b606-c28258e5a01a" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 
1142.051745] env[63028]: DEBUG nova.compute.manager [req-04ab5ac8-d9aa-4d69-be41-7958b99f426f req-5bfc058d-3929-468b-a1a2-6abcabac6584 service nova] [instance: 899496ae-8463-42e0-a287-b141d956fa0a] Received event network-vif-deleted-197b3459-f9f1-4fe3-a9ad-169350b4d637 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1142.068051] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25f0dcf5-f029-4e10-b19b-7f66bd624dcd {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.075218] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b67acc2-4f95-4b13-9e2d-b6b4ed26604e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.108386] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c65c56d2-95ed-49a0-b880-985d8f9cc409 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.116278] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cab9bf47-408d-400c-85cd-d092870afe1d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.130014] env[63028]: DEBUG nova.compute.provider_tree [None req-b6027d2b-205b-45b7-8174-642af8052177 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1142.248636] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: e7ba7069-fe9f-4e0f-acc6-c1d61ad5a8d9] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1142.256438] env[63028]: DEBUG oslo_vmware.api [None req-2e044ff6-e42e-46f5-a0a3-f833f4cd7cda tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52af3e69-500f-c4ba-689d-004f48f6fbcb, 'name': SearchDatastore_Task, 'duration_secs': 0.008225} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1142.257231] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a1a5c01e-44e4-40a6-abab-ad6e78aece3b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.262144] env[63028]: DEBUG oslo_vmware.api [None req-2e044ff6-e42e-46f5-a0a3-f833f4cd7cda tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Waiting for the task: (returnval){ [ 1142.262144] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]525158a8-ea40-14e8-b9cc-b5c628a00503" [ 1142.262144] env[63028]: _type = "Task" [ 1142.262144] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1142.269574] env[63028]: DEBUG oslo_vmware.api [None req-2e044ff6-e42e-46f5-a0a3-f833f4cd7cda tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]525158a8-ea40-14e8-b9cc-b5c628a00503, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.558163] env[63028]: DEBUG nova.network.neutron [None req-ed9a1c85-e0e1-4383-bc2f-0e3ab08d3b0d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: f96d4bcd-a032-4e4d-94e4-12d7013d5e3f] Updating instance_info_cache with network_info: [{"id": "f5759890-6244-4b8a-9a03-6d628f2441b7", "address": "fa:16:3e:61:25:8b", "network": {"id": "9c32515c-66bf-4a5f-9011-975fe2f6b264", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1101331936-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8efc6d89903c454eb39136a76e0adef5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92fe29b3-0907-453d-aabb-5559c4bd7c0f", "external-id": "nsx-vlan-transportzone-482", "segmentation_id": 482, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf5759890-62", "ovs_interfaceid": "f5759890-6244-4b8a-9a03-6d628f2441b7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1142.609794] env[63028]: DEBUG nova.network.neutron [None req-f30745a5-feb4-4f16-9a6c-560730babfce tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 092c7673-97fb-4085-852c-04a7c19a73e7] Port eda2613a-55b1-4516-80ce-192d52a6abe6 binding to destination host cpu-1 is already ACTIVE {{(pid=63028) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1142.635731] env[63028]: DEBUG nova.scheduler.client.report [None req-b6027d2b-205b-45b7-8174-642af8052177 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1142.752592] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: a97224e8-d69b-4c62-ab96-7cef037ef39b] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes 
/opt/stack/nova/nova/compute/manager.py:11758}} [ 1142.775289] env[63028]: DEBUG oslo_vmware.api [None req-2e044ff6-e42e-46f5-a0a3-f833f4cd7cda tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]525158a8-ea40-14e8-b9cc-b5c628a00503, 'name': SearchDatastore_Task, 'duration_secs': 0.009543} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1142.776179] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2e044ff6-e42e-46f5-a0a3-f833f4cd7cda tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1142.776459] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e044ff6-e42e-46f5-a0a3-f833f4cd7cda tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] b438b12e-874a-4883-b606-c28258e5a01a/b438b12e-874a-4883-b606-c28258e5a01a.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1142.776729] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a4a4708e-b0eb-4e3a-9ed2-6d8c3e3b1cd5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.783623] env[63028]: DEBUG oslo_vmware.api [None req-2e044ff6-e42e-46f5-a0a3-f833f4cd7cda tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Waiting for the task: (returnval){ [ 1142.783623] env[63028]: value = "task-2736404" [ 1142.783623] env[63028]: _type = "Task" [ 1142.783623] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1142.791366] env[63028]: DEBUG oslo_vmware.api [None req-2e044ff6-e42e-46f5-a0a3-f833f4cd7cda tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2736404, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.796030] env[63028]: DEBUG oslo_concurrency.lockutils [None req-3b8be475-87c0-4bb0-9306-253deff7c463 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Acquiring lock "6865f832-d409-4b9b-8b6c-33b0bf07d2b2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1142.796268] env[63028]: DEBUG oslo_concurrency.lockutils [None req-3b8be475-87c0-4bb0-9306-253deff7c463 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Lock "6865f832-d409-4b9b-8b6c-33b0bf07d2b2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1142.796460] env[63028]: DEBUG oslo_concurrency.lockutils [None req-3b8be475-87c0-4bb0-9306-253deff7c463 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Acquiring lock "6865f832-d409-4b9b-8b6c-33b0bf07d2b2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1142.796644] env[63028]: DEBUG oslo_concurrency.lockutils [None req-3b8be475-87c0-4bb0-9306-253deff7c463 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Lock "6865f832-d409-4b9b-8b6c-33b0bf07d2b2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1142.796824] env[63028]: DEBUG oslo_concurrency.lockutils [None req-3b8be475-87c0-4bb0-9306-253deff7c463 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Lock "6865f832-d409-4b9b-8b6c-33b0bf07d2b2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1142.798800] env[63028]: INFO nova.compute.manager [None req-3b8be475-87c0-4bb0-9306-253deff7c463 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: 6865f832-d409-4b9b-8b6c-33b0bf07d2b2] Terminating instance [ 1143.061775] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ed9a1c85-e0e1-4383-bc2f-0e3ab08d3b0d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Releasing lock "refresh_cache-f96d4bcd-a032-4e4d-94e4-12d7013d5e3f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1143.140568] env[63028]: DEBUG oslo_concurrency.lockutils [None req-b6027d2b-205b-45b7-8174-642af8052177 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.779s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1143.144143] env[63028]: DEBUG 
oslo_concurrency.lockutils [None req-8095c027-a15e-4bdc-a3e5-37b8018eaa83 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.871s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1143.145812] env[63028]: INFO nova.compute.claims [None req-8095c027-a15e-4bdc-a3e5-37b8018eaa83 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: feb8d206-718d-423a-afff-76c6975934e6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1143.170021] env[63028]: INFO nova.scheduler.client.report [None req-b6027d2b-205b-45b7-8174-642af8052177 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Deleted allocations for instance 60d18f14-536a-4b0f-912b-21f3f5a30d28 [ 1143.255993] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: bb2b405e-6207-4718-9485-0271d26c160f] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1143.293478] env[63028]: DEBUG oslo_vmware.api [None req-2e044ff6-e42e-46f5-a0a3-f833f4cd7cda tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2736404, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.493673} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1143.293717] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e044ff6-e42e-46f5-a0a3-f833f4cd7cda tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore2] b438b12e-874a-4883-b606-c28258e5a01a/b438b12e-874a-4883-b606-c28258e5a01a.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1143.293925] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-2e044ff6-e42e-46f5-a0a3-f833f4cd7cda tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1143.294177] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d21d5cd8-6ce2-4659-a0f5-5626863405c7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.299669] env[63028]: DEBUG oslo_vmware.api [None req-2e044ff6-e42e-46f5-a0a3-f833f4cd7cda tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Waiting for the task: (returnval){ [ 1143.299669] env[63028]: value = "task-2736405" [ 1143.299669] env[63028]: _type = "Task" [ 1143.299669] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1143.302845] env[63028]: DEBUG nova.compute.manager [None req-3b8be475-87c0-4bb0-9306-253deff7c463 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: 6865f832-d409-4b9b-8b6c-33b0bf07d2b2] Start destroying the instance on the hypervisor. {{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1143.303041] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-3b8be475-87c0-4bb0-9306-253deff7c463 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: 6865f832-d409-4b9b-8b6c-33b0bf07d2b2] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1143.303914] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44952a3b-0488-4794-8493-c1824c9b80ad {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.310968] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b8be475-87c0-4bb0-9306-253deff7c463 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: 6865f832-d409-4b9b-8b6c-33b0bf07d2b2] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1143.313977] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-697e1ee4-2d10-4ff9-8314-cac456f0b1e1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.315640] env[63028]: DEBUG oslo_vmware.api [None req-2e044ff6-e42e-46f5-a0a3-f833f4cd7cda tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2736405, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.320117] env[63028]: DEBUG oslo_vmware.api [None req-3b8be475-87c0-4bb0-9306-253deff7c463 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Waiting for the task: (returnval){ [ 1143.320117] env[63028]: value = "task-2736406" [ 1143.320117] env[63028]: _type = "Task" [ 1143.320117] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1143.328482] env[63028]: DEBUG oslo_vmware.api [None req-3b8be475-87c0-4bb0-9306-253deff7c463 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': task-2736406, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.632039] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f30745a5-feb4-4f16-9a6c-560730babfce tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Acquiring lock "092c7673-97fb-4085-852c-04a7c19a73e7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1143.632039] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f30745a5-feb4-4f16-9a6c-560730babfce tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Lock "092c7673-97fb-4085-852c-04a7c19a73e7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1143.632039] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f30745a5-feb4-4f16-9a6c-560730babfce tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Lock "092c7673-97fb-4085-852c-04a7c19a73e7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1143.679246] env[63028]: DEBUG oslo_concurrency.lockutils [None req-b6027d2b-205b-45b7-8174-642af8052177 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Lock "60d18f14-536a-4b0f-912b-21f3f5a30d28" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.584s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1143.760188] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: 672695c2-06f3-4790-a459-4b575baf29d3] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1143.809983] env[63028]: DEBUG oslo_vmware.api [None req-2e044ff6-e42e-46f5-a0a3-f833f4cd7cda tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2736405, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066991} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1143.810264] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-2e044ff6-e42e-46f5-a0a3-f833f4cd7cda tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1143.811032] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0e74692-5ae2-4602-a814-cd7ca0c551d4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.832639] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-2e044ff6-e42e-46f5-a0a3-f833f4cd7cda tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Reconfiguring VM instance instance-00000074 to attach disk [datastore2] b438b12e-874a-4883-b606-c28258e5a01a/b438b12e-874a-4883-b606-c28258e5a01a.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1143.835570] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-53c311aa-ba09-44ef-bf54-c386254592dd {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.853817] env[63028]: DEBUG oslo_vmware.api [None req-3b8be475-87c0-4bb0-9306-253deff7c463 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': task-2736406, 'name': PowerOffVM_Task, 'duration_secs': 0.196525} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1143.854975] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b8be475-87c0-4bb0-9306-253deff7c463 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: 6865f832-d409-4b9b-8b6c-33b0bf07d2b2] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1143.855163] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-3b8be475-87c0-4bb0-9306-253deff7c463 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: 6865f832-d409-4b9b-8b6c-33b0bf07d2b2] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1143.855479] env[63028]: DEBUG oslo_vmware.api [None req-2e044ff6-e42e-46f5-a0a3-f833f4cd7cda tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Waiting for the task: (returnval){ [ 1143.855479] env[63028]: value = "task-2736407" [ 1143.855479] env[63028]: _type = "Task" [ 1143.855479] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1143.855671] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-db3166ae-6981-419b-867b-763a9befeb55 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.865141] env[63028]: DEBUG oslo_vmware.api [None req-2e044ff6-e42e-46f5-a0a3-f833f4cd7cda tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2736407, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.920049] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-3b8be475-87c0-4bb0-9306-253deff7c463 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: 6865f832-d409-4b9b-8b6c-33b0bf07d2b2] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1143.920428] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-3b8be475-87c0-4bb0-9306-253deff7c463 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: 6865f832-d409-4b9b-8b6c-33b0bf07d2b2] Deleting contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1143.920690] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-3b8be475-87c0-4bb0-9306-253deff7c463 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Deleting the datastore file [datastore1] 6865f832-d409-4b9b-8b6c-33b0bf07d2b2 {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1143.921073] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-01c83e04-69a3-48f9-8f76-6f76f8ab7565 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.928425] env[63028]: DEBUG oslo_vmware.api [None req-3b8be475-87c0-4bb0-9306-253deff7c463 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Waiting for the task: (returnval){ [ 1143.928425] env[63028]: value = "task-2736409" [ 1143.928425] env[63028]: _type = "Task" [ 1143.928425] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1143.937058] env[63028]: DEBUG oslo_vmware.api [None req-3b8be475-87c0-4bb0-9306-253deff7c463 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': task-2736409, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.266029] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: 8bb61bfa-d44e-4e06-867a-445d9b3db660] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1144.333305] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68839be0-f86c-4f17-82f2-23e78f9e19d2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.340786] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b742c9a5-2a5a-4ee4-9c02-4a4d09a048e7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.375142] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22b34b1c-c15b-4235-8ad2-28d8fc646fe6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.382365] env[63028]: DEBUG oslo_vmware.api [None req-2e044ff6-e42e-46f5-a0a3-f833f4cd7cda tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2736407, 'name': ReconfigVM_Task, 'duration_secs': 0.300736} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1144.384419] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-2e044ff6-e42e-46f5-a0a3-f833f4cd7cda tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Reconfigured VM instance instance-00000074 to attach disk [datastore2] b438b12e-874a-4883-b606-c28258e5a01a/b438b12e-874a-4883-b606-c28258e5a01a.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1144.385091] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cae7b0df-39df-4f7c-ae96-a14079ca250b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.387535] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd571e41-5fc9-4771-9aad-e68d588e4595 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.396131] env[63028]: DEBUG oslo_vmware.api [None req-2e044ff6-e42e-46f5-a0a3-f833f4cd7cda tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Waiting for the task: (returnval){ [ 1144.396131] env[63028]: value = "task-2736410" [ 1144.396131] env[63028]: _type = "Task" [ 1144.396131] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1144.404407] env[63028]: DEBUG nova.compute.provider_tree [None req-8095c027-a15e-4bdc-a3e5-37b8018eaa83 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1144.418885] env[63028]: DEBUG oslo_vmware.api [None req-2e044ff6-e42e-46f5-a0a3-f833f4cd7cda tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2736410, 'name': Rename_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.438582] env[63028]: DEBUG oslo_vmware.api [None req-3b8be475-87c0-4bb0-9306-253deff7c463 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': task-2736409, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.166428} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1144.438818] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-3b8be475-87c0-4bb0-9306-253deff7c463 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1144.438999] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-3b8be475-87c0-4bb0-9306-253deff7c463 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: 6865f832-d409-4b9b-8b6c-33b0bf07d2b2] Deleted contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1144.439186] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-3b8be475-87c0-4bb0-9306-253deff7c463 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: 6865f832-d409-4b9b-8b6c-33b0bf07d2b2] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1144.439356] env[63028]: INFO nova.compute.manager [None req-3b8be475-87c0-4bb0-9306-253deff7c463 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: 6865f832-d409-4b9b-8b6c-33b0bf07d2b2] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1144.439590] env[63028]: DEBUG oslo.service.loopingcall [None req-3b8be475-87c0-4bb0-9306-253deff7c463 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1144.439771] env[63028]: DEBUG nova.compute.manager [-] [instance: 6865f832-d409-4b9b-8b6c-33b0bf07d2b2] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1144.439863] env[63028]: DEBUG nova.network.neutron [-] [instance: 6865f832-d409-4b9b-8b6c-33b0bf07d2b2] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1144.576978] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b094aeb7-e61d-4bb3-8609-1d7f500d766a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.595584] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-ed9a1c85-e0e1-4383-bc2f-0e3ab08d3b0d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: f96d4bcd-a032-4e4d-94e4-12d7013d5e3f] Updating instance 'f96d4bcd-a032-4e4d-94e4-12d7013d5e3f' progress to 0 {{(pid=63028) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1144.689491] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f30745a5-feb4-4f16-9a6c-560730babfce tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Acquiring lock "refresh_cache-092c7673-97fb-4085-852c-04a7c19a73e7" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1144.689707] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f30745a5-feb4-4f16-9a6c-560730babfce tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Acquired lock "refresh_cache-092c7673-97fb-4085-852c-04a7c19a73e7" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1144.689909] env[63028]: DEBUG nova.network.neutron [None req-f30745a5-feb4-4f16-9a6c-560730babfce tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 092c7673-97fb-4085-852c-04a7c19a73e7] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1144.768527] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: 8f621e7b-0c76-4f70-830d-09d28a2e0736] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1144.914084] env[63028]: DEBUG nova.scheduler.client.report [None req-8095c027-a15e-4bdc-a3e5-37b8018eaa83 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1144.918290] env[63028]: DEBUG oslo_vmware.api [None req-2e044ff6-e42e-46f5-a0a3-f833f4cd7cda tempest-ServerActionsTestOtherB-816222261 
tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2736410, 'name': Rename_Task} progress is 99%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.103022] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed9a1c85-e0e1-4383-bc2f-0e3ab08d3b0d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: f96d4bcd-a032-4e4d-94e4-12d7013d5e3f] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1145.103022] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c8bd74f7-df2f-41d3-ba65-ed568cf70aef {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.112083] env[63028]: DEBUG oslo_vmware.api [None req-ed9a1c85-e0e1-4383-bc2f-0e3ab08d3b0d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Waiting for the task: (returnval){ [ 1145.112083] env[63028]: value = "task-2736411" [ 1145.112083] env[63028]: _type = "Task" [ 1145.112083] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1145.120207] env[63028]: DEBUG oslo_vmware.api [None req-ed9a1c85-e0e1-4383-bc2f-0e3ab08d3b0d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2736411, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.214850] env[63028]: DEBUG nova.compute.manager [None req-18406ad5-0840-4895-9de8-675e40b55c28 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7] Stashing vm_state: active {{(pid=63028) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1145.271320] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: 46dc76bc-854f-46ad-9db5-21cf6f40fb21] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1145.305580] env[63028]: DEBUG nova.compute.manager [req-12a7a072-b7c2-4a93-99d0-9890d042c031 req-8b5ee910-772c-4665-a74f-6cde30a584ed service nova] [instance: 6865f832-d409-4b9b-8b6c-33b0bf07d2b2] Received event network-vif-deleted-4c30382c-2b94-4990-b3d6-533480eb847b {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1145.307074] env[63028]: INFO nova.compute.manager [req-12a7a072-b7c2-4a93-99d0-9890d042c031 req-8b5ee910-772c-4665-a74f-6cde30a584ed service nova] [instance: 6865f832-d409-4b9b-8b6c-33b0bf07d2b2] Neutron deleted interface 4c30382c-2b94-4990-b3d6-533480eb847b; detaching it from the instance and deleting it from the info cache [ 1145.307074] env[63028]: DEBUG nova.network.neutron [req-12a7a072-b7c2-4a93-99d0-9890d042c031 req-8b5ee910-772c-4665-a74f-6cde30a584ed service nova] [instance: 6865f832-d409-4b9b-8b6c-33b0bf07d2b2] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1145.417848] env[63028]: DEBUG oslo_vmware.api [None req-2e044ff6-e42e-46f5-a0a3-f833f4cd7cda tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': 
task-2736410, 'name': Rename_Task} progress is 99%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.420065] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8095c027-a15e-4bdc-a3e5-37b8018eaa83 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.277s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1145.420817] env[63028]: DEBUG nova.compute.manager [None req-8095c027-a15e-4bdc-a3e5-37b8018eaa83 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: feb8d206-718d-423a-afff-76c6975934e6] Start building networks asynchronously for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1145.424230] env[63028]: DEBUG oslo_concurrency.lockutils [None req-805a773c-4318-4883-8862-160c0c057105 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.400s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1145.424621] env[63028]: DEBUG nova.objects.instance [None req-805a773c-4318-4883-8862-160c0c057105 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Lazy-loading 'resources' on Instance uuid 899496ae-8463-42e0-a287-b141d956fa0a {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1145.472889] env[63028]: DEBUG nova.network.neutron [None req-f30745a5-feb4-4f16-9a6c-560730babfce tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 092c7673-97fb-4085-852c-04a7c19a73e7] Updating instance_info_cache with network_info: [{"id": "eda2613a-55b1-4516-80ce-192d52a6abe6", "address": "fa:16:3e:43:ad:63", "network": {"id": "a3d9dede-4b44-4172-9541-24cc42ad633f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-595575190-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.168", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "847e89af959a4266ab55c1d2106ba8fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8459aaf-d6a8-46fb-ad14-464ac3104695", "external-id": "nsx-vlan-transportzone-46", "segmentation_id": 46, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeda2613a-55", "ovs_interfaceid": "eda2613a-55b1-4516-80ce-192d52a6abe6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1145.620139] env[63028]: DEBUG oslo_vmware.api [None req-ed9a1c85-e0e1-4383-bc2f-0e3ab08d3b0d tempest-DeleteServersTestJSON-1541253569 
tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2736411, 'name': PowerOffVM_Task, 'duration_secs': 0.192034} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1145.620348] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed9a1c85-e0e1-4383-bc2f-0e3ab08d3b0d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: f96d4bcd-a032-4e4d-94e4-12d7013d5e3f] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1145.620533] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-ed9a1c85-e0e1-4383-bc2f-0e3ab08d3b0d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: f96d4bcd-a032-4e4d-94e4-12d7013d5e3f] Updating instance 'f96d4bcd-a032-4e4d-94e4-12d7013d5e3f' progress to 17 {{(pid=63028) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1145.642415] env[63028]: DEBUG nova.network.neutron [-] [instance: 6865f832-d409-4b9b-8b6c-33b0bf07d2b2] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1145.735299] env[63028]: DEBUG oslo_concurrency.lockutils [None req-18406ad5-0840-4895-9de8-675e40b55c28 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1145.776197] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: addcf94a-1a56-49ff-8adb-3ce7f2d1e09e] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1145.809340] env[63028]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e8820350-5c46-45fa-8978-e6be4771522a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.818440] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3fbccb3-2a79-4d3a-a0d4-c54e2219bf9c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.854388] env[63028]: DEBUG nova.compute.manager [req-12a7a072-b7c2-4a93-99d0-9890d042c031 req-8b5ee910-772c-4665-a74f-6cde30a584ed service nova] [instance: 6865f832-d409-4b9b-8b6c-33b0bf07d2b2] Detach interface failed, port_id=4c30382c-2b94-4990-b3d6-533480eb847b, reason: Instance 6865f832-d409-4b9b-8b6c-33b0bf07d2b2 could not be found. {{(pid=63028) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1145.913195] env[63028]: DEBUG oslo_vmware.api [None req-2e044ff6-e42e-46f5-a0a3-f833f4cd7cda tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2736410, 'name': Rename_Task, 'duration_secs': 1.20225} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1145.913425] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e044ff6-e42e-46f5-a0a3-f833f4cd7cda tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1145.913673] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e5701bdb-0dc8-4544-a4f1-6a939116d719 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.918911] env[63028]: DEBUG oslo_vmware.api [None req-2e044ff6-e42e-46f5-a0a3-f833f4cd7cda tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Waiting for the task: (returnval){ [ 1145.918911] env[63028]: value = "task-2736412" [ 1145.918911] env[63028]: _type = "Task" [ 1145.918911] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1145.927067] env[63028]: DEBUG nova.compute.utils [None req-8095c027-a15e-4bdc-a3e5-37b8018eaa83 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1145.928220] env[63028]: DEBUG oslo_vmware.api [None req-2e044ff6-e42e-46f5-a0a3-f833f4cd7cda tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2736412, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.928491] env[63028]: DEBUG nova.compute.manager [None req-8095c027-a15e-4bdc-a3e5-37b8018eaa83 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: feb8d206-718d-423a-afff-76c6975934e6] Allocating IP information in the background. 
{{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1145.928652] env[63028]: DEBUG nova.network.neutron [None req-8095c027-a15e-4bdc-a3e5-37b8018eaa83 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: feb8d206-718d-423a-afff-76c6975934e6] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1145.969838] env[63028]: DEBUG nova.policy [None req-8095c027-a15e-4bdc-a3e5-37b8018eaa83 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3c022ca18b0a41ce9d790fa25f6ebf8c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ea26842446ec4691a6456a6659188704', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 1145.975444] env[63028]: DEBUG oslo_concurrency.lockutils [None req-f30745a5-feb4-4f16-9a6c-560730babfce tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Releasing lock "refresh_cache-092c7673-97fb-4085-852c-04a7c19a73e7" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1146.126506] env[63028]: DEBUG nova.virt.hardware [None req-ed9a1c85-e0e1-4383-bc2f-0e3ab08d3b0d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1146.126755] env[63028]: DEBUG nova.virt.hardware [None req-ed9a1c85-e0e1-4383-bc2f-0e3ab08d3b0d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1146.126918] env[63028]: DEBUG nova.virt.hardware [None req-ed9a1c85-e0e1-4383-bc2f-0e3ab08d3b0d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1146.127117] env[63028]: DEBUG nova.virt.hardware [None req-ed9a1c85-e0e1-4383-bc2f-0e3ab08d3b0d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1146.127370] env[63028]: DEBUG nova.virt.hardware [None req-ed9a1c85-e0e1-4383-bc2f-0e3ab08d3b0d tempest-DeleteServersTestJSON-1541253569 
tempest-DeleteServersTestJSON-1541253569-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1146.127436] env[63028]: DEBUG nova.virt.hardware [None req-ed9a1c85-e0e1-4383-bc2f-0e3ab08d3b0d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1146.127608] env[63028]: DEBUG nova.virt.hardware [None req-ed9a1c85-e0e1-4383-bc2f-0e3ab08d3b0d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1146.127768] env[63028]: DEBUG nova.virt.hardware [None req-ed9a1c85-e0e1-4383-bc2f-0e3ab08d3b0d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1146.127932] env[63028]: DEBUG nova.virt.hardware [None req-ed9a1c85-e0e1-4383-bc2f-0e3ab08d3b0d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1146.128105] env[63028]: DEBUG nova.virt.hardware [None req-ed9a1c85-e0e1-4383-bc2f-0e3ab08d3b0d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1146.128313] env[63028]: DEBUG nova.virt.hardware [None req-ed9a1c85-e0e1-4383-bc2f-0e3ab08d3b0d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1146.134107] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b0fb4bf5-5bc0-497c-8976-024950a5b08f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.145252] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8466a5f8-f919-453b-8a33-746cc435300a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.147991] env[63028]: INFO nova.compute.manager [-] [instance: 6865f832-d409-4b9b-8b6c-33b0bf07d2b2] Took 1.71 seconds to deallocate network for instance. [ 1146.156433] env[63028]: DEBUG oslo_vmware.api [None req-ed9a1c85-e0e1-4383-bc2f-0e3ab08d3b0d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Waiting for the task: (returnval){ [ 1146.156433] env[63028]: value = "task-2736413" [ 1146.156433] env[63028]: _type = "Task" [ 1146.156433] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1146.157412] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df9c3707-db24-46ab-acbf-95e87f9a3ec1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.169410] env[63028]: DEBUG oslo_vmware.api [None req-ed9a1c85-e0e1-4383-bc2f-0e3ab08d3b0d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2736413, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1146.196457] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b522260-db9a-4e81-a59f-7f87974b8243 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.205696] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c99715c-a5f2-4be9-bab0-15a30c8f75b4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.222611] env[63028]: DEBUG nova.compute.provider_tree [None req-805a773c-4318-4883-8862-160c0c057105 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1146.248175] env[63028]: DEBUG nova.network.neutron [None req-8095c027-a15e-4bdc-a3e5-37b8018eaa83 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: feb8d206-718d-423a-afff-76c6975934e6] Successfully created port: 6e773949-46af-40ce-a861-613b26d282ab {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1146.279675] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: ed872f21-c2c4-4597-8c9e-9f8d2202b707] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1146.430178] env[63028]: DEBUG oslo_vmware.api [None req-2e044ff6-e42e-46f5-a0a3-f833f4cd7cda tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2736412, 'name': PowerOnVM_Task, 'duration_secs': 0.455921} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1146.430178] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e044ff6-e42e-46f5-a0a3-f833f4cd7cda tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1146.430178] env[63028]: INFO nova.compute.manager [None req-2e044ff6-e42e-46f5-a0a3-f833f4cd7cda tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Took 8.13 seconds to spawn the instance on the hypervisor. 
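The `Waiting for the task: (returnval){ value = "task-..." }` and `Task: {...} progress is N%` / `completed successfully` entries above show the vmwareapi driver handing a vSphere task to oslo.vmware and polling it until it finishes (progress lines come from api.py:434, the completion line from api.py:444). Below is a minimal, self-contained sketch of that poll-until-done pattern; it is an illustration only, not oslo.vmware's actual `wait_for_task` implementation, and the `fetch_task_state` callable, interval, and timeout are assumptions.

```python
import time

def poll_task(fetch_task_state, task_id, interval=0.5, timeout=60.0):
    """Poll a task until it succeeds or errors, echoing the progress and
    completion messages seen at oslo_vmware/api.py:434 and :444 above."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        # Assumed callable standing in for a vCenter TaskInfo query;
        # returns a (state, progress_percent) tuple.
        state, progress = fetch_task_state(task_id)
        if state == "success":
            print(f"Task {task_id} completed successfully.")
            return
        if state == "error":
            raise RuntimeError(f"Task {task_id} failed")
        print(f"Task {task_id} progress is {progress}%.")
        time.sleep(interval)
    raise TimeoutError(f"Task {task_id} did not complete within {timeout}s")

# Example with a fake task that finishes on the third poll:
states = iter([("running", 0), ("running", 66), ("success", 100)])
poll_task(lambda _tid: next(states), "task-0000001", interval=0.01)
```

In the real driver the session object performs this loop against the vSphere TaskInfo with a configurable poll interval, which is why each task (PowerOnVM_Task, ReconfigVM_Task, Rename_Task) produces a short run of progress lines before its completion entry.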
[ 1146.430609] env[63028]: DEBUG nova.compute.manager [None req-2e044ff6-e42e-46f5-a0a3-f833f4cd7cda tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1146.431634] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce79fc9a-928b-4d71-aa1a-52710b7d4e17 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.435038] env[63028]: DEBUG nova.compute.manager [None req-8095c027-a15e-4bdc-a3e5-37b8018eaa83 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: feb8d206-718d-423a-afff-76c6975934e6] Start building block device mappings for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1146.485672] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf1068c4-3ed0-4ff7-a2a8-16a9ba807108 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.493746] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd1f1446-b10f-4a86-a2d2-c5042b591f44 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.654553] env[63028]: DEBUG oslo_concurrency.lockutils [None req-3b8be475-87c0-4bb0-9306-253deff7c463 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1146.669718] env[63028]: DEBUG oslo_vmware.api [None req-ed9a1c85-e0e1-4383-bc2f-0e3ab08d3b0d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2736413, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1146.724878] env[63028]: DEBUG nova.scheduler.client.report [None req-805a773c-4318-4883-8862-160c0c057105 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1146.783562] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: b77ba7d6-305e-4b60-a4b7-9353c12c3920] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1146.956058] env[63028]: INFO nova.compute.manager [None req-2e044ff6-e42e-46f5-a0a3-f833f4cd7cda tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Took 13.27 seconds to build instance. [ 1147.171294] env[63028]: DEBUG oslo_vmware.api [None req-ed9a1c85-e0e1-4383-bc2f-0e3ab08d3b0d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2736413, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1147.229777] env[63028]: DEBUG oslo_concurrency.lockutils [None req-805a773c-4318-4883-8862-160c0c057105 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.806s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1147.231949] env[63028]: DEBUG oslo_concurrency.lockutils [None req-18406ad5-0840-4895-9de8-675e40b55c28 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 1.497s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1147.254470] env[63028]: INFO nova.scheduler.client.report [None req-805a773c-4318-4883-8862-160c0c057105 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Deleted allocations for instance 899496ae-8463-42e0-a287-b141d956fa0a [ 1147.288209] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: 70147f2f-0b5e-4343-84e4-8bc195a5485d] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1147.445406] env[63028]: DEBUG nova.compute.manager [None req-8095c027-a15e-4bdc-a3e5-37b8018eaa83 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: feb8d206-718d-423a-afff-76c6975934e6] Start spawning the instance on the hypervisor. 
{{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1147.458301] env[63028]: DEBUG oslo_concurrency.lockutils [None req-2e044ff6-e42e-46f5-a0a3-f833f4cd7cda tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Lock "b438b12e-874a-4883-b606-c28258e5a01a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.780s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1147.472675] env[63028]: DEBUG nova.virt.hardware [None req-8095c027-a15e-4bdc-a3e5-37b8018eaa83 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1147.472929] env[63028]: DEBUG nova.virt.hardware [None req-8095c027-a15e-4bdc-a3e5-37b8018eaa83 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1147.473101] env[63028]: DEBUG nova.virt.hardware [None req-8095c027-a15e-4bdc-a3e5-37b8018eaa83 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1147.473288] env[63028]: DEBUG nova.virt.hardware [None req-8095c027-a15e-4bdc-a3e5-37b8018eaa83 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1147.473437] env[63028]: DEBUG nova.virt.hardware [None req-8095c027-a15e-4bdc-a3e5-37b8018eaa83 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1147.473587] env[63028]: DEBUG nova.virt.hardware [None req-8095c027-a15e-4bdc-a3e5-37b8018eaa83 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1147.473797] env[63028]: DEBUG nova.virt.hardware [None req-8095c027-a15e-4bdc-a3e5-37b8018eaa83 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} 
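The hardware.py entries above trace the standard CPU-topology selection: flavor and image carry no limits or preferences (0:0:0), so the defaults of 65536 sockets/cores/threads apply, and for a 1-vCPU flavor the only factorisation is 1 socket x 1 core x 1 thread. The sketch below only reproduces that enumeration step (factor the vCPU count within the stated maxima); it is a simplified illustration, not Nova's actual `_get_possible_cpu_topologies` code, and the class name is reused purely for readability.

```python
from itertools import product
from typing import List, NamedTuple

class VirtCPUTopology(NamedTuple):
    sockets: int
    cores: int
    threads: int

def possible_cpu_topologies(vcpus: int,
                            max_sockets: int = 65536,
                            max_cores: int = 65536,
                            max_threads: int = 65536) -> List[VirtCPUTopology]:
    """Enumerate sockets*cores*threads factorisations of the vCPU count that
    stay within the given maxima (each factor can be at most vcpus anyway)."""
    limit_s = min(max_sockets, vcpus)
    limit_c = min(max_cores, vcpus)
    limit_t = min(max_threads, vcpus)
    return [VirtCPUTopology(s, c, t)
            for s, c, t in product(range(1, limit_s + 1),
                                   range(1, limit_c + 1),
                                   range(1, limit_t + 1))
            if s * c * t == vcpus]

# A 1-vCPU flavor under the default 65536 limits yields exactly one topology,
# matching "Got 1 possible topologies" above:
print(possible_cpu_topologies(1))  # [VirtCPUTopology(sockets=1, cores=1, threads=1)]
```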
[ 1147.473955] env[63028]: DEBUG nova.virt.hardware [None req-8095c027-a15e-4bdc-a3e5-37b8018eaa83 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1147.474134] env[63028]: DEBUG nova.virt.hardware [None req-8095c027-a15e-4bdc-a3e5-37b8018eaa83 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1147.474367] env[63028]: DEBUG nova.virt.hardware [None req-8095c027-a15e-4bdc-a3e5-37b8018eaa83 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1147.474480] env[63028]: DEBUG nova.virt.hardware [None req-8095c027-a15e-4bdc-a3e5-37b8018eaa83 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1147.475357] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b29093a4-0440-4c5e-8725-b452f729efc3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.483430] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb57a244-d567-4fdf-9a3e-ae3e3510ce01 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.590520] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be8e9e29-f642-4be2-8c74-0bc3290ed787 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.610777] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b382959-6f33-422d-82ae-01991d306799 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.617831] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-f30745a5-feb4-4f16-9a6c-560730babfce tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 092c7673-97fb-4085-852c-04a7c19a73e7] Updating instance '092c7673-97fb-4085-852c-04a7c19a73e7' progress to 83 {{(pid=63028) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1147.676172] env[63028]: DEBUG oslo_vmware.api [None req-ed9a1c85-e0e1-4383-bc2f-0e3ab08d3b0d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2736413, 'name': ReconfigVM_Task, 'duration_secs': 1.164229} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1147.676737] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-ed9a1c85-e0e1-4383-bc2f-0e3ab08d3b0d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: f96d4bcd-a032-4e4d-94e4-12d7013d5e3f] Updating instance 'f96d4bcd-a032-4e4d-94e4-12d7013d5e3f' progress to 33 {{(pid=63028) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1147.736835] env[63028]: INFO nova.compute.claims [None req-18406ad5-0840-4895-9de8-675e40b55c28 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1147.762572] env[63028]: DEBUG oslo_concurrency.lockutils [None req-805a773c-4318-4883-8862-160c0c057105 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Lock "899496ae-8463-42e0-a287-b141d956fa0a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.779s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1147.791852] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: a2f7d7c6-7931-4b21-a29c-bb9965577210] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1147.848512] env[63028]: DEBUG nova.compute.manager [req-1b8fe073-4a84-42c3-9b55-6669a7848e43 req-af0a6248-8c11-46d4-89d5-8e13d61b9a2a service nova] [instance: feb8d206-718d-423a-afff-76c6975934e6] Received event network-vif-plugged-6e773949-46af-40ce-a861-613b26d282ab {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1147.848512] env[63028]: DEBUG oslo_concurrency.lockutils [req-1b8fe073-4a84-42c3-9b55-6669a7848e43 req-af0a6248-8c11-46d4-89d5-8e13d61b9a2a service nova] Acquiring lock "feb8d206-718d-423a-afff-76c6975934e6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1147.848512] env[63028]: DEBUG oslo_concurrency.lockutils [req-1b8fe073-4a84-42c3-9b55-6669a7848e43 req-af0a6248-8c11-46d4-89d5-8e13d61b9a2a service nova] Lock "feb8d206-718d-423a-afff-76c6975934e6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1147.848673] env[63028]: DEBUG oslo_concurrency.lockutils [req-1b8fe073-4a84-42c3-9b55-6669a7848e43 req-af0a6248-8c11-46d4-89d5-8e13d61b9a2a service nova] Lock "feb8d206-718d-423a-afff-76c6975934e6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1147.848731] env[63028]: DEBUG nova.compute.manager [req-1b8fe073-4a84-42c3-9b55-6669a7848e43 req-af0a6248-8c11-46d4-89d5-8e13d61b9a2a service nova] [instance: feb8d206-718d-423a-afff-76c6975934e6] No waiting events found dispatching network-vif-plugged-6e773949-46af-40ce-a861-613b26d282ab {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1147.848967] 
env[63028]: WARNING nova.compute.manager [req-1b8fe073-4a84-42c3-9b55-6669a7848e43 req-af0a6248-8c11-46d4-89d5-8e13d61b9a2a service nova] [instance: feb8d206-718d-423a-afff-76c6975934e6] Received unexpected event network-vif-plugged-6e773949-46af-40ce-a861-613b26d282ab for instance with vm_state building and task_state spawning. [ 1147.898568] env[63028]: DEBUG nova.network.neutron [None req-8095c027-a15e-4bdc-a3e5-37b8018eaa83 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: feb8d206-718d-423a-afff-76c6975934e6] Successfully updated port: 6e773949-46af-40ce-a861-613b26d282ab {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1148.128021] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-f30745a5-feb4-4f16-9a6c-560730babfce tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 092c7673-97fb-4085-852c-04a7c19a73e7] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1148.128021] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-59b8bb83-b2c0-4f1e-ab22-b5096886b452 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.133776] env[63028]: DEBUG oslo_vmware.api [None req-f30745a5-feb4-4f16-9a6c-560730babfce tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Waiting for the task: (returnval){ [ 1148.133776] env[63028]: value = "task-2736414" [ 1148.133776] env[63028]: _type = "Task" [ 1148.133776] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1148.141943] env[63028]: DEBUG oslo_vmware.api [None req-f30745a5-feb4-4f16-9a6c-560730babfce tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2736414, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.171072] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4818254d-ce08-453f-b049-80fe18723e71 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Acquiring lock "e048cadf-9dc1-4eb7-a825-422d0736231c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1148.171568] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4818254d-ce08-453f-b049-80fe18723e71 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Lock "e048cadf-9dc1-4eb7-a825-422d0736231c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1148.171895] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4818254d-ce08-453f-b049-80fe18723e71 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Acquiring lock "e048cadf-9dc1-4eb7-a825-422d0736231c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1148.172244] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4818254d-ce08-453f-b049-80fe18723e71 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Lock "e048cadf-9dc1-4eb7-a825-422d0736231c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1148.172539] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4818254d-ce08-453f-b049-80fe18723e71 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Lock "e048cadf-9dc1-4eb7-a825-422d0736231c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1148.174785] env[63028]: INFO nova.compute.manager [None req-4818254d-ce08-453f-b049-80fe18723e71 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: e048cadf-9dc1-4eb7-a825-422d0736231c] Terminating instance [ 1148.182879] env[63028]: DEBUG nova.virt.hardware [None req-ed9a1c85-e0e1-4383-bc2f-0e3ab08d3b0d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63028) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1148.183318] env[63028]: DEBUG nova.virt.hardware [None req-ed9a1c85-e0e1-4383-bc2f-0e3ab08d3b0d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1148.183593] env[63028]: DEBUG nova.virt.hardware [None req-ed9a1c85-e0e1-4383-bc2f-0e3ab08d3b0d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1148.183886] env[63028]: DEBUG nova.virt.hardware [None req-ed9a1c85-e0e1-4383-bc2f-0e3ab08d3b0d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1148.184178] env[63028]: DEBUG nova.virt.hardware [None req-ed9a1c85-e0e1-4383-bc2f-0e3ab08d3b0d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1148.185282] env[63028]: DEBUG nova.virt.hardware [None req-ed9a1c85-e0e1-4383-bc2f-0e3ab08d3b0d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1148.185282] env[63028]: DEBUG nova.virt.hardware [None req-ed9a1c85-e0e1-4383-bc2f-0e3ab08d3b0d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1148.185282] env[63028]: DEBUG nova.virt.hardware [None req-ed9a1c85-e0e1-4383-bc2f-0e3ab08d3b0d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1148.185282] env[63028]: DEBUG nova.virt.hardware [None req-ed9a1c85-e0e1-4383-bc2f-0e3ab08d3b0d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1148.185282] env[63028]: DEBUG nova.virt.hardware [None req-ed9a1c85-e0e1-4383-bc2f-0e3ab08d3b0d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1148.185526] env[63028]: DEBUG nova.virt.hardware [None req-ed9a1c85-e0e1-4383-bc2f-0e3ab08d3b0d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1148.191055] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-ed9a1c85-e0e1-4383-bc2f-0e3ab08d3b0d 
tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: f96d4bcd-a032-4e4d-94e4-12d7013d5e3f] Reconfiguring VM instance instance-00000073 to detach disk 2000 {{(pid=63028) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1148.191389] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-58e2e6ca-1a5f-4d72-ba99-95f5aec1c97e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.210571] env[63028]: DEBUG oslo_vmware.api [None req-ed9a1c85-e0e1-4383-bc2f-0e3ab08d3b0d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Waiting for the task: (returnval){ [ 1148.210571] env[63028]: value = "task-2736415" [ 1148.210571] env[63028]: _type = "Task" [ 1148.210571] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1148.218725] env[63028]: DEBUG oslo_vmware.api [None req-ed9a1c85-e0e1-4383-bc2f-0e3ab08d3b0d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2736415, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.242272] env[63028]: INFO nova.compute.resource_tracker [None req-18406ad5-0840-4895-9de8-675e40b55c28 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7] Updating resource usage from migration 58c26ff3-a56c-425c-bc63-1f1d728e3998 [ 1148.296445] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: 3b90dbb8-66ce-435f-beae-5464720bfb3e] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1148.302870] env[63028]: DEBUG nova.compute.manager [req-ef6ff9b0-44f8-4785-a96e-b39f093feca5 req-76a7f361-d743-4a2b-aa94-d54b5a5ac185 service nova] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Received event network-changed-51c6d9df-5b87-40a0-8c2b-5586869a3c0f {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1148.302870] env[63028]: DEBUG nova.compute.manager [req-ef6ff9b0-44f8-4785-a96e-b39f093feca5 req-76a7f361-d743-4a2b-aa94-d54b5a5ac185 service nova] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Refreshing instance network info cache due to event network-changed-51c6d9df-5b87-40a0-8c2b-5586869a3c0f. 
{{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1148.303221] env[63028]: DEBUG oslo_concurrency.lockutils [req-ef6ff9b0-44f8-4785-a96e-b39f093feca5 req-76a7f361-d743-4a2b-aa94-d54b5a5ac185 service nova] Acquiring lock "refresh_cache-b438b12e-874a-4883-b606-c28258e5a01a" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1148.303221] env[63028]: DEBUG oslo_concurrency.lockutils [req-ef6ff9b0-44f8-4785-a96e-b39f093feca5 req-76a7f361-d743-4a2b-aa94-d54b5a5ac185 service nova] Acquired lock "refresh_cache-b438b12e-874a-4883-b606-c28258e5a01a" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1148.303343] env[63028]: DEBUG nova.network.neutron [req-ef6ff9b0-44f8-4785-a96e-b39f093feca5 req-76a7f361-d743-4a2b-aa94-d54b5a5ac185 service nova] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Refreshing network info cache for port 51c6d9df-5b87-40a0-8c2b-5586869a3c0f {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1148.401126] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8095c027-a15e-4bdc-a3e5-37b8018eaa83 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Acquiring lock "refresh_cache-feb8d206-718d-423a-afff-76c6975934e6" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1148.401286] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8095c027-a15e-4bdc-a3e5-37b8018eaa83 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Acquired lock "refresh_cache-feb8d206-718d-423a-afff-76c6975934e6" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1148.401432] env[63028]: DEBUG nova.network.neutron [None req-8095c027-a15e-4bdc-a3e5-37b8018eaa83 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: feb8d206-718d-423a-afff-76c6975934e6] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1148.430859] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70fcda33-930c-4b33-ba6c-c64df41e7d11 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.439192] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-989fd63f-2851-466f-9a21-78a6fcd2f384 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.469182] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff605e67-6afe-46b9-b9fa-46585db45946 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.477488] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0a283d0-179f-43a2-b604-f8e6ccefe3ef {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.492703] env[63028]: DEBUG nova.compute.provider_tree [None req-18406ad5-0840-4895-9de8-675e40b55c28 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Inventory has not changed in ProviderTree for provider: 
399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1148.643662] env[63028]: DEBUG oslo_vmware.api [None req-f30745a5-feb4-4f16-9a6c-560730babfce tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2736414, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.678919] env[63028]: DEBUG nova.compute.manager [None req-4818254d-ce08-453f-b049-80fe18723e71 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: e048cadf-9dc1-4eb7-a825-422d0736231c] Start destroying the instance on the hypervisor. {{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1148.679240] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-4818254d-ce08-453f-b049-80fe18723e71 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: e048cadf-9dc1-4eb7-a825-422d0736231c] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1148.680142] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-014d2a4a-a45e-41f5-905c-5421d7041ea1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.687879] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-4818254d-ce08-453f-b049-80fe18723e71 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: e048cadf-9dc1-4eb7-a825-422d0736231c] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1148.688150] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7173ba33-7d9d-4191-9d43-75aefb2fee0d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.694046] env[63028]: DEBUG oslo_vmware.api [None req-4818254d-ce08-453f-b049-80fe18723e71 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Waiting for the task: (returnval){ [ 1148.694046] env[63028]: value = "task-2736416" [ 1148.694046] env[63028]: _type = "Task" [ 1148.694046] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1148.701198] env[63028]: DEBUG oslo_vmware.api [None req-4818254d-ce08-453f-b049-80fe18723e71 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2736416, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.719745] env[63028]: DEBUG oslo_vmware.api [None req-ed9a1c85-e0e1-4383-bc2f-0e3ab08d3b0d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2736415, 'name': ReconfigVM_Task, 'duration_secs': 0.167714} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1148.720016] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-ed9a1c85-e0e1-4383-bc2f-0e3ab08d3b0d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: f96d4bcd-a032-4e4d-94e4-12d7013d5e3f] Reconfigured VM instance instance-00000073 to detach disk 2000 {{(pid=63028) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1148.720788] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8770563-9726-4a4e-b2da-7a17691caba8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.742705] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-ed9a1c85-e0e1-4383-bc2f-0e3ab08d3b0d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: f96d4bcd-a032-4e4d-94e4-12d7013d5e3f] Reconfiguring VM instance instance-00000073 to attach disk [datastore2] f96d4bcd-a032-4e4d-94e4-12d7013d5e3f/f96d4bcd-a032-4e4d-94e4-12d7013d5e3f.vmdk or device None with type thin {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1148.742997] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a8aa7126-f1eb-41f0-8ae5-fdf3edd76cd1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.762268] env[63028]: DEBUG oslo_vmware.api [None req-ed9a1c85-e0e1-4383-bc2f-0e3ab08d3b0d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Waiting for the task: (returnval){ [ 1148.762268] env[63028]: value = "task-2736417" [ 1148.762268] env[63028]: _type = "Task" [ 1148.762268] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1148.773074] env[63028]: DEBUG oslo_vmware.api [None req-ed9a1c85-e0e1-4383-bc2f-0e3ab08d3b0d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2736417, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.800159] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: f0ca0d73-d428-4b8c-acac-a80b7b7dd793] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1148.945710] env[63028]: DEBUG nova.network.neutron [None req-8095c027-a15e-4bdc-a3e5-37b8018eaa83 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: feb8d206-718d-423a-afff-76c6975934e6] Instance cache missing network info. 
{{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1148.995891] env[63028]: DEBUG nova.scheduler.client.report [None req-18406ad5-0840-4895-9de8-675e40b55c28 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1149.153769] env[63028]: DEBUG oslo_vmware.api [None req-f30745a5-feb4-4f16-9a6c-560730babfce tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2736414, 'name': PowerOnVM_Task, 'duration_secs': 0.76053} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1149.154191] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-f30745a5-feb4-4f16-9a6c-560730babfce tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 092c7673-97fb-4085-852c-04a7c19a73e7] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1149.154625] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-f30745a5-feb4-4f16-9a6c-560730babfce tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 092c7673-97fb-4085-852c-04a7c19a73e7] Updating instance '092c7673-97fb-4085-852c-04a7c19a73e7' progress to 100 {{(pid=63028) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1149.204458] env[63028]: DEBUG oslo_vmware.api [None req-4818254d-ce08-453f-b049-80fe18723e71 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2736416, 'name': PowerOffVM_Task, 'duration_secs': 0.178558} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1149.204767] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-4818254d-ce08-453f-b049-80fe18723e71 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: e048cadf-9dc1-4eb7-a825-422d0736231c] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1149.204948] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-4818254d-ce08-453f-b049-80fe18723e71 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: e048cadf-9dc1-4eb7-a825-422d0736231c] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1149.205228] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1cfe61bd-7945-4f4e-913f-84391765aa35 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.211099] env[63028]: DEBUG nova.network.neutron [req-ef6ff9b0-44f8-4785-a96e-b39f093feca5 req-76a7f361-d743-4a2b-aa94-d54b5a5ac185 service nova] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Updated VIF entry in instance network info cache for port 51c6d9df-5b87-40a0-8c2b-5586869a3c0f. {{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1149.211472] env[63028]: DEBUG nova.network.neutron [req-ef6ff9b0-44f8-4785-a96e-b39f093feca5 req-76a7f361-d743-4a2b-aa94-d54b5a5ac185 service nova] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Updating instance_info_cache with network_info: [{"id": "51c6d9df-5b87-40a0-8c2b-5586869a3c0f", "address": "fa:16:3e:32:0e:40", "network": {"id": "49670a42-3caa-4492-8b32-f16f3fe77f4b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1158317332-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.229", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b4dcaef840f940bda057d0371cdc5adb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4df917f7-847a-4c0e-b0e3-69a52e4a1554", "external-id": "cl2-zone-457", "segmentation_id": 457, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap51c6d9df-5b", "ovs_interfaceid": "51c6d9df-5b87-40a0-8c2b-5586869a3c0f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1149.217291] env[63028]: DEBUG nova.network.neutron [None req-8095c027-a15e-4bdc-a3e5-37b8018eaa83 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: feb8d206-718d-423a-afff-76c6975934e6] Updating instance_info_cache with network_info: [{"id": "6e773949-46af-40ce-a861-613b26d282ab", "address": "fa:16:3e:db:17:59", "network": {"id": "1fdce9cd-bf47-402a-8e16-4c01a25ad481", "bridge": "br-int", "label": 
"tempest-ServersTestJSON-1363423183-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ea26842446ec4691a6456a6659188704", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "706c9762-1cf8-4770-897d-377d0d927773", "external-id": "nsx-vlan-transportzone-402", "segmentation_id": 402, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6e773949-46", "ovs_interfaceid": "6e773949-46af-40ce-a861-613b26d282ab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1149.276392] env[63028]: DEBUG oslo_vmware.api [None req-ed9a1c85-e0e1-4383-bc2f-0e3ab08d3b0d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2736417, 'name': ReconfigVM_Task, 'duration_secs': 0.3169} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1149.278628] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-ed9a1c85-e0e1-4383-bc2f-0e3ab08d3b0d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: f96d4bcd-a032-4e4d-94e4-12d7013d5e3f] Reconfigured VM instance instance-00000073 to attach disk [datastore2] f96d4bcd-a032-4e4d-94e4-12d7013d5e3f/f96d4bcd-a032-4e4d-94e4-12d7013d5e3f.vmdk or device None with type thin {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1149.279015] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-ed9a1c85-e0e1-4383-bc2f-0e3ab08d3b0d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: f96d4bcd-a032-4e4d-94e4-12d7013d5e3f] Updating instance 'f96d4bcd-a032-4e4d-94e4-12d7013d5e3f' progress to 50 {{(pid=63028) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1149.284388] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-4818254d-ce08-453f-b049-80fe18723e71 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: e048cadf-9dc1-4eb7-a825-422d0736231c] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1149.284684] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-4818254d-ce08-453f-b049-80fe18723e71 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: e048cadf-9dc1-4eb7-a825-422d0736231c] Deleting contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1149.285015] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-4818254d-ce08-453f-b049-80fe18723e71 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Deleting the datastore file [datastore2] e048cadf-9dc1-4eb7-a825-422d0736231c {{(pid=63028) file_delete 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1149.285404] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-afcd831b-47bb-49cf-aac5-025383cd1c49 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.295094] env[63028]: DEBUG oslo_vmware.api [None req-4818254d-ce08-453f-b049-80fe18723e71 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Waiting for the task: (returnval){ [ 1149.295094] env[63028]: value = "task-2736419" [ 1149.295094] env[63028]: _type = "Task" [ 1149.295094] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1149.307754] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: 1d008794-3c1a-46c6-b4eb-3d5441efdb22] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1149.311641] env[63028]: DEBUG oslo_vmware.api [None req-4818254d-ce08-453f-b049-80fe18723e71 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2736419, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1149.503222] env[63028]: DEBUG oslo_concurrency.lockutils [None req-18406ad5-0840-4895-9de8-675e40b55c28 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.271s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1149.503447] env[63028]: INFO nova.compute.manager [None req-18406ad5-0840-4895-9de8-675e40b55c28 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7] Migrating [ 1149.510195] env[63028]: DEBUG oslo_concurrency.lockutils [None req-3b8be475-87c0-4bb0-9306-253deff7c463 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.856s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1149.510438] env[63028]: DEBUG nova.objects.instance [None req-3b8be475-87c0-4bb0-9306-253deff7c463 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Lazy-loading 'resources' on Instance uuid 6865f832-d409-4b9b-8b6c-33b0bf07d2b2 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1149.714961] env[63028]: DEBUG oslo_concurrency.lockutils [req-ef6ff9b0-44f8-4785-a96e-b39f093feca5 req-76a7f361-d743-4a2b-aa94-d54b5a5ac185 service nova] Releasing lock "refresh_cache-b438b12e-874a-4883-b606-c28258e5a01a" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1149.717326] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc910afe-f56d-4916-97f6-fd9105de0131 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.720783] 
env[63028]: DEBUG oslo_concurrency.lockutils [None req-8095c027-a15e-4bdc-a3e5-37b8018eaa83 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Releasing lock "refresh_cache-feb8d206-718d-423a-afff-76c6975934e6" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1149.721091] env[63028]: DEBUG nova.compute.manager [None req-8095c027-a15e-4bdc-a3e5-37b8018eaa83 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: feb8d206-718d-423a-afff-76c6975934e6] Instance network_info: |[{"id": "6e773949-46af-40ce-a861-613b26d282ab", "address": "fa:16:3e:db:17:59", "network": {"id": "1fdce9cd-bf47-402a-8e16-4c01a25ad481", "bridge": "br-int", "label": "tempest-ServersTestJSON-1363423183-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ea26842446ec4691a6456a6659188704", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "706c9762-1cf8-4770-897d-377d0d927773", "external-id": "nsx-vlan-transportzone-402", "segmentation_id": 402, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6e773949-46", "ovs_interfaceid": "6e773949-46af-40ce-a861-613b26d282ab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1149.721564] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-8095c027-a15e-4bdc-a3e5-37b8018eaa83 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: feb8d206-718d-423a-afff-76c6975934e6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:db:17:59', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '706c9762-1cf8-4770-897d-377d0d927773', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6e773949-46af-40ce-a861-613b26d282ab', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1149.728925] env[63028]: DEBUG oslo.service.loopingcall [None req-8095c027-a15e-4bdc-a3e5-37b8018eaa83 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1149.729912] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: feb8d206-718d-423a-afff-76c6975934e6] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1149.730166] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e39472dc-6994-417d-a567-b430147bc7ef {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.748115] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9722156c-c9c4-4124-85a1-4e53808a4a08 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.780120] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78c2f95e-4986-49fa-ac41-8f25c3b3dc69 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.782830] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1149.782830] env[63028]: value = "task-2736420" [ 1149.782830] env[63028]: _type = "Task" [ 1149.782830] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1149.793021] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f480f010-e25b-457a-9cb1-6df6c427a99f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.796696] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b5f2151-26fe-4d6e-b8f6-89cc8b0d80be {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.804137] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2736420, 'name': CreateVM_Task} progress is 15%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1149.822639] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: a50e1167-d8ed-4099-83c3-a5066ab0be1f] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1149.835793] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00ba9509-8d87-4fb5-bf84-ad04744cb71e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.838807] env[63028]: DEBUG nova.compute.provider_tree [None req-3b8be475-87c0-4bb0-9306-253deff7c463 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1149.840488] env[63028]: DEBUG oslo_vmware.api [None req-4818254d-ce08-453f-b049-80fe18723e71 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Task: {'id': task-2736419, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.158394} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1149.841418] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-4818254d-ce08-453f-b049-80fe18723e71 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1149.841625] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-4818254d-ce08-453f-b049-80fe18723e71 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: e048cadf-9dc1-4eb7-a825-422d0736231c] Deleted contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1149.841822] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-4818254d-ce08-453f-b049-80fe18723e71 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: e048cadf-9dc1-4eb7-a825-422d0736231c] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1149.841995] env[63028]: INFO nova.compute.manager [None req-4818254d-ce08-453f-b049-80fe18723e71 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] [instance: e048cadf-9dc1-4eb7-a825-422d0736231c] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1149.842281] env[63028]: DEBUG oslo.service.loopingcall [None req-4818254d-ce08-453f-b049-80fe18723e71 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1149.842938] env[63028]: DEBUG nova.compute.manager [-] [instance: e048cadf-9dc1-4eb7-a825-422d0736231c] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1149.843073] env[63028]: DEBUG nova.network.neutron [-] [instance: e048cadf-9dc1-4eb7-a825-422d0736231c] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1149.859601] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-ed9a1c85-e0e1-4383-bc2f-0e3ab08d3b0d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: f96d4bcd-a032-4e4d-94e4-12d7013d5e3f] Updating instance 'f96d4bcd-a032-4e4d-94e4-12d7013d5e3f' progress to 67 {{(pid=63028) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1149.884347] env[63028]: DEBUG nova.compute.manager [req-c8f66158-fa33-4863-b4c6-127951000261 req-4dbc136e-caab-4a89-8218-06a5cdcbd319 service nova] [instance: feb8d206-718d-423a-afff-76c6975934e6] Received event network-changed-6e773949-46af-40ce-a861-613b26d282ab {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1149.887019] env[63028]: DEBUG nova.compute.manager [req-c8f66158-fa33-4863-b4c6-127951000261 req-4dbc136e-caab-4a89-8218-06a5cdcbd319 service nova] [instance: feb8d206-718d-423a-afff-76c6975934e6] Refreshing instance network info cache due to event network-changed-6e773949-46af-40ce-a861-613b26d282ab. 
{{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1149.887019] env[63028]: DEBUG oslo_concurrency.lockutils [req-c8f66158-fa33-4863-b4c6-127951000261 req-4dbc136e-caab-4a89-8218-06a5cdcbd319 service nova] Acquiring lock "refresh_cache-feb8d206-718d-423a-afff-76c6975934e6" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1149.887019] env[63028]: DEBUG oslo_concurrency.lockutils [req-c8f66158-fa33-4863-b4c6-127951000261 req-4dbc136e-caab-4a89-8218-06a5cdcbd319 service nova] Acquired lock "refresh_cache-feb8d206-718d-423a-afff-76c6975934e6" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1149.887019] env[63028]: DEBUG nova.network.neutron [req-c8f66158-fa33-4863-b4c6-127951000261 req-4dbc136e-caab-4a89-8218-06a5cdcbd319 service nova] [instance: feb8d206-718d-423a-afff-76c6975934e6] Refreshing network info cache for port 6e773949-46af-40ce-a861-613b26d282ab {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1150.022365] env[63028]: DEBUG oslo_concurrency.lockutils [None req-18406ad5-0840-4895-9de8-675e40b55c28 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Acquiring lock "refresh_cache-c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1150.022601] env[63028]: DEBUG oslo_concurrency.lockutils [None req-18406ad5-0840-4895-9de8-675e40b55c28 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Acquired lock "refresh_cache-c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1150.022767] env[63028]: DEBUG nova.network.neutron [None req-18406ad5-0840-4895-9de8-675e40b55c28 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1150.297040] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2736420, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1150.342938] env[63028]: DEBUG nova.scheduler.client.report [None req-3b8be475-87c0-4bb0-9306-253deff7c463 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1150.346623] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: 9773ad95-1894-471d-8020-c7952eac4be4] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1150.436039] env[63028]: DEBUG nova.network.neutron [None req-ed9a1c85-e0e1-4383-bc2f-0e3ab08d3b0d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: f96d4bcd-a032-4e4d-94e4-12d7013d5e3f] Port f5759890-6244-4b8a-9a03-6d628f2441b7 binding to destination host cpu-1 is already ACTIVE {{(pid=63028) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1150.643221] env[63028]: DEBUG nova.network.neutron [-] [instance: e048cadf-9dc1-4eb7-a825-422d0736231c] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1150.678895] env[63028]: DEBUG nova.network.neutron [req-c8f66158-fa33-4863-b4c6-127951000261 req-4dbc136e-caab-4a89-8218-06a5cdcbd319 service nova] [instance: feb8d206-718d-423a-afff-76c6975934e6] Updated VIF entry in instance network info cache for port 6e773949-46af-40ce-a861-613b26d282ab. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1150.679279] env[63028]: DEBUG nova.network.neutron [req-c8f66158-fa33-4863-b4c6-127951000261 req-4dbc136e-caab-4a89-8218-06a5cdcbd319 service nova] [instance: feb8d206-718d-423a-afff-76c6975934e6] Updating instance_info_cache with network_info: [{"id": "6e773949-46af-40ce-a861-613b26d282ab", "address": "fa:16:3e:db:17:59", "network": {"id": "1fdce9cd-bf47-402a-8e16-4c01a25ad481", "bridge": "br-int", "label": "tempest-ServersTestJSON-1363423183-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ea26842446ec4691a6456a6659188704", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "706c9762-1cf8-4770-897d-377d0d927773", "external-id": "nsx-vlan-transportzone-402", "segmentation_id": 402, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6e773949-46", "ovs_interfaceid": "6e773949-46af-40ce-a861-613b26d282ab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1150.794951] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2736420, 'name': CreateVM_Task} progress is 25%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1150.829765] env[63028]: DEBUG nova.network.neutron [None req-18406ad5-0840-4895-9de8-675e40b55c28 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7] Updating instance_info_cache with network_info: [{"id": "6ecb125b-389c-4dce-8446-368a7298e497", "address": "fa:16:3e:f4:06:c4", "network": {"id": "ec7daf90-9857-4c67-8588-1d8404499409", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-574413795-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.163", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e85128c5c889438bbb1df571b7756c6a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e1a5c1-4ae7-409b-8de7-d401684ef60d", "external-id": "nsx-vlan-transportzone-740", "segmentation_id": 740, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6ecb125b-38", "ovs_interfaceid": "6ecb125b-389c-4dce-8446-368a7298e497", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1150.850182] env[63028]: DEBUG oslo_concurrency.lockutils [None 
req-3b8be475-87c0-4bb0-9306-253deff7c463 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.340s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1150.852831] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: 4e859327-ccd3-440e-b884-67f6cdadf97f] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1150.877452] env[63028]: INFO nova.scheduler.client.report [None req-3b8be475-87c0-4bb0-9306-253deff7c463 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Deleted allocations for instance 6865f832-d409-4b9b-8b6c-33b0bf07d2b2 [ 1151.146245] env[63028]: INFO nova.compute.manager [-] [instance: e048cadf-9dc1-4eb7-a825-422d0736231c] Took 1.30 seconds to deallocate network for instance. [ 1151.182531] env[63028]: DEBUG oslo_concurrency.lockutils [req-c8f66158-fa33-4863-b4c6-127951000261 req-4dbc136e-caab-4a89-8218-06a5cdcbd319 service nova] Releasing lock "refresh_cache-feb8d206-718d-423a-afff-76c6975934e6" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1151.298818] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2736420, 'name': CreateVM_Task} progress is 25%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.332309] env[63028]: DEBUG oslo_concurrency.lockutils [None req-18406ad5-0840-4895-9de8-675e40b55c28 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Releasing lock "refresh_cache-c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1151.355895] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: 022125c4-2b0c-4a2c-ae63-18968887316e] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1151.387528] env[63028]: DEBUG oslo_concurrency.lockutils [None req-3b8be475-87c0-4bb0-9306-253deff7c463 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Lock "6865f832-d409-4b9b-8b6c-33b0bf07d2b2" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 8.591s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1151.453161] env[63028]: DEBUG oslo_concurrency.lockutils [None req-334ade4b-af4a-4e4b-b587-cbb1a2b3d2fe tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Acquiring lock "092c7673-97fb-4085-852c-04a7c19a73e7" by "nova.compute.manager.ComputeManager.confirm_resize.<locals>.do_confirm_resize" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1151.453417] env[63028]: DEBUG oslo_concurrency.lockutils [None req-334ade4b-af4a-4e4b-b587-cbb1a2b3d2fe tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Lock "092c7673-97fb-4085-852c-04a7c19a73e7" acquired by "nova.compute.manager.ComputeManager.confirm_resize.<locals>.do_confirm_resize" :: 
waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1151.453604] env[63028]: DEBUG nova.compute.manager [None req-334ade4b-af4a-4e4b-b587-cbb1a2b3d2fe tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 092c7673-97fb-4085-852c-04a7c19a73e7] Going to confirm migration 7 {{(pid=63028) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1151.460421] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ed9a1c85-e0e1-4383-bc2f-0e3ab08d3b0d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Acquiring lock "f96d4bcd-a032-4e4d-94e4-12d7013d5e3f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1151.460636] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ed9a1c85-e0e1-4383-bc2f-0e3ab08d3b0d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Lock "f96d4bcd-a032-4e4d-94e4-12d7013d5e3f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1151.460813] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ed9a1c85-e0e1-4383-bc2f-0e3ab08d3b0d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Lock "f96d4bcd-a032-4e4d-94e4-12d7013d5e3f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1151.653729] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4818254d-ce08-453f-b049-80fe18723e71 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1151.654079] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4818254d-ce08-453f-b049-80fe18723e71 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1151.654335] env[63028]: DEBUG nova.objects.instance [None req-4818254d-ce08-453f-b049-80fe18723e71 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Lazy-loading 'resources' on Instance uuid e048cadf-9dc1-4eb7-a825-422d0736231c {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1151.796023] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2736420, 'name': CreateVM_Task, 'duration_secs': 1.991224} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1151.796212] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: feb8d206-718d-423a-afff-76c6975934e6] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1151.796947] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8095c027-a15e-4bdc-a3e5-37b8018eaa83 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1151.797130] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8095c027-a15e-4bdc-a3e5-37b8018eaa83 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1151.797457] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8095c027-a15e-4bdc-a3e5-37b8018eaa83 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1151.797721] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d2575d82-ac52-4f6e-856a-1e686b1d3c97 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.802153] env[63028]: DEBUG oslo_vmware.api [None req-8095c027-a15e-4bdc-a3e5-37b8018eaa83 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Waiting for the task: (returnval){ [ 1151.802153] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]528016b7-ab01-dc98-9ea6-767f2635996c" [ 1151.802153] env[63028]: _type = "Task" [ 1151.802153] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1151.809360] env[63028]: DEBUG oslo_vmware.api [None req-8095c027-a15e-4bdc-a3e5-37b8018eaa83 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]528016b7-ab01-dc98-9ea6-767f2635996c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.859383] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: 13e0ca05-3ab3-43e2-8b0d-8045e26d6723] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1151.912340] env[63028]: DEBUG nova.compute.manager [req-edaddba9-8b2b-49c3-a17c-e5454b8c5224 req-0c5a1c93-0c20-4029-a2ff-25ff011d4a24 service nova] [instance: e048cadf-9dc1-4eb7-a825-422d0736231c] Received event network-vif-deleted-60891063-6c30-480a-8e2b-f3960496f2fd {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1151.999903] env[63028]: DEBUG oslo_concurrency.lockutils [None req-334ade4b-af4a-4e4b-b587-cbb1a2b3d2fe tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Acquiring lock "refresh_cache-092c7673-97fb-4085-852c-04a7c19a73e7" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1152.000135] env[63028]: DEBUG oslo_concurrency.lockutils [None req-334ade4b-af4a-4e4b-b587-cbb1a2b3d2fe tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Acquired lock "refresh_cache-092c7673-97fb-4085-852c-04a7c19a73e7" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1152.000354] env[63028]: DEBUG nova.network.neutron [None req-334ade4b-af4a-4e4b-b587-cbb1a2b3d2fe tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 092c7673-97fb-4085-852c-04a7c19a73e7] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1152.000570] env[63028]: DEBUG nova.objects.instance [None req-334ade4b-af4a-4e4b-b587-cbb1a2b3d2fe tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Lazy-loading 'info_cache' on Instance uuid 092c7673-97fb-4085-852c-04a7c19a73e7 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1152.313144] env[63028]: DEBUG oslo_vmware.api [None req-8095c027-a15e-4bdc-a3e5-37b8018eaa83 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]528016b7-ab01-dc98-9ea6-767f2635996c, 'name': SearchDatastore_Task, 'duration_secs': 0.02642} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1152.313521] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8095c027-a15e-4bdc-a3e5-37b8018eaa83 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1152.313789] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-8095c027-a15e-4bdc-a3e5-37b8018eaa83 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: feb8d206-718d-423a-afff-76c6975934e6] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1152.314802] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8095c027-a15e-4bdc-a3e5-37b8018eaa83 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1152.314802] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8095c027-a15e-4bdc-a3e5-37b8018eaa83 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1152.314802] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-8095c027-a15e-4bdc-a3e5-37b8018eaa83 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1152.315060] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-17a4d74f-f95a-49be-8f85-56c004a604f8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.327552] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-8095c027-a15e-4bdc-a3e5-37b8018eaa83 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1152.327736] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-8095c027-a15e-4bdc-a3e5-37b8018eaa83 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1152.329126] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7a321372-fab4-4cec-b229-5d22115030a3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.331638] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20452cd9-fee9-4ef6-9fbd-60ee0c110bff {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.337258] env[63028]: DEBUG oslo_vmware.api [None req-8095c027-a15e-4bdc-a3e5-37b8018eaa83 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Waiting for the task: (returnval){ [ 1152.337258] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]523b497b-79a7-1284-20a3-b8ebf9d91e4a" [ 1152.337258] env[63028]: _type = "Task" [ 1152.337258] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1152.345601] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-365a4809-30f4-4fe4-ace0-5afedb3ecbb9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.352128] env[63028]: DEBUG oslo_vmware.api [None req-8095c027-a15e-4bdc-a3e5-37b8018eaa83 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]523b497b-79a7-1284-20a3-b8ebf9d91e4a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.377508] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: 56d39801-f3e7-4cfe-a038-6a5e762bfda8] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1152.380088] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5148b9ed-9307-4f73-8222-da329d700218 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.387333] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-109cebdd-4cdb-4030-8aa3-72d1648b4e89 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.401166] env[63028]: DEBUG nova.compute.provider_tree [None req-4818254d-ce08-453f-b049-80fe18723e71 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1152.496338] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ed9a1c85-e0e1-4383-bc2f-0e3ab08d3b0d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Acquiring lock "refresh_cache-f96d4bcd-a032-4e4d-94e4-12d7013d5e3f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1152.496577] env[63028]: DEBUG oslo_concurrency.lockutils [None 
req-ed9a1c85-e0e1-4383-bc2f-0e3ab08d3b0d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Acquired lock "refresh_cache-f96d4bcd-a032-4e4d-94e4-12d7013d5e3f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1152.496774] env[63028]: DEBUG nova.network.neutron [None req-ed9a1c85-e0e1-4383-bc2f-0e3ab08d3b0d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: f96d4bcd-a032-4e4d-94e4-12d7013d5e3f] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1152.851074] env[63028]: DEBUG oslo_vmware.api [None req-8095c027-a15e-4bdc-a3e5-37b8018eaa83 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]523b497b-79a7-1284-20a3-b8ebf9d91e4a, 'name': SearchDatastore_Task, 'duration_secs': 0.035968} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1152.852729] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e636a51f-0616-4a96-8372-0fc0dae96a09 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.855124] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9906a8c1-5388-4ab7-812a-6830b0bc1925 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.872465] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-18406ad5-0840-4895-9de8-675e40b55c28 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7] Updating instance 'c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7' progress to 0 {{(pid=63028) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1152.876528] env[63028]: DEBUG oslo_vmware.api [None req-8095c027-a15e-4bdc-a3e5-37b8018eaa83 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Waiting for the task: (returnval){ [ 1152.876528] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52dd624b-0a38-2660-3f65-5c1db8fe9468" [ 1152.876528] env[63028]: _type = "Task" [ 1152.876528] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1152.885201] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: f4718363-73b2-4016-8849-f75e98259023] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1152.886737] env[63028]: DEBUG oslo_vmware.api [None req-8095c027-a15e-4bdc-a3e5-37b8018eaa83 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52dd624b-0a38-2660-3f65-5c1db8fe9468, 'name': SearchDatastore_Task, 'duration_secs': 0.012099} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1152.887143] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8095c027-a15e-4bdc-a3e5-37b8018eaa83 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1152.887389] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-8095c027-a15e-4bdc-a3e5-37b8018eaa83 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] feb8d206-718d-423a-afff-76c6975934e6/feb8d206-718d-423a-afff-76c6975934e6.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1152.887616] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ea1c18f0-9a7d-4981-b6d9-5bc2d973bbc8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.893706] env[63028]: DEBUG oslo_vmware.api [None req-8095c027-a15e-4bdc-a3e5-37b8018eaa83 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Waiting for the task: (returnval){ [ 1152.893706] env[63028]: value = "task-2736421" [ 1152.893706] env[63028]: _type = "Task" [ 1152.893706] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1152.901178] env[63028]: DEBUG oslo_vmware.api [None req-8095c027-a15e-4bdc-a3e5-37b8018eaa83 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2736421, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.904093] env[63028]: DEBUG nova.scheduler.client.report [None req-4818254d-ce08-453f-b049-80fe18723e71 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1153.153225] env[63028]: DEBUG oslo_concurrency.lockutils [None req-1c601d39-c98b-4e91-9004-36a1dd0b0468 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Acquiring lock "e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1153.153615] env[63028]: DEBUG oslo_concurrency.lockutils [None req-1c601d39-c98b-4e91-9004-36a1dd0b0468 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Lock "e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1153.379746] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-18406ad5-0840-4895-9de8-675e40b55c28 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1153.380134] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7ffe9fda-15f1-4400-8405-17e4170a5775 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.388336] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: 514c83d1-4fb1-435c-8c25-aa112c744131] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1153.390822] env[63028]: DEBUG oslo_vmware.api [None req-18406ad5-0840-4895-9de8-675e40b55c28 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Waiting for the task: (returnval){ [ 1153.390822] env[63028]: value = "task-2736422" [ 1153.390822] env[63028]: _type = "Task" [ 1153.390822] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1153.399469] env[63028]: DEBUG nova.network.neutron [None req-ed9a1c85-e0e1-4383-bc2f-0e3ab08d3b0d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: f96d4bcd-a032-4e4d-94e4-12d7013d5e3f] Updating instance_info_cache with network_info: [{"id": "f5759890-6244-4b8a-9a03-6d628f2441b7", "address": "fa:16:3e:61:25:8b", "network": {"id": "9c32515c-66bf-4a5f-9011-975fe2f6b264", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1101331936-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8efc6d89903c454eb39136a76e0adef5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92fe29b3-0907-453d-aabb-5559c4bd7c0f", "external-id": "nsx-vlan-transportzone-482", "segmentation_id": 482, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf5759890-62", "ovs_interfaceid": "f5759890-6244-4b8a-9a03-6d628f2441b7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1153.414014] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4818254d-ce08-453f-b049-80fe18723e71 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.760s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1153.416271] env[63028]: DEBUG oslo_vmware.api [None req-18406ad5-0840-4895-9de8-675e40b55c28 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736422, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.421062] env[63028]: DEBUG oslo_vmware.api [None req-8095c027-a15e-4bdc-a3e5-37b8018eaa83 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2736421, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.434945] env[63028]: DEBUG nova.network.neutron [None req-334ade4b-af4a-4e4b-b587-cbb1a2b3d2fe tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 092c7673-97fb-4085-852c-04a7c19a73e7] Updating instance_info_cache with network_info: [{"id": "eda2613a-55b1-4516-80ce-192d52a6abe6", "address": "fa:16:3e:43:ad:63", "network": {"id": "a3d9dede-4b44-4172-9541-24cc42ad633f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-595575190-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.168", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "847e89af959a4266ab55c1d2106ba8fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8459aaf-d6a8-46fb-ad14-464ac3104695", "external-id": "nsx-vlan-transportzone-46", "segmentation_id": 46, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeda2613a-55", "ovs_interfaceid": "eda2613a-55b1-4516-80ce-192d52a6abe6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1153.440157] env[63028]: INFO nova.scheduler.client.report [None req-4818254d-ce08-453f-b049-80fe18723e71 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Deleted allocations for instance e048cadf-9dc1-4eb7-a825-422d0736231c [ 1153.655764] env[63028]: DEBUG nova.compute.manager [None req-1c601d39-c98b-4e91-9004-36a1dd0b0468 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc] Starting instance... {{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1153.894469] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: 70888889-4965-47ab-ad47-59f1c1286bd8] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1153.907577] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ed9a1c85-e0e1-4383-bc2f-0e3ab08d3b0d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Releasing lock "refresh_cache-f96d4bcd-a032-4e4d-94e4-12d7013d5e3f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1153.910932] env[63028]: DEBUG oslo_vmware.api [None req-18406ad5-0840-4895-9de8-675e40b55c28 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736422, 'name': PowerOffVM_Task, 'duration_secs': 0.170727} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1153.911721] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-18406ad5-0840-4895-9de8-675e40b55c28 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1153.911987] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-18406ad5-0840-4895-9de8-675e40b55c28 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7] Updating instance 'c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7' progress to 17 {{(pid=63028) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1153.917884] env[63028]: DEBUG oslo_vmware.api [None req-8095c027-a15e-4bdc-a3e5-37b8018eaa83 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2736421, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.533624} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1153.918760] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-8095c027-a15e-4bdc-a3e5-37b8018eaa83 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] feb8d206-718d-423a-afff-76c6975934e6/feb8d206-718d-423a-afff-76c6975934e6.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1153.918971] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-8095c027-a15e-4bdc-a3e5-37b8018eaa83 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: feb8d206-718d-423a-afff-76c6975934e6] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1153.919217] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bba2e1d1-7e4f-45a3-a6b1-7d9c22f2f93e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.925667] env[63028]: DEBUG oslo_vmware.api [None req-8095c027-a15e-4bdc-a3e5-37b8018eaa83 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Waiting for the task: (returnval){ [ 1153.925667] env[63028]: value = "task-2736423" [ 1153.925667] env[63028]: _type = "Task" [ 1153.925667] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1153.934331] env[63028]: DEBUG oslo_vmware.api [None req-8095c027-a15e-4bdc-a3e5-37b8018eaa83 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2736423, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.938041] env[63028]: DEBUG oslo_concurrency.lockutils [None req-334ade4b-af4a-4e4b-b587-cbb1a2b3d2fe tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Releasing lock "refresh_cache-092c7673-97fb-4085-852c-04a7c19a73e7" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1153.938318] env[63028]: DEBUG nova.objects.instance [None req-334ade4b-af4a-4e4b-b587-cbb1a2b3d2fe tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Lazy-loading 'migration_context' on Instance uuid 092c7673-97fb-4085-852c-04a7c19a73e7 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1153.948099] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4818254d-ce08-453f-b049-80fe18723e71 tempest-AttachInterfacesTestJSON-1120072222 tempest-AttachInterfacesTestJSON-1120072222-project-member] Lock "e048cadf-9dc1-4eb7-a825-422d0736231c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.776s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1154.175213] env[63028]: DEBUG oslo_concurrency.lockutils [None req-1c601d39-c98b-4e91-9004-36a1dd0b0468 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1154.175466] env[63028]: DEBUG oslo_concurrency.lockutils [None req-1c601d39-c98b-4e91-9004-36a1dd0b0468 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1154.177033] env[63028]: INFO nova.compute.claims [None req-1c601d39-c98b-4e91-9004-36a1dd0b0468 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1154.400718] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: e346c31b-ef1b-4f75-8564-cefe26bd672f] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1154.423886] env[63028]: DEBUG nova.virt.hardware [None req-18406ad5-0840-4895-9de8-675e40b55c28 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1154.424336] env[63028]: DEBUG nova.virt.hardware [None req-18406ad5-0840-4895-9de8-675e40b55c28 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1154.424945] env[63028]: DEBUG nova.virt.hardware [None req-18406ad5-0840-4895-9de8-675e40b55c28 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1154.424945] env[63028]: DEBUG nova.virt.hardware [None req-18406ad5-0840-4895-9de8-675e40b55c28 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1154.425169] env[63028]: DEBUG nova.virt.hardware [None req-18406ad5-0840-4895-9de8-675e40b55c28 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1154.425538] env[63028]: DEBUG nova.virt.hardware [None req-18406ad5-0840-4895-9de8-675e40b55c28 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1154.425538] env[63028]: DEBUG nova.virt.hardware [None req-18406ad5-0840-4895-9de8-675e40b55c28 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1154.425678] env[63028]: DEBUG nova.virt.hardware [None req-18406ad5-0840-4895-9de8-675e40b55c28 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1154.425888] env[63028]: DEBUG nova.virt.hardware [None req-18406ad5-0840-4895-9de8-675e40b55c28 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1154.426103] env[63028]: DEBUG nova.virt.hardware [None req-18406ad5-0840-4895-9de8-675e40b55c28 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1154.427230] env[63028]: DEBUG nova.virt.hardware [None req-18406ad5-0840-4895-9de8-675e40b55c28 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Sorted desired topologies 
[VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1154.432603] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4ecc402f-9272-4625-a4a4-47345a00a961 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.449129] env[63028]: DEBUG nova.objects.base [None req-334ade4b-af4a-4e4b-b587-cbb1a2b3d2fe tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Object Instance<092c7673-97fb-4085-852c-04a7c19a73e7> lazy-loaded attributes: info_cache,migration_context {{(pid=63028) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1154.449953] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f2c4586-3043-4057-9c17-680d15531753 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.452979] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a2211ec-1e02-4ef1-b797-730a3725d4ae {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.462029] env[63028]: DEBUG oslo_vmware.api [None req-8095c027-a15e-4bdc-a3e5-37b8018eaa83 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2736423, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068867} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1154.492728] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-8095c027-a15e-4bdc-a3e5-37b8018eaa83 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: feb8d206-718d-423a-afff-76c6975934e6] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1154.493260] env[63028]: DEBUG oslo_vmware.api [None req-18406ad5-0840-4895-9de8-675e40b55c28 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Waiting for the task: (returnval){ [ 1154.493260] env[63028]: value = "task-2736424" [ 1154.493260] env[63028]: _type = "Task" [ 1154.493260] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1154.495320] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dda08616-16f4-474f-b5f6-7ec50ae65bad {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.498951] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c551ecd3-9487-40d9-843f-52d24f29e688 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.502376] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f2f2504f-1530-4f28-8957-897838564a75 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.514437] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-ed9a1c85-e0e1-4383-bc2f-0e3ab08d3b0d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: f96d4bcd-a032-4e4d-94e4-12d7013d5e3f] Updating instance 'f96d4bcd-a032-4e4d-94e4-12d7013d5e3f' progress to 83 {{(pid=63028) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1154.532308] env[63028]: DEBUG oslo_vmware.api [None req-18406ad5-0840-4895-9de8-675e40b55c28 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736424, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.542514] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-8095c027-a15e-4bdc-a3e5-37b8018eaa83 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: feb8d206-718d-423a-afff-76c6975934e6] Reconfiguring VM instance instance-00000075 to attach disk [datastore1] feb8d206-718d-423a-afff-76c6975934e6/feb8d206-718d-423a-afff-76c6975934e6.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1154.546451] env[63028]: DEBUG oslo_vmware.api [None req-334ade4b-af4a-4e4b-b587-cbb1a2b3d2fe tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Waiting for the task: (returnval){ [ 1154.546451] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52930542-3412-82c4-d699-eec4baaf09b1" [ 1154.546451] env[63028]: _type = "Task" [ 1154.546451] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1154.546451] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c2fa9786-f967-4ae1-9f5f-47869d58de77 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.571333] env[63028]: DEBUG oslo_vmware.api [None req-334ade4b-af4a-4e4b-b587-cbb1a2b3d2fe tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52930542-3412-82c4-d699-eec4baaf09b1, 'name': SearchDatastore_Task, 'duration_secs': 0.008976} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1154.573228] env[63028]: DEBUG oslo_concurrency.lockutils [None req-334ade4b-af4a-4e4b-b587-cbb1a2b3d2fe tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1154.573697] env[63028]: DEBUG oslo_vmware.api [None req-8095c027-a15e-4bdc-a3e5-37b8018eaa83 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Waiting for the task: (returnval){ [ 1154.573697] env[63028]: value = "task-2736425" [ 1154.573697] env[63028]: _type = "Task" [ 1154.573697] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1154.582999] env[63028]: DEBUG oslo_vmware.api [None req-8095c027-a15e-4bdc-a3e5-37b8018eaa83 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2736425, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.903883] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: 63524cd8-81de-419f-bb07-0326f3cb393f] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1155.008515] env[63028]: DEBUG oslo_vmware.api [None req-18406ad5-0840-4895-9de8-675e40b55c28 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736424, 'name': ReconfigVM_Task, 'duration_secs': 0.250188} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1155.009559] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-18406ad5-0840-4895-9de8-675e40b55c28 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7] Updating instance 'c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7' progress to 33 {{(pid=63028) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1155.045919] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed9a1c85-e0e1-4383-bc2f-0e3ab08d3b0d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: f96d4bcd-a032-4e4d-94e4-12d7013d5e3f] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1155.046238] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ea23cba6-c124-4b73-8c78-fccf60445872 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.053478] env[63028]: DEBUG oslo_vmware.api [None req-ed9a1c85-e0e1-4383-bc2f-0e3ab08d3b0d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Waiting for the task: (returnval){ [ 1155.053478] env[63028]: value = "task-2736426" [ 1155.053478] env[63028]: _type = "Task" [ 1155.053478] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1155.063259] env[63028]: DEBUG oslo_vmware.api [None req-ed9a1c85-e0e1-4383-bc2f-0e3ab08d3b0d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2736426, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1155.082050] env[63028]: DEBUG oslo_vmware.api [None req-8095c027-a15e-4bdc-a3e5-37b8018eaa83 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2736425, 'name': ReconfigVM_Task, 'duration_secs': 0.276009} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1155.082197] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-8095c027-a15e-4bdc-a3e5-37b8018eaa83 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: feb8d206-718d-423a-afff-76c6975934e6] Reconfigured VM instance instance-00000075 to attach disk [datastore1] feb8d206-718d-423a-afff-76c6975934e6/feb8d206-718d-423a-afff-76c6975934e6.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1155.083052] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a8c21e46-8e4d-40be-b7ab-c5aef58f37f9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.089303] env[63028]: DEBUG oslo_vmware.api [None req-8095c027-a15e-4bdc-a3e5-37b8018eaa83 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Waiting for the task: (returnval){ [ 1155.089303] env[63028]: value = "task-2736427" [ 1155.089303] env[63028]: _type = "Task" [ 1155.089303] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1155.098451] env[63028]: DEBUG oslo_vmware.api [None req-8095c027-a15e-4bdc-a3e5-37b8018eaa83 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2736427, 'name': Rename_Task} progress is 5%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1155.349044] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-569305a0-0127-433b-8c73-83afd3b2b763 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.356807] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7664db6-6388-4c99-9cbf-3a8603ebb65a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.387751] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10be7cc0-ae42-4751-9115-a83f4c730318 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.395856] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b92df28b-df67-4735-8162-45cc1d110af2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.409593] env[63028]: DEBUG nova.compute.provider_tree [None req-1c601d39-c98b-4e91-9004-36a1dd0b0468 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1155.411192] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: a1d00736-1a8d-46e0-9358-46e848b94797] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1155.515812] env[63028]: DEBUG nova.virt.hardware [None req-18406ad5-0840-4895-9de8-675e40b55c28 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1155.516082] env[63028]: DEBUG nova.virt.hardware [None req-18406ad5-0840-4895-9de8-675e40b55c28 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1155.516245] env[63028]: DEBUG nova.virt.hardware [None req-18406ad5-0840-4895-9de8-675e40b55c28 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1155.516426] env[63028]: DEBUG nova.virt.hardware [None req-18406ad5-0840-4895-9de8-675e40b55c28 tempest-ServerActionsTestJSON-83366727 
tempest-ServerActionsTestJSON-83366727-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1155.516576] env[63028]: DEBUG nova.virt.hardware [None req-18406ad5-0840-4895-9de8-675e40b55c28 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1155.516722] env[63028]: DEBUG nova.virt.hardware [None req-18406ad5-0840-4895-9de8-675e40b55c28 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1155.516924] env[63028]: DEBUG nova.virt.hardware [None req-18406ad5-0840-4895-9de8-675e40b55c28 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1155.517109] env[63028]: DEBUG nova.virt.hardware [None req-18406ad5-0840-4895-9de8-675e40b55c28 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1155.517283] env[63028]: DEBUG nova.virt.hardware [None req-18406ad5-0840-4895-9de8-675e40b55c28 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1155.517448] env[63028]: DEBUG nova.virt.hardware [None req-18406ad5-0840-4895-9de8-675e40b55c28 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1155.517623] env[63028]: DEBUG nova.virt.hardware [None req-18406ad5-0840-4895-9de8-675e40b55c28 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1155.522836] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-18406ad5-0840-4895-9de8-675e40b55c28 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7] Reconfiguring VM instance instance-0000003e to detach disk 2000 {{(pid=63028) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1155.523138] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a09c9614-1d28-4c90-a33f-3113d40d0e89 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.541618] env[63028]: DEBUG oslo_vmware.api [None req-18406ad5-0840-4895-9de8-675e40b55c28 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Waiting for the task: (returnval){ [ 1155.541618] env[63028]: value = "task-2736428" [ 1155.541618] env[63028]: _type = "Task" [ 
1155.541618] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1155.553682] env[63028]: DEBUG oslo_vmware.api [None req-18406ad5-0840-4895-9de8-675e40b55c28 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736428, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1155.561770] env[63028]: DEBUG oslo_vmware.api [None req-ed9a1c85-e0e1-4383-bc2f-0e3ab08d3b0d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2736426, 'name': PowerOnVM_Task, 'duration_secs': 0.393109} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1155.561770] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed9a1c85-e0e1-4383-bc2f-0e3ab08d3b0d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: f96d4bcd-a032-4e4d-94e4-12d7013d5e3f] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1155.561949] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-ed9a1c85-e0e1-4383-bc2f-0e3ab08d3b0d tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: f96d4bcd-a032-4e4d-94e4-12d7013d5e3f] Updating instance 'f96d4bcd-a032-4e4d-94e4-12d7013d5e3f' progress to 100 {{(pid=63028) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1155.599299] env[63028]: DEBUG oslo_vmware.api [None req-8095c027-a15e-4bdc-a3e5-37b8018eaa83 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2736427, 'name': Rename_Task, 'duration_secs': 0.137041} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1155.599695] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-8095c027-a15e-4bdc-a3e5-37b8018eaa83 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: feb8d206-718d-423a-afff-76c6975934e6] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1155.600014] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f39d6a51-23a9-481b-8353-641ab03e38cf {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.606410] env[63028]: DEBUG oslo_vmware.api [None req-8095c027-a15e-4bdc-a3e5-37b8018eaa83 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Waiting for the task: (returnval){ [ 1155.606410] env[63028]: value = "task-2736429" [ 1155.606410] env[63028]: _type = "Task" [ 1155.606410] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1155.614717] env[63028]: DEBUG oslo_vmware.api [None req-8095c027-a15e-4bdc-a3e5-37b8018eaa83 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2736429, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1155.914124] env[63028]: DEBUG nova.scheduler.client.report [None req-1c601d39-c98b-4e91-9004-36a1dd0b0468 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1155.917460] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: 52b19182-a7e2-4461-b4eb-e6cd8a30024e] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1156.052124] env[63028]: DEBUG oslo_vmware.api [None req-18406ad5-0840-4895-9de8-675e40b55c28 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736428, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.117480] env[63028]: DEBUG oslo_vmware.api [None req-8095c027-a15e-4bdc-a3e5-37b8018eaa83 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2736429, 'name': PowerOnVM_Task, 'duration_secs': 0.468416} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1156.117675] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-8095c027-a15e-4bdc-a3e5-37b8018eaa83 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: feb8d206-718d-423a-afff-76c6975934e6] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1156.117878] env[63028]: INFO nova.compute.manager [None req-8095c027-a15e-4bdc-a3e5-37b8018eaa83 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: feb8d206-718d-423a-afff-76c6975934e6] Took 8.67 seconds to spawn the instance on the hypervisor. 
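[editor's note] The resource-tracker record just above reports inventory for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 with per-class totals, reserved amounts and allocation ratios. As a minimal sketch only (the helper name is ours, not Nova's), the snippet below shows how such an inventory dict maps to the capacity Placement would consider schedulable, assuming the documented (total - reserved) * allocation_ratio rule:

```python
# Illustrative only: recompute schedulable capacity from the inventory dict
# logged above. The (total - reserved) * allocation_ratio rule follows
# Placement's documented capacity formula; the helper below is hypothetical.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'min_unit': 1, 'max_unit': 16,
                  'step_size': 1, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530,
                  'step_size': 1, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'min_unit': 1, 'max_unit': 111,
                  'step_size': 1, 'allocation_ratio': 1.0},
}

def schedulable_capacity(inv):
    """Capacity per resource class: (total - reserved) * allocation_ratio."""
    return {rc: (v['total'] - v['reserved']) * v['allocation_ratio']
            for rc, v in inv.items()}

print(schedulable_capacity(inventory))
# {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}
```

With an allocation ratio of 4.0, the 48 physical VCPUs shown in the log are advertised as 192 schedulable units, which is why claims such as the one for instance e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc succeed even on a busy node.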
[ 1156.118064] env[63028]: DEBUG nova.compute.manager [None req-8095c027-a15e-4bdc-a3e5-37b8018eaa83 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: feb8d206-718d-423a-afff-76c6975934e6] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1156.118841] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc105f87-2ed2-4bd6-8624-726edabe3693 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.420489] env[63028]: DEBUG oslo_concurrency.lockutils [None req-1c601d39-c98b-4e91-9004-36a1dd0b0468 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.245s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1156.421609] env[63028]: DEBUG nova.compute.manager [None req-1c601d39-c98b-4e91-9004-36a1dd0b0468 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc] Start building networks asynchronously for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1156.423860] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: 85aafadb-81d6-4687-aed1-fbe829e5f95f] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1156.425462] env[63028]: DEBUG oslo_concurrency.lockutils [None req-334ade4b-af4a-4e4b-b587-cbb1a2b3d2fe tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 1.852s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1156.553140] env[63028]: DEBUG oslo_vmware.api [None req-18406ad5-0840-4895-9de8-675e40b55c28 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736428, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.634905] env[63028]: INFO nova.compute.manager [None req-8095c027-a15e-4bdc-a3e5-37b8018eaa83 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: feb8d206-718d-423a-afff-76c6975934e6] Took 16.38 seconds to build instance. 
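[editor's note] The ReconfigVM_Task / PowerOnVM_Task records throughout this section follow one poll-until-done pattern: a vCenter task is invoked, `_poll_task` logs "progress is N%", and completion is logged with a `duration_secs` value. The sketch below is a simplified, self-contained illustration of that loop; it is not oslo.vmware's `wait_for_task` implementation, and all names in it are hypothetical:

```python
import time

# Simplified illustration of the poll-until-done loop reflected by the
# "_poll_task ... progress is N%" / "completed successfully" records above.
# NOT oslo.vmware's wait_for_task; names and states are hypothetical.
def wait_for_task_like(poll_fn, interval=0.5, timeout=60.0):
    """poll_fn() -> (state, progress); state is 'running', 'success' or 'error'."""
    start = time.monotonic()
    while True:
        state, progress = poll_fn()
        if state == 'success':
            return time.monotonic() - start          # analogous to duration_secs
        if state == 'error':
            raise RuntimeError('task failed')
        print(f'progress is {progress}%')            # mirrors the DEBUG records
        if time.monotonic() - start > timeout:
            raise TimeoutError('task did not complete in time')
        time.sleep(interval)

# Example: a fake task that reports progress twice, then succeeds.
_calls = iter([('running', 0), ('running', 14), ('success', 100)])
duration = wait_for_task_like(lambda: next(_calls), interval=0.01)
print(f'completed successfully in {duration:.3f}s')
```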
[ 1156.929107] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: 15326f55-2db8-47c3-b1fd-ce8ba1174c79] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1156.931886] env[63028]: DEBUG nova.compute.utils [None req-1c601d39-c98b-4e91-9004-36a1dd0b0468 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1156.937566] env[63028]: DEBUG nova.compute.manager [None req-1c601d39-c98b-4e91-9004-36a1dd0b0468 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc] Allocating IP information in the background. {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1156.937833] env[63028]: DEBUG nova.network.neutron [None req-1c601d39-c98b-4e91-9004-36a1dd0b0468 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1157.056602] env[63028]: DEBUG oslo_vmware.api [None req-18406ad5-0840-4895-9de8-675e40b55c28 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736428, 'name': ReconfigVM_Task, 'duration_secs': 1.261776} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1157.057467] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-18406ad5-0840-4895-9de8-675e40b55c28 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7] Reconfigured VM instance instance-0000003e to detach disk 2000 {{(pid=63028) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1157.057911] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-027f3921-d944-4afc-a11f-8dfc51d74f3f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.092903] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-18406ad5-0840-4895-9de8-675e40b55c28 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7] Reconfiguring VM instance instance-0000003e to attach disk [datastore2] c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7/c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7.vmdk or device None with type thin {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1157.097566] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5e1afea1-5c8c-409d-a630-7e20abd0d90c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.119737] env[63028]: DEBUG oslo_vmware.api [None req-18406ad5-0840-4895-9de8-675e40b55c28 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Waiting for the task: (returnval){ [ 1157.119737] env[63028]: value = "task-2736430" [ 1157.119737] env[63028]: _type = "Task" [ 1157.119737] env[63028]: } to 
complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1157.137270] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8095c027-a15e-4bdc-a3e5-37b8018eaa83 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Lock "feb8d206-718d-423a-afff-76c6975934e6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.893s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1157.137972] env[63028]: DEBUG oslo_vmware.api [None req-18406ad5-0840-4895-9de8-675e40b55c28 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736430, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.145437] env[63028]: DEBUG nova.policy [None req-1c601d39-c98b-4e91-9004-36a1dd0b0468 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '54076b7d25474185b3f205437cb68be8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '68de7445caeb4381b9e68c685ccb5e0b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 1157.192625] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0b9ad31-4dab-4f4b-bde6-7071dcadd6d5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.201278] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0248cb30-cfd8-464c-95b0-2bad92e7131b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.238474] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b28681c-680c-4421-bbb6-9223faa6e0b0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.246169] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7f43af8-1acb-49a1-84b0-c6ff5098312c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.259600] env[63028]: DEBUG nova.compute.provider_tree [None req-334ade4b-af4a-4e4b-b587-cbb1a2b3d2fe tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1157.428651] env[63028]: DEBUG oslo_concurrency.lockutils [None req-26247c42-72a1-4188-932b-304ce27487f8 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Acquiring lock "feb8d206-718d-423a-afff-76c6975934e6" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=63028) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1157.429026] env[63028]: DEBUG oslo_concurrency.lockutils [None req-26247c42-72a1-4188-932b-304ce27487f8 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Lock "feb8d206-718d-423a-afff-76c6975934e6" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1157.429271] env[63028]: DEBUG nova.compute.manager [None req-26247c42-72a1-4188-932b-304ce27487f8 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: feb8d206-718d-423a-afff-76c6975934e6] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1157.430929] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffdf9848-dbd0-4846-9885-b2ff0e0fab1f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.435309] env[63028]: DEBUG nova.compute.manager [None req-1c601d39-c98b-4e91-9004-36a1dd0b0468 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc] Start building block device mappings for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1157.443023] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: 7e914e49-0d70-4024-940b-ad2a15e9dff7] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1157.443023] env[63028]: DEBUG nova.compute.manager [None req-26247c42-72a1-4188-932b-304ce27487f8 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: feb8d206-718d-423a-afff-76c6975934e6] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=63028) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1157.443023] env[63028]: DEBUG nova.objects.instance [None req-26247c42-72a1-4188-932b-304ce27487f8 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Lazy-loading 'flavor' on Instance uuid feb8d206-718d-423a-afff-76c6975934e6 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1157.514484] env[63028]: DEBUG nova.network.neutron [None req-1c601d39-c98b-4e91-9004-36a1dd0b0468 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc] Successfully created port: 4e372880-e224-473d-8bdf-03af99b8a0ac {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1157.631382] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5fcf499f-4c6c-4b87-bb3e-166990c5bf04 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Acquiring lock "f96d4bcd-a032-4e4d-94e4-12d7013d5e3f" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1157.631660] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5fcf499f-4c6c-4b87-bb3e-166990c5bf04 
tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Lock "f96d4bcd-a032-4e4d-94e4-12d7013d5e3f" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1157.631883] env[63028]: DEBUG nova.compute.manager [None req-5fcf499f-4c6c-4b87-bb3e-166990c5bf04 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: f96d4bcd-a032-4e4d-94e4-12d7013d5e3f] Going to confirm migration 8 {{(pid=63028) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1157.633327] env[63028]: DEBUG oslo_vmware.api [None req-18406ad5-0840-4895-9de8-675e40b55c28 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736430, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.762698] env[63028]: DEBUG nova.scheduler.client.report [None req-334ade4b-af4a-4e4b-b587-cbb1a2b3d2fe tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1157.954850] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: cd11b318-9158-4f1d-8aa8-1c9d565bb5d5] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1158.130880] env[63028]: DEBUG oslo_vmware.api [None req-18406ad5-0840-4895-9de8-675e40b55c28 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736430, 'name': ReconfigVM_Task, 'duration_secs': 0.543178} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1158.131187] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-18406ad5-0840-4895-9de8-675e40b55c28 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7] Reconfigured VM instance instance-0000003e to attach disk [datastore2] c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7/c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7.vmdk or device None with type thin {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1158.131464] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-18406ad5-0840-4895-9de8-675e40b55c28 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7] Updating instance 'c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7' progress to 50 {{(pid=63028) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1158.185269] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5fcf499f-4c6c-4b87-bb3e-166990c5bf04 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Acquiring lock "refresh_cache-f96d4bcd-a032-4e4d-94e4-12d7013d5e3f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1158.188082] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5fcf499f-4c6c-4b87-bb3e-166990c5bf04 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Acquired lock "refresh_cache-f96d4bcd-a032-4e4d-94e4-12d7013d5e3f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1158.188082] env[63028]: DEBUG nova.network.neutron [None req-5fcf499f-4c6c-4b87-bb3e-166990c5bf04 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: f96d4bcd-a032-4e4d-94e4-12d7013d5e3f] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1158.188082] env[63028]: DEBUG nova.objects.instance [None req-5fcf499f-4c6c-4b87-bb3e-166990c5bf04 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Lazy-loading 'info_cache' on Instance uuid f96d4bcd-a032-4e4d-94e4-12d7013d5e3f {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1158.461447] env[63028]: DEBUG nova.compute.manager [None req-1c601d39-c98b-4e91-9004-36a1dd0b0468 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc] Start spawning the instance on the hypervisor. 
{{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1158.465061] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: da88308f-ce62-40af-adae-e38aa506bdd9] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1158.465816] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-26247c42-72a1-4188-932b-304ce27487f8 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: feb8d206-718d-423a-afff-76c6975934e6] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1158.466061] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6e4fbab2-9af8-4da1-a07e-715e67629bdc {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.474053] env[63028]: DEBUG oslo_vmware.api [None req-26247c42-72a1-4188-932b-304ce27487f8 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Waiting for the task: (returnval){ [ 1158.474053] env[63028]: value = "task-2736431" [ 1158.474053] env[63028]: _type = "Task" [ 1158.474053] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1158.483394] env[63028]: DEBUG oslo_vmware.api [None req-26247c42-72a1-4188-932b-304ce27487f8 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2736431, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1158.493371] env[63028]: DEBUG nova.virt.hardware [None req-1c601d39-c98b-4e91-9004-36a1dd0b0468 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1158.493642] env[63028]: DEBUG nova.virt.hardware [None req-1c601d39-c98b-4e91-9004-36a1dd0b0468 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1158.493828] env[63028]: DEBUG nova.virt.hardware [None req-1c601d39-c98b-4e91-9004-36a1dd0b0468 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1158.494029] env[63028]: DEBUG nova.virt.hardware [None 
req-1c601d39-c98b-4e91-9004-36a1dd0b0468 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1158.494183] env[63028]: DEBUG nova.virt.hardware [None req-1c601d39-c98b-4e91-9004-36a1dd0b0468 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1158.494335] env[63028]: DEBUG nova.virt.hardware [None req-1c601d39-c98b-4e91-9004-36a1dd0b0468 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1158.494613] env[63028]: DEBUG nova.virt.hardware [None req-1c601d39-c98b-4e91-9004-36a1dd0b0468 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1158.494796] env[63028]: DEBUG nova.virt.hardware [None req-1c601d39-c98b-4e91-9004-36a1dd0b0468 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1158.494985] env[63028]: DEBUG nova.virt.hardware [None req-1c601d39-c98b-4e91-9004-36a1dd0b0468 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1158.495172] env[63028]: DEBUG nova.virt.hardware [None req-1c601d39-c98b-4e91-9004-36a1dd0b0468 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1158.495380] env[63028]: DEBUG nova.virt.hardware [None req-1c601d39-c98b-4e91-9004-36a1dd0b0468 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1158.496217] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d66b8b2-b0bd-4872-8f6a-69d7651e623e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.505514] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96070cd6-b9c0-49be-ad8a-1f00e0e0e3f6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.642579] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-967b5a46-6e84-4985-b5bb-b79fcd2777ce {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1158.662869] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bd2fea6-2974-4c74-9562-39a3bfd11437 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.681927] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-18406ad5-0840-4895-9de8-675e40b55c28 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7] Updating instance 'c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7' progress to 67 {{(pid=63028) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1158.779558] env[63028]: DEBUG oslo_concurrency.lockutils [None req-334ade4b-af4a-4e4b-b587-cbb1a2b3d2fe tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.354s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1158.969960] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: 3e45e7f3-a34f-4eab-9fff-1c874c832e2a] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1158.985964] env[63028]: DEBUG oslo_vmware.api [None req-26247c42-72a1-4188-932b-304ce27487f8 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2736431, 'name': PowerOffVM_Task, 'duration_secs': 0.215424} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1158.989445] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-26247c42-72a1-4188-932b-304ce27487f8 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: feb8d206-718d-423a-afff-76c6975934e6] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1158.989678] env[63028]: DEBUG nova.compute.manager [None req-26247c42-72a1-4188-932b-304ce27487f8 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: feb8d206-718d-423a-afff-76c6975934e6] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1158.990498] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce15b007-1f7f-4de5-b925-1ec9d4e1f4bf {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.105341] env[63028]: DEBUG nova.network.neutron [None req-1c601d39-c98b-4e91-9004-36a1dd0b0468 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc] Successfully updated port: 4e372880-e224-473d-8bdf-03af99b8a0ac {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1159.289699] env[63028]: DEBUG nova.network.neutron [None req-18406ad5-0840-4895-9de8-675e40b55c28 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7] Port 6ecb125b-389c-4dce-8446-368a7298e497 binding to destination host cpu-1 is already ACTIVE {{(pid=63028) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 
1159.328281] env[63028]: DEBUG nova.compute.manager [req-b3c564ce-08f9-4068-a9a8-c0f16e37cdfb req-1bb6bbd2-d2c7-4aa8-a57b-5d63f1b4d7cd service nova] [instance: e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc] Received event network-vif-plugged-4e372880-e224-473d-8bdf-03af99b8a0ac {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1159.328866] env[63028]: DEBUG oslo_concurrency.lockutils [req-b3c564ce-08f9-4068-a9a8-c0f16e37cdfb req-1bb6bbd2-d2c7-4aa8-a57b-5d63f1b4d7cd service nova] Acquiring lock "e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1159.328866] env[63028]: DEBUG oslo_concurrency.lockutils [req-b3c564ce-08f9-4068-a9a8-c0f16e37cdfb req-1bb6bbd2-d2c7-4aa8-a57b-5d63f1b4d7cd service nova] Lock "e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1159.328866] env[63028]: DEBUG oslo_concurrency.lockutils [req-b3c564ce-08f9-4068-a9a8-c0f16e37cdfb req-1bb6bbd2-d2c7-4aa8-a57b-5d63f1b4d7cd service nova] Lock "e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1159.329077] env[63028]: DEBUG nova.compute.manager [req-b3c564ce-08f9-4068-a9a8-c0f16e37cdfb req-1bb6bbd2-d2c7-4aa8-a57b-5d63f1b4d7cd service nova] [instance: e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc] No waiting events found dispatching network-vif-plugged-4e372880-e224-473d-8bdf-03af99b8a0ac {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1159.329188] env[63028]: WARNING nova.compute.manager [req-b3c564ce-08f9-4068-a9a8-c0f16e37cdfb req-1bb6bbd2-d2c7-4aa8-a57b-5d63f1b4d7cd service nova] [instance: e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc] Received unexpected event network-vif-plugged-4e372880-e224-473d-8bdf-03af99b8a0ac for instance with vm_state building and task_state spawning. 
[ 1159.343968] env[63028]: INFO nova.scheduler.client.report [None req-334ade4b-af4a-4e4b-b587-cbb1a2b3d2fe tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Deleted allocation for migration 580a6c87-377c-4c7e-ada0-3e213c6f38f8 [ 1159.402125] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9e5e516c-c3e0-4d72-adb4-2e39cf34e4a2 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Acquiring lock "e5767896-8203-4b18-826f-dcb2fe02268e" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1159.402399] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9e5e516c-c3e0-4d72-adb4-2e39cf34e4a2 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Lock "e5767896-8203-4b18-826f-dcb2fe02268e" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1159.402588] env[63028]: INFO nova.compute.manager [None req-9e5e516c-c3e0-4d72-adb4-2e39cf34e4a2 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Shelving [ 1159.476274] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: 5982cd5d-abf1-42d4-bb44-8d79de599f11] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1159.497682] env[63028]: DEBUG nova.network.neutron [None req-5fcf499f-4c6c-4b87-bb3e-166990c5bf04 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: f96d4bcd-a032-4e4d-94e4-12d7013d5e3f] Updating instance_info_cache with network_info: [{"id": "f5759890-6244-4b8a-9a03-6d628f2441b7", "address": "fa:16:3e:61:25:8b", "network": {"id": "9c32515c-66bf-4a5f-9011-975fe2f6b264", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1101331936-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8efc6d89903c454eb39136a76e0adef5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92fe29b3-0907-453d-aabb-5559c4bd7c0f", "external-id": "nsx-vlan-transportzone-482", "segmentation_id": 482, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf5759890-62", "ovs_interfaceid": "f5759890-6244-4b8a-9a03-6d628f2441b7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1159.506565] env[63028]: DEBUG oslo_concurrency.lockutils [None req-26247c42-72a1-4188-932b-304ce27487f8 tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Lock 
"feb8d206-718d-423a-afff-76c6975934e6" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.077s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1159.611073] env[63028]: DEBUG oslo_concurrency.lockutils [None req-1c601d39-c98b-4e91-9004-36a1dd0b0468 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Acquiring lock "refresh_cache-e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1159.611073] env[63028]: DEBUG oslo_concurrency.lockutils [None req-1c601d39-c98b-4e91-9004-36a1dd0b0468 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Acquired lock "refresh_cache-e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1159.611073] env[63028]: DEBUG nova.network.neutron [None req-1c601d39-c98b-4e91-9004-36a1dd0b0468 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1159.851443] env[63028]: DEBUG oslo_concurrency.lockutils [None req-334ade4b-af4a-4e4b-b587-cbb1a2b3d2fe tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Lock "092c7673-97fb-4085-852c-04a7c19a73e7" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 8.398s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1159.981193] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: 1316318e-8dcf-4ac2-b40a-6a3ab6964997] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1160.000568] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5fcf499f-4c6c-4b87-bb3e-166990c5bf04 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Releasing lock "refresh_cache-f96d4bcd-a032-4e4d-94e4-12d7013d5e3f" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1160.000853] env[63028]: DEBUG nova.objects.instance [None req-5fcf499f-4c6c-4b87-bb3e-166990c5bf04 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Lazy-loading 'migration_context' on Instance uuid f96d4bcd-a032-4e4d-94e4-12d7013d5e3f {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1160.041141] env[63028]: INFO nova.compute.manager [None req-e1f4736d-3145-4f11-8ef7-3bd4cd5eed0f tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 092c7673-97fb-4085-852c-04a7c19a73e7] Get console output [ 1160.041333] env[63028]: WARNING nova.virt.vmwareapi.driver [None req-e1f4736d-3145-4f11-8ef7-3bd4cd5eed0f tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 092c7673-97fb-4085-852c-04a7c19a73e7] The console log is missing. 
Check your VSPC configuration [ 1160.141775] env[63028]: DEBUG nova.network.neutron [None req-1c601d39-c98b-4e91-9004-36a1dd0b0468 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc] Instance cache missing network info. {{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1160.278320] env[63028]: DEBUG nova.network.neutron [None req-1c601d39-c98b-4e91-9004-36a1dd0b0468 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc] Updating instance_info_cache with network_info: [{"id": "4e372880-e224-473d-8bdf-03af99b8a0ac", "address": "fa:16:3e:2f:e1:95", "network": {"id": "063de6d9-43e6-4adb-88dc-d4fe17058488", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-659483097-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68de7445caeb4381b9e68c685ccb5e0b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b356db78-99c7-4464-822c-fc7e193f7878", "external-id": "nsx-vlan-transportzone-231", "segmentation_id": 231, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4e372880-e2", "ovs_interfaceid": "4e372880-e224-473d-8bdf-03af99b8a0ac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1160.314570] env[63028]: DEBUG oslo_concurrency.lockutils [None req-18406ad5-0840-4895-9de8-675e40b55c28 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Acquiring lock "c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1160.314982] env[63028]: DEBUG oslo_concurrency.lockutils [None req-18406ad5-0840-4895-9de8-675e40b55c28 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Lock "c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1160.315295] env[63028]: DEBUG oslo_concurrency.lockutils [None req-18406ad5-0840-4895-9de8-675e40b55c28 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Lock "c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1160.373451] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a3046b25-04bd-41d4-9b7e-9d1f4ec87c0d tempest-ServerAddressesNegativeTestJSON-951822499 
tempest-ServerAddressesNegativeTestJSON-951822499-project-member] Acquiring lock "5c63c2bb-4725-4722-98e2-cd5c71944c4e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1160.373896] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a3046b25-04bd-41d4-9b7e-9d1f4ec87c0d tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] Lock "5c63c2bb-4725-4722-98e2-cd5c71944c4e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1160.420627] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e5e516c-c3e0-4d72-adb4-2e39cf34e4a2 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1160.420627] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3c57d0b7-720c-4f69-bb87-45340246047a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.426392] env[63028]: DEBUG oslo_vmware.api [None req-9e5e516c-c3e0-4d72-adb4-2e39cf34e4a2 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Waiting for the task: (returnval){ [ 1160.426392] env[63028]: value = "task-2736432" [ 1160.426392] env[63028]: _type = "Task" [ 1160.426392] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1160.435349] env[63028]: DEBUG oslo_vmware.api [None req-9e5e516c-c3e0-4d72-adb4-2e39cf34e4a2 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2736432, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1160.482953] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: 600195de-ceb4-41a6-9ade-dda8b898e4db] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1160.504042] env[63028]: DEBUG nova.objects.base [None req-5fcf499f-4c6c-4b87-bb3e-166990c5bf04 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=63028) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1160.504800] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c88b0d5-f8cc-4a8e-84b4-5ad6ca7a8b00 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.525712] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6e90b743-ae01-40a0-b94c-d6b6825fb2ee {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.531967] env[63028]: DEBUG oslo_vmware.api [None req-5fcf499f-4c6c-4b87-bb3e-166990c5bf04 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Waiting for the task: (returnval){ [ 1160.531967] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]520d1b82-54df-5be5-4788-92ef16a73e20" [ 1160.531967] env[63028]: _type = "Task" [ 1160.531967] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1160.539995] env[63028]: DEBUG oslo_vmware.api [None req-5fcf499f-4c6c-4b87-bb3e-166990c5bf04 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]520d1b82-54df-5be5-4788-92ef16a73e20, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1160.782477] env[63028]: DEBUG oslo_concurrency.lockutils [None req-1c601d39-c98b-4e91-9004-36a1dd0b0468 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Releasing lock "refresh_cache-e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1160.782830] env[63028]: DEBUG nova.compute.manager [None req-1c601d39-c98b-4e91-9004-36a1dd0b0468 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc] Instance network_info: |[{"id": "4e372880-e224-473d-8bdf-03af99b8a0ac", "address": "fa:16:3e:2f:e1:95", "network": {"id": "063de6d9-43e6-4adb-88dc-d4fe17058488", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-659483097-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68de7445caeb4381b9e68c685ccb5e0b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b356db78-99c7-4464-822c-fc7e193f7878", "external-id": "nsx-vlan-transportzone-231", "segmentation_id": 231, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4e372880-e2", "ovs_interfaceid": "4e372880-e224-473d-8bdf-03af99b8a0ac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1160.783290] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-1c601d39-c98b-4e91-9004-36a1dd0b0468 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2f:e1:95', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b356db78-99c7-4464-822c-fc7e193f7878', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4e372880-e224-473d-8bdf-03af99b8a0ac', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1160.793052] env[63028]: DEBUG oslo.service.loopingcall [None req-1c601d39-c98b-4e91-9004-36a1dd0b0468 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1160.793052] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1160.793052] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4fa41d0c-2751-446b-9707-c8c6ac48696d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.812137] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1160.812137] env[63028]: value = "task-2736433" [ 1160.812137] env[63028]: _type = "Task" [ 1160.812137] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1160.827281] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2736433, 'name': CreateVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1160.876902] env[63028]: DEBUG nova.compute.manager [None req-a3046b25-04bd-41d4-9b7e-9d1f4ec87c0d tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] [instance: 5c63c2bb-4725-4722-98e2-cd5c71944c4e] Starting instance... {{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1160.939682] env[63028]: DEBUG oslo_vmware.api [None req-9e5e516c-c3e0-4d72-adb4-2e39cf34e4a2 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2736432, 'name': PowerOffVM_Task, 'duration_secs': 0.194465} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1160.939955] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e5e516c-c3e0-4d72-adb4-2e39cf34e4a2 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1160.940850] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-564a609e-77f6-4958-a663-c2704d3719cf {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.959192] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1555c9da-0c1e-4064-8566-5533b72ccdef {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.986399] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: c06813c4-472d-4bf9-84ec-0d01306bcd48] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1161.041936] env[63028]: DEBUG oslo_vmware.api [None req-5fcf499f-4c6c-4b87-bb3e-166990c5bf04 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]520d1b82-54df-5be5-4788-92ef16a73e20, 'name': SearchDatastore_Task, 'duration_secs': 0.01035} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1161.042247] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5fcf499f-4c6c-4b87-bb3e-166990c5bf04 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1161.042527] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5fcf499f-4c6c-4b87-bb3e-166990c5bf04 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1161.322244] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2736433, 'name': CreateVM_Task, 'duration_secs': 0.295227} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1161.322444] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1161.323149] env[63028]: DEBUG oslo_concurrency.lockutils [None req-1c601d39-c98b-4e91-9004-36a1dd0b0468 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1161.323321] env[63028]: DEBUG oslo_concurrency.lockutils [None req-1c601d39-c98b-4e91-9004-36a1dd0b0468 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1161.323648] env[63028]: DEBUG oslo_concurrency.lockutils [None req-1c601d39-c98b-4e91-9004-36a1dd0b0468 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1161.323894] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-161eb037-c79b-4fb6-946c-7733f16f0b91 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.331709] env[63028]: DEBUG oslo_vmware.api [None req-1c601d39-c98b-4e91-9004-36a1dd0b0468 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Waiting for the task: (returnval){ [ 1161.331709] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52217c7e-2a4a-1bc4-b780-a58985dfcfab" [ 1161.331709] env[63028]: _type = "Task" [ 1161.331709] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1161.339278] env[63028]: DEBUG oslo_vmware.api [None req-1c601d39-c98b-4e91-9004-36a1dd0b0468 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52217c7e-2a4a-1bc4-b780-a58985dfcfab, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1161.351546] env[63028]: DEBUG nova.compute.manager [req-bed92951-bb42-4d9b-8c03-3b6a3a6cd7a1 req-9da86c6c-2b05-4949-a363-26818cfcef55 service nova] [instance: e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc] Received event network-changed-4e372880-e224-473d-8bdf-03af99b8a0ac {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1161.351741] env[63028]: DEBUG nova.compute.manager [req-bed92951-bb42-4d9b-8c03-3b6a3a6cd7a1 req-9da86c6c-2b05-4949-a363-26818cfcef55 service nova] [instance: e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc] Refreshing instance network info cache due to event network-changed-4e372880-e224-473d-8bdf-03af99b8a0ac. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1161.352032] env[63028]: DEBUG oslo_concurrency.lockutils [req-bed92951-bb42-4d9b-8c03-3b6a3a6cd7a1 req-9da86c6c-2b05-4949-a363-26818cfcef55 service nova] Acquiring lock "refresh_cache-e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1161.352190] env[63028]: DEBUG oslo_concurrency.lockutils [req-bed92951-bb42-4d9b-8c03-3b6a3a6cd7a1 req-9da86c6c-2b05-4949-a363-26818cfcef55 service nova] Acquired lock "refresh_cache-e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1161.352370] env[63028]: DEBUG nova.network.neutron [req-bed92951-bb42-4d9b-8c03-3b6a3a6cd7a1 req-9da86c6c-2b05-4949-a363-26818cfcef55 service nova] [instance: e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc] Refreshing network info cache for port 4e372880-e224-473d-8bdf-03af99b8a0ac {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1161.361137] env[63028]: DEBUG oslo_concurrency.lockutils [None req-18406ad5-0840-4895-9de8-675e40b55c28 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Acquiring lock "refresh_cache-c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1161.361304] env[63028]: DEBUG oslo_concurrency.lockutils [None req-18406ad5-0840-4895-9de8-675e40b55c28 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Acquired lock "refresh_cache-c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1161.361471] env[63028]: DEBUG nova.network.neutron [None req-18406ad5-0840-4895-9de8-675e40b55c28 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1161.401803] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a3046b25-04bd-41d4-9b7e-9d1f4ec87c0d 
tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1161.469256] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-9e5e516c-c3e0-4d72-adb4-2e39cf34e4a2 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Creating Snapshot of the VM instance {{(pid=63028) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1161.469542] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-5f858e62-8bfc-4f89-bb94-6da5530ccc31 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.477583] env[63028]: DEBUG oslo_vmware.api [None req-9e5e516c-c3e0-4d72-adb4-2e39cf34e4a2 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Waiting for the task: (returnval){ [ 1161.477583] env[63028]: value = "task-2736434" [ 1161.477583] env[63028]: _type = "Task" [ 1161.477583] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1161.486170] env[63028]: DEBUG oslo_vmware.api [None req-9e5e516c-c3e0-4d72-adb4-2e39cf34e4a2 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2736434, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1161.489720] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: 6e0959ac-8fca-47eb-b501-b50a3e9f025a] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1161.617888] env[63028]: DEBUG oslo_concurrency.lockutils [None req-adb328e3-0717-4963-b2fd-bbe34e60a3ca tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Acquiring lock "feb8d206-718d-423a-afff-76c6975934e6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1161.618230] env[63028]: DEBUG oslo_concurrency.lockutils [None req-adb328e3-0717-4963-b2fd-bbe34e60a3ca tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Lock "feb8d206-718d-423a-afff-76c6975934e6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1161.618479] env[63028]: DEBUG oslo_concurrency.lockutils [None req-adb328e3-0717-4963-b2fd-bbe34e60a3ca tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Acquiring lock "feb8d206-718d-423a-afff-76c6975934e6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1161.618709] env[63028]: DEBUG oslo_concurrency.lockutils [None req-adb328e3-0717-4963-b2fd-bbe34e60a3ca 
tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Lock "feb8d206-718d-423a-afff-76c6975934e6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1161.618920] env[63028]: DEBUG oslo_concurrency.lockutils [None req-adb328e3-0717-4963-b2fd-bbe34e60a3ca tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Lock "feb8d206-718d-423a-afff-76c6975934e6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1161.621626] env[63028]: INFO nova.compute.manager [None req-adb328e3-0717-4963-b2fd-bbe34e60a3ca tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: feb8d206-718d-423a-afff-76c6975934e6] Terminating instance [ 1161.718685] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4eb93ba1-ccb8-4c12-8894-7d78e79467a0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.726206] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28085ee0-5e9d-4a50-9e0e-83630b8a48f1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.757626] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21a7c64c-bcdf-4913-b4ff-2d679410518f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.765475] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9067427b-1539-4000-a73d-e93a650eb772 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.778923] env[63028]: DEBUG nova.compute.provider_tree [None req-5fcf499f-4c6c-4b87-bb3e-166990c5bf04 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1161.842300] env[63028]: DEBUG oslo_vmware.api [None req-1c601d39-c98b-4e91-9004-36a1dd0b0468 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52217c7e-2a4a-1bc4-b780-a58985dfcfab, 'name': SearchDatastore_Task, 'duration_secs': 0.010082} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1161.842658] env[63028]: DEBUG oslo_concurrency.lockutils [None req-1c601d39-c98b-4e91-9004-36a1dd0b0468 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1161.842892] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-1c601d39-c98b-4e91-9004-36a1dd0b0468 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1161.843132] env[63028]: DEBUG oslo_concurrency.lockutils [None req-1c601d39-c98b-4e91-9004-36a1dd0b0468 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1161.843286] env[63028]: DEBUG oslo_concurrency.lockutils [None req-1c601d39-c98b-4e91-9004-36a1dd0b0468 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1161.843476] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-1c601d39-c98b-4e91-9004-36a1dd0b0468 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1161.843795] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8e006e5b-c394-411c-afdb-7d3afcd62a70 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.852386] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-1c601d39-c98b-4e91-9004-36a1dd0b0468 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1161.852602] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-1c601d39-c98b-4e91-9004-36a1dd0b0468 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1161.853357] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-98d7ffda-922d-46e2-bfb7-6001d0edca16 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.860471] env[63028]: DEBUG oslo_vmware.api [None req-1c601d39-c98b-4e91-9004-36a1dd0b0468 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Waiting for the task: (returnval){ [ 1161.860471] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52e4c28a-e222-400c-b807-d734708e33da" [ 1161.860471] env[63028]: _type = "Task" [ 1161.860471] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1161.869655] env[63028]: DEBUG oslo_vmware.api [None req-1c601d39-c98b-4e91-9004-36a1dd0b0468 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52e4c28a-e222-400c-b807-d734708e33da, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1161.988464] env[63028]: DEBUG oslo_vmware.api [None req-9e5e516c-c3e0-4d72-adb4-2e39cf34e4a2 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2736434, 'name': CreateSnapshot_Task, 'duration_secs': 0.379955} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1161.988464] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-9e5e516c-c3e0-4d72-adb4-2e39cf34e4a2 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Created Snapshot of the VM instance {{(pid=63028) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1161.988717] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6c64db4-049b-4baa-b700-16304502b9fc {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.993281] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: b9d9fe4e-438c-4f68-b011-9eb9e10a381c] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1162.128021] env[63028]: DEBUG nova.compute.manager [None req-adb328e3-0717-4963-b2fd-bbe34e60a3ca tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: feb8d206-718d-423a-afff-76c6975934e6] Start destroying the instance on the hypervisor. 
{{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1162.128021] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-adb328e3-0717-4963-b2fd-bbe34e60a3ca tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: feb8d206-718d-423a-afff-76c6975934e6] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1162.128021] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3026cf52-03a9-4f6b-849b-fa440f204a89 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.135392] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-adb328e3-0717-4963-b2fd-bbe34e60a3ca tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: feb8d206-718d-423a-afff-76c6975934e6] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1162.137967] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e2d39a53-9c2b-430f-bd73-05fad51e9859 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.283841] env[63028]: DEBUG nova.scheduler.client.report [None req-5fcf499f-4c6c-4b87-bb3e-166990c5bf04 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1162.371558] env[63028]: DEBUG oslo_vmware.api [None req-1c601d39-c98b-4e91-9004-36a1dd0b0468 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52e4c28a-e222-400c-b807-d734708e33da, 'name': SearchDatastore_Task, 'duration_secs': 0.00872} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1162.372651] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d60570e7-eb42-422c-b465-82c2add54736 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.377609] env[63028]: DEBUG oslo_vmware.api [None req-1c601d39-c98b-4e91-9004-36a1dd0b0468 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Waiting for the task: (returnval){ [ 1162.377609] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52012e14-2f70-29cb-a818-af600e2ef50a" [ 1162.377609] env[63028]: _type = "Task" [ 1162.377609] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1162.386611] env[63028]: DEBUG oslo_vmware.api [None req-1c601d39-c98b-4e91-9004-36a1dd0b0468 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52012e14-2f70-29cb-a818-af600e2ef50a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1162.500558] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: ba57ed92-aaef-460c-bd45-d0cbe09e4615] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1162.509012] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-9e5e516c-c3e0-4d72-adb4-2e39cf34e4a2 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Creating linked-clone VM from snapshot {{(pid=63028) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1162.509683] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-3318fcb6-2321-4e51-877f-fee68cdb512b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.519433] env[63028]: DEBUG oslo_vmware.api [None req-9e5e516c-c3e0-4d72-adb4-2e39cf34e4a2 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Waiting for the task: (returnval){ [ 1162.519433] env[63028]: value = "task-2736436" [ 1162.519433] env[63028]: _type = "Task" [ 1162.519433] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1162.528478] env[63028]: DEBUG oslo_vmware.api [None req-9e5e516c-c3e0-4d72-adb4-2e39cf34e4a2 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2736436, 'name': CloneVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1162.538606] env[63028]: DEBUG nova.network.neutron [req-bed92951-bb42-4d9b-8c03-3b6a3a6cd7a1 req-9da86c6c-2b05-4949-a363-26818cfcef55 service nova] [instance: e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc] Updated VIF entry in instance network info cache for port 4e372880-e224-473d-8bdf-03af99b8a0ac. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1162.539044] env[63028]: DEBUG nova.network.neutron [req-bed92951-bb42-4d9b-8c03-3b6a3a6cd7a1 req-9da86c6c-2b05-4949-a363-26818cfcef55 service nova] [instance: e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc] Updating instance_info_cache with network_info: [{"id": "4e372880-e224-473d-8bdf-03af99b8a0ac", "address": "fa:16:3e:2f:e1:95", "network": {"id": "063de6d9-43e6-4adb-88dc-d4fe17058488", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-659483097-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68de7445caeb4381b9e68c685ccb5e0b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b356db78-99c7-4464-822c-fc7e193f7878", "external-id": "nsx-vlan-transportzone-231", "segmentation_id": 231, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4e372880-e2", "ovs_interfaceid": "4e372880-e224-473d-8bdf-03af99b8a0ac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1162.555049] env[63028]: DEBUG nova.network.neutron [None req-18406ad5-0840-4895-9de8-675e40b55c28 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7] Updating instance_info_cache with network_info: [{"id": "6ecb125b-389c-4dce-8446-368a7298e497", "address": "fa:16:3e:f4:06:c4", "network": {"id": "ec7daf90-9857-4c67-8588-1d8404499409", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-574413795-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.163", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e85128c5c889438bbb1df571b7756c6a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e1a5c1-4ae7-409b-8de7-d401684ef60d", "external-id": "nsx-vlan-transportzone-740", "segmentation_id": 740, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6ecb125b-38", "ovs_interfaceid": "6ecb125b-389c-4dce-8446-368a7298e497", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1162.774286] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-adb328e3-0717-4963-b2fd-bbe34e60a3ca tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: feb8d206-718d-423a-afff-76c6975934e6] Unregistered the VM {{(pid=63028) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1162.774594] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-adb328e3-0717-4963-b2fd-bbe34e60a3ca tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: feb8d206-718d-423a-afff-76c6975934e6] Deleting contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1162.774864] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-adb328e3-0717-4963-b2fd-bbe34e60a3ca tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Deleting the datastore file [datastore1] feb8d206-718d-423a-afff-76c6975934e6 {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1162.775196] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-04725cbf-8ef9-4f95-93bf-f90e000fc147 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.782747] env[63028]: DEBUG oslo_vmware.api [None req-adb328e3-0717-4963-b2fd-bbe34e60a3ca tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Waiting for the task: (returnval){ [ 1162.782747] env[63028]: value = "task-2736437" [ 1162.782747] env[63028]: _type = "Task" [ 1162.782747] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1162.792680] env[63028]: DEBUG oslo_vmware.api [None req-adb328e3-0717-4963-b2fd-bbe34e60a3ca tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2736437, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1162.892495] env[63028]: DEBUG oslo_vmware.api [None req-1c601d39-c98b-4e91-9004-36a1dd0b0468 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52012e14-2f70-29cb-a818-af600e2ef50a, 'name': SearchDatastore_Task, 'duration_secs': 0.009618} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1162.893212] env[63028]: DEBUG oslo_concurrency.lockutils [None req-1c601d39-c98b-4e91-9004-36a1dd0b0468 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1162.893613] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c601d39-c98b-4e91-9004-36a1dd0b0468 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc/e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1162.894297] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e588a1cb-6acb-4d0e-af0f-ae0808ed87ad {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.901536] env[63028]: DEBUG oslo_vmware.api [None req-1c601d39-c98b-4e91-9004-36a1dd0b0468 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Waiting for the task: (returnval){ [ 1162.901536] env[63028]: value = "task-2736438" [ 1162.901536] env[63028]: _type = "Task" [ 1162.901536] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1162.909903] env[63028]: DEBUG oslo_vmware.api [None req-1c601d39-c98b-4e91-9004-36a1dd0b0468 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': task-2736438, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1163.010298] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: f3277886-4498-45c6-be68-e71d8293dc00] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1163.030136] env[63028]: DEBUG oslo_vmware.api [None req-9e5e516c-c3e0-4d72-adb4-2e39cf34e4a2 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2736436, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1163.041982] env[63028]: DEBUG oslo_concurrency.lockutils [req-bed92951-bb42-4d9b-8c03-3b6a3a6cd7a1 req-9da86c6c-2b05-4949-a363-26818cfcef55 service nova] Releasing lock "refresh_cache-e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1163.058163] env[63028]: DEBUG oslo_concurrency.lockutils [None req-18406ad5-0840-4895-9de8-675e40b55c28 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Releasing lock "refresh_cache-c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1163.295072] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5fcf499f-4c6c-4b87-bb3e-166990c5bf04 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.252s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1163.298524] env[63028]: DEBUG oslo_vmware.api [None req-adb328e3-0717-4963-b2fd-bbe34e60a3ca tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2736437, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.141336} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1163.298851] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a3046b25-04bd-41d4-9b7e-9d1f4ec87c0d tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.897s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1163.301012] env[63028]: INFO nova.compute.claims [None req-a3046b25-04bd-41d4-9b7e-9d1f4ec87c0d tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] [instance: 5c63c2bb-4725-4722-98e2-cd5c71944c4e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1163.303411] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-adb328e3-0717-4963-b2fd-bbe34e60a3ca tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1163.303663] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-adb328e3-0717-4963-b2fd-bbe34e60a3ca tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: feb8d206-718d-423a-afff-76c6975934e6] Deleted contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1163.303885] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-adb328e3-0717-4963-b2fd-bbe34e60a3ca tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: feb8d206-718d-423a-afff-76c6975934e6] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1163.304157] env[63028]: INFO nova.compute.manager [None req-adb328e3-0717-4963-b2fd-bbe34e60a3ca 
tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: feb8d206-718d-423a-afff-76c6975934e6] Took 1.18 seconds to destroy the instance on the hypervisor. [ 1163.304521] env[63028]: DEBUG oslo.service.loopingcall [None req-adb328e3-0717-4963-b2fd-bbe34e60a3ca tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1163.305293] env[63028]: DEBUG nova.compute.manager [-] [instance: feb8d206-718d-423a-afff-76c6975934e6] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1163.305459] env[63028]: DEBUG nova.network.neutron [-] [instance: feb8d206-718d-423a-afff-76c6975934e6] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1163.412436] env[63028]: DEBUG oslo_vmware.api [None req-1c601d39-c98b-4e91-9004-36a1dd0b0468 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': task-2736438, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.501776} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1163.412725] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c601d39-c98b-4e91-9004-36a1dd0b0468 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc/e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1163.412948] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-1c601d39-c98b-4e91-9004-36a1dd0b0468 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1163.413225] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f34de700-7142-4cd6-bbbe-98b5385858d9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.419489] env[63028]: DEBUG oslo_vmware.api [None req-1c601d39-c98b-4e91-9004-36a1dd0b0468 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Waiting for the task: (returnval){ [ 1163.419489] env[63028]: value = "task-2736439" [ 1163.419489] env[63028]: _type = "Task" [ 1163.419489] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1163.427143] env[63028]: DEBUG oslo_vmware.api [None req-1c601d39-c98b-4e91-9004-36a1dd0b0468 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': task-2736439, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1163.513444] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: d663c2df-ae54-4c50-a70f-e2180700c700] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1163.532126] env[63028]: DEBUG oslo_vmware.api [None req-9e5e516c-c3e0-4d72-adb4-2e39cf34e4a2 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2736436, 'name': CloneVM_Task} progress is 95%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1163.571923] env[63028]: DEBUG nova.compute.manager [req-261d65fa-ae85-4971-85c1-aae939176ea4 req-fb4cfa61-da77-479a-88b0-12148c15f296 service nova] [instance: feb8d206-718d-423a-afff-76c6975934e6] Received event network-vif-deleted-6e773949-46af-40ce-a861-613b26d282ab {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1163.572228] env[63028]: INFO nova.compute.manager [req-261d65fa-ae85-4971-85c1-aae939176ea4 req-fb4cfa61-da77-479a-88b0-12148c15f296 service nova] [instance: feb8d206-718d-423a-afff-76c6975934e6] Neutron deleted interface 6e773949-46af-40ce-a861-613b26d282ab; detaching it from the instance and deleting it from the info cache [ 1163.572433] env[63028]: DEBUG nova.network.neutron [req-261d65fa-ae85-4971-85c1-aae939176ea4 req-fb4cfa61-da77-479a-88b0-12148c15f296 service nova] [instance: feb8d206-718d-423a-afff-76c6975934e6] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1163.585153] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c47e7b74-95b5-4e2a-9b69-03e882d43a38 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.604978] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-278168b1-40ce-473d-a234-619d42b421ea {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.613103] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-18406ad5-0840-4895-9de8-675e40b55c28 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7] Updating instance 'c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7' progress to 83 {{(pid=63028) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1163.862641] env[63028]: INFO nova.scheduler.client.report [None req-5fcf499f-4c6c-4b87-bb3e-166990c5bf04 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Deleted allocation for migration 6dd553ad-526a-4c74-9b06-c8c1cfe1fe09 [ 1163.928954] env[63028]: DEBUG oslo_vmware.api [None req-1c601d39-c98b-4e91-9004-36a1dd0b0468 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': task-2736439, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066954} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1163.929498] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-1c601d39-c98b-4e91-9004-36a1dd0b0468 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1163.930291] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec683aa6-c83d-478c-97fd-db9001d693ae {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.954917] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-1c601d39-c98b-4e91-9004-36a1dd0b0468 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc] Reconfiguring VM instance instance-00000076 to attach disk [datastore1] e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc/e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1163.955269] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f959b5e7-fd5f-4db4-9b0f-7300cc6ec881 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.978488] env[63028]: DEBUG oslo_vmware.api [None req-1c601d39-c98b-4e91-9004-36a1dd0b0468 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Waiting for the task: (returnval){ [ 1163.978488] env[63028]: value = "task-2736440" [ 1163.978488] env[63028]: _type = "Task" [ 1163.978488] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1163.986440] env[63028]: DEBUG oslo_vmware.api [None req-1c601d39-c98b-4e91-9004-36a1dd0b0468 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': task-2736440, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1164.017240] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1164.017539] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Cleaning up deleted instances with incomplete migration {{(pid=63028) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11783}} [ 1164.030256] env[63028]: DEBUG oslo_vmware.api [None req-9e5e516c-c3e0-4d72-adb4-2e39cf34e4a2 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2736436, 'name': CloneVM_Task, 'duration_secs': 1.160889} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1164.031122] env[63028]: INFO nova.virt.vmwareapi.vmops [None req-9e5e516c-c3e0-4d72-adb4-2e39cf34e4a2 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Created linked-clone VM from snapshot [ 1164.031853] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32df32f7-c1a2-4de5-a714-e80ea7dc4fcc {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.039739] env[63028]: DEBUG nova.virt.vmwareapi.images [None req-9e5e516c-c3e0-4d72-adb4-2e39cf34e4a2 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Uploading image 792ecd02-edc1-4227-9a1b-93345040c770 {{(pid=63028) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1164.056632] env[63028]: DEBUG nova.network.neutron [-] [instance: feb8d206-718d-423a-afff-76c6975934e6] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1164.066747] env[63028]: DEBUG oslo_vmware.rw_handles [None req-9e5e516c-c3e0-4d72-adb4-2e39cf34e4a2 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1164.066747] env[63028]: value = "vm-550887" [ 1164.066747] env[63028]: _type = "VirtualMachine" [ 1164.066747] env[63028]: }. {{(pid=63028) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1164.067020] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-c8bb7bdd-6fbf-4653-bb87-17da16486299 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.074156] env[63028]: DEBUG oslo_vmware.rw_handles [None req-9e5e516c-c3e0-4d72-adb4-2e39cf34e4a2 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Lease: (returnval){ [ 1164.074156] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52f61e5d-a8b9-ab8c-1416-93d35c61bfa7" [ 1164.074156] env[63028]: _type = "HttpNfcLease" [ 1164.074156] env[63028]: } obtained for exporting VM: (result){ [ 1164.074156] env[63028]: value = "vm-550887" [ 1164.074156] env[63028]: _type = "VirtualMachine" [ 1164.074156] env[63028]: }. {{(pid=63028) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1164.074520] env[63028]: DEBUG oslo_vmware.api [None req-9e5e516c-c3e0-4d72-adb4-2e39cf34e4a2 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Waiting for the lease: (returnval){ [ 1164.074520] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52f61e5d-a8b9-ab8c-1416-93d35c61bfa7" [ 1164.074520] env[63028]: _type = "HttpNfcLease" [ 1164.074520] env[63028]: } to be ready. 
{{(pid=63028) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1164.077754] env[63028]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7025ac90-bf43-4b7e-84ab-2c3718f11b2e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.082854] env[63028]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1164.082854] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52f61e5d-a8b9-ab8c-1416-93d35c61bfa7" [ 1164.082854] env[63028]: _type = "HttpNfcLease" [ 1164.082854] env[63028]: } is initializing. {{(pid=63028) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1164.086424] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-475e8a3d-43da-473a-98bb-bb7756a48be7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.117491] env[63028]: DEBUG nova.compute.manager [req-261d65fa-ae85-4971-85c1-aae939176ea4 req-fb4cfa61-da77-479a-88b0-12148c15f296 service nova] [instance: feb8d206-718d-423a-afff-76c6975934e6] Detach interface failed, port_id=6e773949-46af-40ce-a861-613b26d282ab, reason: Instance feb8d206-718d-423a-afff-76c6975934e6 could not be found. {{(pid=63028) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1164.119468] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-18406ad5-0840-4895-9de8-675e40b55c28 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1164.119755] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-176cb785-0206-42c1-8ac8-dd9131d49304 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.125868] env[63028]: DEBUG oslo_vmware.api [None req-18406ad5-0840-4895-9de8-675e40b55c28 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Waiting for the task: (returnval){ [ 1164.125868] env[63028]: value = "task-2736442" [ 1164.125868] env[63028]: _type = "Task" [ 1164.125868] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1164.133550] env[63028]: DEBUG oslo_vmware.api [None req-18406ad5-0840-4895-9de8-675e40b55c28 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736442, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1164.368067] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5fcf499f-4c6c-4b87-bb3e-166990c5bf04 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Lock "f96d4bcd-a032-4e4d-94e4-12d7013d5e3f" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 6.736s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1164.466132] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84696a77-c825-49da-89ba-8adce4be9e83 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.473319] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-529f1774-faea-4df3-8dee-6d3a3c542d5f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.507071] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc0b0037-8832-473d-8f2e-26b014c07a13 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.514321] env[63028]: DEBUG oslo_vmware.api [None req-1c601d39-c98b-4e91-9004-36a1dd0b0468 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': task-2736440, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1164.517546] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec57a036-f160-46e6-8d24-f4f31de33c62 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.521615] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1164.532255] env[63028]: DEBUG nova.compute.provider_tree [None req-a3046b25-04bd-41d4-9b7e-9d1f4ec87c0d tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1164.560602] env[63028]: INFO nova.compute.manager [-] [instance: feb8d206-718d-423a-afff-76c6975934e6] Took 1.25 seconds to deallocate network for instance. [ 1164.583669] env[63028]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1164.583669] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52f61e5d-a8b9-ab8c-1416-93d35c61bfa7" [ 1164.583669] env[63028]: _type = "HttpNfcLease" [ 1164.583669] env[63028]: } is ready. 
{{(pid=63028) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1164.583669] env[63028]: DEBUG oslo_vmware.rw_handles [None req-9e5e516c-c3e0-4d72-adb4-2e39cf34e4a2 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1164.583669] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52f61e5d-a8b9-ab8c-1416-93d35c61bfa7" [ 1164.583669] env[63028]: _type = "HttpNfcLease" [ 1164.583669] env[63028]: }. {{(pid=63028) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1164.584049] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32be1528-231a-4aa0-a9ea-5e57b916a822 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.591074] env[63028]: DEBUG oslo_vmware.rw_handles [None req-9e5e516c-c3e0-4d72-adb4-2e39cf34e4a2 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52202ffe-fe5c-e793-14c5-7304a439cfc7/disk-0.vmdk from lease info. {{(pid=63028) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1164.591271] env[63028]: DEBUG oslo_vmware.rw_handles [None req-9e5e516c-c3e0-4d72-adb4-2e39cf34e4a2 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52202ffe-fe5c-e793-14c5-7304a439cfc7/disk-0.vmdk for reading. {{(pid=63028) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1164.662898] env[63028]: DEBUG oslo_vmware.api [None req-18406ad5-0840-4895-9de8-675e40b55c28 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736442, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1164.688426] env[63028]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-25f4dd36-ba81-4cd3-a448-2e121a224356 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.015854] env[63028]: DEBUG oslo_vmware.api [None req-1c601d39-c98b-4e91-9004-36a1dd0b0468 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': task-2736440, 'name': ReconfigVM_Task, 'duration_secs': 0.723742} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1165.016369] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-1c601d39-c98b-4e91-9004-36a1dd0b0468 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc] Reconfigured VM instance instance-00000076 to attach disk [datastore1] e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc/e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1165.017256] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-54ab8c97-e017-4c15-a698-ebd20d99f846 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.024107] env[63028]: DEBUG oslo_vmware.api [None req-1c601d39-c98b-4e91-9004-36a1dd0b0468 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Waiting for the task: (returnval){ [ 1165.024107] env[63028]: value = "task-2736443" [ 1165.024107] env[63028]: _type = "Task" [ 1165.024107] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1165.032643] env[63028]: DEBUG oslo_vmware.api [None req-1c601d39-c98b-4e91-9004-36a1dd0b0468 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': task-2736443, 'name': Rename_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1165.035986] env[63028]: DEBUG nova.scheduler.client.report [None req-a3046b25-04bd-41d4-9b7e-9d1f4ec87c0d tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1165.066423] env[63028]: DEBUG oslo_concurrency.lockutils [None req-adb328e3-0717-4963-b2fd-bbe34e60a3ca tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1165.164081] env[63028]: DEBUG oslo_vmware.api [None req-18406ad5-0840-4895-9de8-675e40b55c28 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736442, 'name': PowerOnVM_Task, 'duration_secs': 0.604008} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1165.164649] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-18406ad5-0840-4895-9de8-675e40b55c28 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1165.165070] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-18406ad5-0840-4895-9de8-675e40b55c28 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7] Updating instance 'c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7' progress to 100 {{(pid=63028) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1165.535791] env[63028]: DEBUG oslo_vmware.api [None req-1c601d39-c98b-4e91-9004-36a1dd0b0468 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': task-2736443, 'name': Rename_Task, 'duration_secs': 0.150214} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1165.536165] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c601d39-c98b-4e91-9004-36a1dd0b0468 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1165.536485] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-897c0dc1-7127-406d-975b-6f6102dcd41e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.541077] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a3046b25-04bd-41d4-9b7e-9d1f4ec87c0d tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.242s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1165.541664] env[63028]: DEBUG nova.compute.manager [None req-a3046b25-04bd-41d4-9b7e-9d1f4ec87c0d tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] [instance: 5c63c2bb-4725-4722-98e2-cd5c71944c4e] Start building networks asynchronously for instance. 
{{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1165.546518] env[63028]: DEBUG oslo_concurrency.lockutils [None req-adb328e3-0717-4963-b2fd-bbe34e60a3ca tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.480s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1165.547552] env[63028]: DEBUG nova.objects.instance [None req-adb328e3-0717-4963-b2fd-bbe34e60a3ca tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Lazy-loading 'resources' on Instance uuid feb8d206-718d-423a-afff-76c6975934e6 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1165.549205] env[63028]: DEBUG oslo_vmware.api [None req-1c601d39-c98b-4e91-9004-36a1dd0b0468 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Waiting for the task: (returnval){ [ 1165.549205] env[63028]: value = "task-2736444" [ 1165.549205] env[63028]: _type = "Task" [ 1165.549205] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1165.557211] env[63028]: DEBUG oslo_vmware.api [None req-1c601d39-c98b-4e91-9004-36a1dd0b0468 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': task-2736444, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1165.651157] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5fcf499f-4c6c-4b87-bb3e-166990c5bf04 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Acquiring lock "f96d4bcd-a032-4e4d-94e4-12d7013d5e3f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1165.651537] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5fcf499f-4c6c-4b87-bb3e-166990c5bf04 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Lock "f96d4bcd-a032-4e4d-94e4-12d7013d5e3f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1165.651826] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5fcf499f-4c6c-4b87-bb3e-166990c5bf04 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Acquiring lock "f96d4bcd-a032-4e4d-94e4-12d7013d5e3f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1165.652130] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5fcf499f-4c6c-4b87-bb3e-166990c5bf04 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Lock "f96d4bcd-a032-4e4d-94e4-12d7013d5e3f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1165.652315] env[63028]: DEBUG 
oslo_concurrency.lockutils [None req-5fcf499f-4c6c-4b87-bb3e-166990c5bf04 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Lock "f96d4bcd-a032-4e4d-94e4-12d7013d5e3f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1165.654703] env[63028]: INFO nova.compute.manager [None req-5fcf499f-4c6c-4b87-bb3e-166990c5bf04 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: f96d4bcd-a032-4e4d-94e4-12d7013d5e3f] Terminating instance [ 1166.050402] env[63028]: DEBUG nova.compute.utils [None req-a3046b25-04bd-41d4-9b7e-9d1f4ec87c0d tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1166.055267] env[63028]: DEBUG nova.compute.manager [None req-a3046b25-04bd-41d4-9b7e-9d1f4ec87c0d tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] [instance: 5c63c2bb-4725-4722-98e2-cd5c71944c4e] Allocating IP information in the background. {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1166.055416] env[63028]: DEBUG nova.network.neutron [None req-a3046b25-04bd-41d4-9b7e-9d1f4ec87c0d tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] [instance: 5c63c2bb-4725-4722-98e2-cd5c71944c4e] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1166.067282] env[63028]: DEBUG oslo_vmware.api [None req-1c601d39-c98b-4e91-9004-36a1dd0b0468 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': task-2736444, 'name': PowerOnVM_Task, 'duration_secs': 0.456341} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1166.067679] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c601d39-c98b-4e91-9004-36a1dd0b0468 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1166.067995] env[63028]: INFO nova.compute.manager [None req-1c601d39-c98b-4e91-9004-36a1dd0b0468 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc] Took 7.61 seconds to spawn the instance on the hypervisor. 
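The spawn sequence for instance e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc traced above (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task) follows the usual oslo.vmware pattern: each vCenter method is invoked through the shared VMwareAPISession, and the returned task object is handed to wait_for_task(), whose polling produces the repeated "progress is N%" / "completed successfully" entries logged by _poll_task. A minimal sketch of that pattern, assuming placeholder vCenter host, credentials and managed-object ID (none of these values come from this log):

    # Sketch only, not Nova's driver code: invoke a vCenter *_Task method and
    # poll it the way the entries above are polled (oslo_vmware.api._poll_task).
    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    session = vmware_api.VMwareAPISession(
        'vc.example.test',            # placeholder vCenter host
        'user@vsphere.local',         # placeholder username
        'secret',                     # placeholder password
        api_retry_count=10,
        task_poll_interval=0.5)       # delay between "progress is N%" polls

    vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')  # placeholder moref
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    session.wait_for_task(task)       # blocks until the task succeeds or raises

Nova's vmwareapi driver wraps the same invoke/wait pair inside vm_util.power_on_instance(), which is what emits the "Powering on the VM" / "Powered on the VM" entries seen in this trace.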
[ 1166.068235] env[63028]: DEBUG nova.compute.manager [None req-1c601d39-c98b-4e91-9004-36a1dd0b0468 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1166.069265] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-891df654-ebe3-4c64-9aa0-192b53e66ffe {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.111626] env[63028]: DEBUG nova.policy [None req-a3046b25-04bd-41d4-9b7e-9d1f4ec87c0d tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4637c060c7f84806b0f10f63b1269e6d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c42bfaec8cf54fc696cf89562c6dc2a3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 1166.159490] env[63028]: DEBUG nova.compute.manager [None req-5fcf499f-4c6c-4b87-bb3e-166990c5bf04 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: f96d4bcd-a032-4e4d-94e4-12d7013d5e3f] Start destroying the instance on the hypervisor. {{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1166.160322] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-5fcf499f-4c6c-4b87-bb3e-166990c5bf04 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: f96d4bcd-a032-4e4d-94e4-12d7013d5e3f] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1166.161189] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bdce164-5c23-420d-884d-aa6e6757d9a4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.170369] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-5fcf499f-4c6c-4b87-bb3e-166990c5bf04 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: f96d4bcd-a032-4e4d-94e4-12d7013d5e3f] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1166.170982] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ff548350-c087-4ae7-a7be-fee1a64543fb {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.184537] env[63028]: DEBUG oslo_vmware.api [None req-5fcf499f-4c6c-4b87-bb3e-166990c5bf04 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Waiting for the task: (returnval){ [ 1166.184537] env[63028]: value = "task-2736445" [ 1166.184537] env[63028]: _type = "Task" [ 1166.184537] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1166.193754] env[63028]: DEBUG oslo_vmware.api [None req-5fcf499f-4c6c-4b87-bb3e-166990c5bf04 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2736445, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1166.255519] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e91c5e1-3a70-46a2-a0b7-72bc15fb6cf1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.264465] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6f45441-2ede-4ba6-8072-61af267ef017 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.300484] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-815347c5-22a9-4051-bc5a-c915aff8912c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.310229] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2c64f91-da9d-42ae-82c5-176abd97da8e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.326116] env[63028]: DEBUG nova.compute.provider_tree [None req-adb328e3-0717-4963-b2fd-bbe34e60a3ca tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1166.485334] env[63028]: DEBUG nova.network.neutron [None req-a3046b25-04bd-41d4-9b7e-9d1f4ec87c0d tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] [instance: 5c63c2bb-4725-4722-98e2-cd5c71944c4e] Successfully created port: 8f7d6b80-4998-4030-85eb-bc1f302e96b3 {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1166.556085] env[63028]: DEBUG nova.compute.manager [None req-a3046b25-04bd-41d4-9b7e-9d1f4ec87c0d tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] [instance: 5c63c2bb-4725-4722-98e2-cd5c71944c4e] Start building block device mappings for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1166.589965] env[63028]: INFO nova.compute.manager [None req-1c601d39-c98b-4e91-9004-36a1dd0b0468 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc] Took 12.43 seconds to build instance. [ 1166.702238] env[63028]: DEBUG oslo_vmware.api [None req-5fcf499f-4c6c-4b87-bb3e-166990c5bf04 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2736445, 'name': PowerOffVM_Task, 'duration_secs': 0.314428} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1166.702569] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-5fcf499f-4c6c-4b87-bb3e-166990c5bf04 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: f96d4bcd-a032-4e4d-94e4-12d7013d5e3f] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1166.702769] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-5fcf499f-4c6c-4b87-bb3e-166990c5bf04 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: f96d4bcd-a032-4e4d-94e4-12d7013d5e3f] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1166.703296] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f2c74a69-c44e-4984-acaa-8d53d4f83e01 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.769128] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-5fcf499f-4c6c-4b87-bb3e-166990c5bf04 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: f96d4bcd-a032-4e4d-94e4-12d7013d5e3f] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1166.769658] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-5fcf499f-4c6c-4b87-bb3e-166990c5bf04 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: f96d4bcd-a032-4e4d-94e4-12d7013d5e3f] Deleting contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1166.769883] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-5fcf499f-4c6c-4b87-bb3e-166990c5bf04 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Deleting the datastore file [datastore2] f96d4bcd-a032-4e4d-94e4-12d7013d5e3f {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1166.770219] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6076771d-1b3b-4776-a4e3-7bf514889179 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.777533] env[63028]: DEBUG oslo_vmware.api [None req-5fcf499f-4c6c-4b87-bb3e-166990c5bf04 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Waiting for the task: (returnval){ [ 1166.777533] env[63028]: value = "task-2736447" [ 1166.777533] env[63028]: _type = "Task" [ 1166.777533] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1166.785833] env[63028]: DEBUG oslo_vmware.api [None req-5fcf499f-4c6c-4b87-bb3e-166990c5bf04 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2736447, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1166.828936] env[63028]: DEBUG nova.scheduler.client.report [None req-adb328e3-0717-4963-b2fd-bbe34e60a3ca tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1167.095638] env[63028]: DEBUG oslo_concurrency.lockutils [None req-1c601d39-c98b-4e91-9004-36a1dd0b0468 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Lock "e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.942s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1167.204622] env[63028]: DEBUG nova.compute.manager [req-4ea08ca1-a3bb-40e3-a7e0-6fa4a21c128b req-afd6dcba-39a7-4eb3-9c02-a5a25a7dce0a service nova] [instance: e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc] Received event network-changed-4e372880-e224-473d-8bdf-03af99b8a0ac {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1167.204839] env[63028]: DEBUG nova.compute.manager [req-4ea08ca1-a3bb-40e3-a7e0-6fa4a21c128b req-afd6dcba-39a7-4eb3-9c02-a5a25a7dce0a service nova] [instance: e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc] Refreshing instance network info cache due to event network-changed-4e372880-e224-473d-8bdf-03af99b8a0ac. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1167.205143] env[63028]: DEBUG oslo_concurrency.lockutils [req-4ea08ca1-a3bb-40e3-a7e0-6fa4a21c128b req-afd6dcba-39a7-4eb3-9c02-a5a25a7dce0a service nova] Acquiring lock "refresh_cache-e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1167.205286] env[63028]: DEBUG oslo_concurrency.lockutils [req-4ea08ca1-a3bb-40e3-a7e0-6fa4a21c128b req-afd6dcba-39a7-4eb3-9c02-a5a25a7dce0a service nova] Acquired lock "refresh_cache-e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1167.205368] env[63028]: DEBUG nova.network.neutron [req-4ea08ca1-a3bb-40e3-a7e0-6fa4a21c128b req-afd6dcba-39a7-4eb3-9c02-a5a25a7dce0a service nova] [instance: e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc] Refreshing network info cache for port 4e372880-e224-473d-8bdf-03af99b8a0ac {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1167.288434] env[63028]: DEBUG oslo_vmware.api [None req-5fcf499f-4c6c-4b87-bb3e-166990c5bf04 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Task: {'id': task-2736447, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.222302} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1167.288750] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-5fcf499f-4c6c-4b87-bb3e-166990c5bf04 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1167.289041] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-5fcf499f-4c6c-4b87-bb3e-166990c5bf04 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: f96d4bcd-a032-4e4d-94e4-12d7013d5e3f] Deleted contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1167.289247] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-5fcf499f-4c6c-4b87-bb3e-166990c5bf04 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: f96d4bcd-a032-4e4d-94e4-12d7013d5e3f] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1167.289472] env[63028]: INFO nova.compute.manager [None req-5fcf499f-4c6c-4b87-bb3e-166990c5bf04 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] [instance: f96d4bcd-a032-4e4d-94e4-12d7013d5e3f] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1167.289732] env[63028]: DEBUG oslo.service.loopingcall [None req-5fcf499f-4c6c-4b87-bb3e-166990c5bf04 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1167.291998] env[63028]: DEBUG nova.compute.manager [-] [instance: f96d4bcd-a032-4e4d-94e4-12d7013d5e3f] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1167.291998] env[63028]: DEBUG nova.network.neutron [-] [instance: f96d4bcd-a032-4e4d-94e4-12d7013d5e3f] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1167.334745] env[63028]: DEBUG oslo_concurrency.lockutils [None req-adb328e3-0717-4963-b2fd-bbe34e60a3ca tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.787s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1167.361667] env[63028]: INFO nova.scheduler.client.report [None req-adb328e3-0717-4963-b2fd-bbe34e60a3ca tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Deleted allocations for instance feb8d206-718d-423a-afff-76c6975934e6 [ 1167.567204] env[63028]: DEBUG nova.compute.manager [None req-a3046b25-04bd-41d4-9b7e-9d1f4ec87c0d tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] [instance: 5c63c2bb-4725-4722-98e2-cd5c71944c4e] Start spawning the instance on the hypervisor. 
{{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1167.605995] env[63028]: DEBUG nova.virt.hardware [None req-a3046b25-04bd-41d4-9b7e-9d1f4ec87c0d tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1167.606268] env[63028]: DEBUG nova.virt.hardware [None req-a3046b25-04bd-41d4-9b7e-9d1f4ec87c0d tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1167.606433] env[63028]: DEBUG nova.virt.hardware [None req-a3046b25-04bd-41d4-9b7e-9d1f4ec87c0d tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1167.607395] env[63028]: DEBUG nova.virt.hardware [None req-a3046b25-04bd-41d4-9b7e-9d1f4ec87c0d tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1167.607839] env[63028]: DEBUG nova.virt.hardware [None req-a3046b25-04bd-41d4-9b7e-9d1f4ec87c0d tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1167.607839] env[63028]: DEBUG nova.virt.hardware [None req-a3046b25-04bd-41d4-9b7e-9d1f4ec87c0d tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1167.607971] env[63028]: DEBUG nova.virt.hardware [None req-a3046b25-04bd-41d4-9b7e-9d1f4ec87c0d tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1167.608110] env[63028]: DEBUG nova.virt.hardware [None req-a3046b25-04bd-41d4-9b7e-9d1f4ec87c0d tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1167.608284] env[63028]: DEBUG nova.virt.hardware [None req-a3046b25-04bd-41d4-9b7e-9d1f4ec87c0d tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1167.608453] env[63028]: DEBUG nova.virt.hardware [None req-a3046b25-04bd-41d4-9b7e-9d1f4ec87c0d tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1167.608634] env[63028]: DEBUG nova.virt.hardware [None req-a3046b25-04bd-41d4-9b7e-9d1f4ec87c0d tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1167.609496] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81b7eed5-d4af-4cb1-bf9b-24dd7818fa19 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.620889] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed58a5d6-d265-432a-9c24-08b74e352ed0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.715630] env[63028]: DEBUG nova.compute.manager [req-12f24bd3-8650-486f-b114-2290ad3b5141 req-5db57655-8f0a-43e7-8d8c-794fe40d2f00 service nova] [instance: f96d4bcd-a032-4e4d-94e4-12d7013d5e3f] Received event network-vif-deleted-f5759890-6244-4b8a-9a03-6d628f2441b7 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1167.715630] env[63028]: INFO nova.compute.manager [req-12f24bd3-8650-486f-b114-2290ad3b5141 req-5db57655-8f0a-43e7-8d8c-794fe40d2f00 service nova] [instance: f96d4bcd-a032-4e4d-94e4-12d7013d5e3f] Neutron deleted interface f5759890-6244-4b8a-9a03-6d628f2441b7; detaching it from the instance and deleting it from the info cache [ 1167.715630] env[63028]: DEBUG nova.network.neutron [req-12f24bd3-8650-486f-b114-2290ad3b5141 req-5db57655-8f0a-43e7-8d8c-794fe40d2f00 service nova] [instance: f96d4bcd-a032-4e4d-94e4-12d7013d5e3f] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1167.869731] env[63028]: DEBUG oslo_concurrency.lockutils [None req-adb328e3-0717-4963-b2fd-bbe34e60a3ca tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Lock "feb8d206-718d-423a-afff-76c6975934e6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.251s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1168.096774] env[63028]: DEBUG oslo_concurrency.lockutils [None req-129a564d-abec-4885-a271-64c432fc3918 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Acquiring lock "c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=63028) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1168.097114] env[63028]: DEBUG oslo_concurrency.lockutils [None req-129a564d-abec-4885-a271-64c432fc3918 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Lock "c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1168.097366] env[63028]: DEBUG nova.compute.manager [None req-129a564d-abec-4885-a271-64c432fc3918 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7] Going to confirm migration 9 {{(pid=63028) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1168.104173] env[63028]: DEBUG nova.network.neutron [-] [instance: f96d4bcd-a032-4e4d-94e4-12d7013d5e3f] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1168.155532] env[63028]: DEBUG nova.network.neutron [req-4ea08ca1-a3bb-40e3-a7e0-6fa4a21c128b req-afd6dcba-39a7-4eb3-9c02-a5a25a7dce0a service nova] [instance: e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc] Updated VIF entry in instance network info cache for port 4e372880-e224-473d-8bdf-03af99b8a0ac. {{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1168.155928] env[63028]: DEBUG nova.network.neutron [req-4ea08ca1-a3bb-40e3-a7e0-6fa4a21c128b req-afd6dcba-39a7-4eb3-9c02-a5a25a7dce0a service nova] [instance: e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc] Updating instance_info_cache with network_info: [{"id": "4e372880-e224-473d-8bdf-03af99b8a0ac", "address": "fa:16:3e:2f:e1:95", "network": {"id": "063de6d9-43e6-4adb-88dc-d4fe17058488", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-659483097-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.159", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68de7445caeb4381b9e68c685ccb5e0b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b356db78-99c7-4464-822c-fc7e193f7878", "external-id": "nsx-vlan-transportzone-231", "segmentation_id": 231, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4e372880-e2", "ovs_interfaceid": "4e372880-e224-473d-8bdf-03af99b8a0ac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1168.216975] env[63028]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-10abb405-8831-4606-83d1-3140dc743aa7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.226911] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3e35e3c-df80-470d-b570-bc2eb2d79fa5 
{{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.264130] env[63028]: DEBUG nova.compute.manager [req-12f24bd3-8650-486f-b114-2290ad3b5141 req-5db57655-8f0a-43e7-8d8c-794fe40d2f00 service nova] [instance: f96d4bcd-a032-4e4d-94e4-12d7013d5e3f] Detach interface failed, port_id=f5759890-6244-4b8a-9a03-6d628f2441b7, reason: Instance f96d4bcd-a032-4e4d-94e4-12d7013d5e3f could not be found. {{(pid=63028) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1168.264860] env[63028]: DEBUG nova.network.neutron [None req-a3046b25-04bd-41d4-9b7e-9d1f4ec87c0d tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] [instance: 5c63c2bb-4725-4722-98e2-cd5c71944c4e] Successfully updated port: 8f7d6b80-4998-4030-85eb-bc1f302e96b3 {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1168.614071] env[63028]: INFO nova.compute.manager [-] [instance: f96d4bcd-a032-4e4d-94e4-12d7013d5e3f] Took 1.32 seconds to deallocate network for instance. [ 1168.645614] env[63028]: DEBUG oslo_concurrency.lockutils [None req-129a564d-abec-4885-a271-64c432fc3918 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Acquiring lock "refresh_cache-c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1168.645874] env[63028]: DEBUG oslo_concurrency.lockutils [None req-129a564d-abec-4885-a271-64c432fc3918 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Acquired lock "refresh_cache-c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1168.646125] env[63028]: DEBUG nova.network.neutron [None req-129a564d-abec-4885-a271-64c432fc3918 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1168.646386] env[63028]: DEBUG nova.objects.instance [None req-129a564d-abec-4885-a271-64c432fc3918 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Lazy-loading 'info_cache' on Instance uuid c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1168.663498] env[63028]: DEBUG oslo_concurrency.lockutils [req-4ea08ca1-a3bb-40e3-a7e0-6fa4a21c128b req-afd6dcba-39a7-4eb3-9c02-a5a25a7dce0a service nova] Releasing lock "refresh_cache-e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1168.767091] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a3046b25-04bd-41d4-9b7e-9d1f4ec87c0d tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] Acquiring lock "refresh_cache-5c63c2bb-4725-4722-98e2-cd5c71944c4e" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1168.767260] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a3046b25-04bd-41d4-9b7e-9d1f4ec87c0d tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] Acquired lock 
"refresh_cache-5c63c2bb-4725-4722-98e2-cd5c71944c4e" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1168.767414] env[63028]: DEBUG nova.network.neutron [None req-a3046b25-04bd-41d4-9b7e-9d1f4ec87c0d tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] [instance: 5c63c2bb-4725-4722-98e2-cd5c71944c4e] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1168.851898] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a0f01393-835f-469a-9262-6cf53e2e257e tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Acquiring lock "79f4ef22-a589-4d5c-8832-5d5dcdd55561" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1168.852180] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a0f01393-835f-469a-9262-6cf53e2e257e tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Lock "79f4ef22-a589-4d5c-8832-5d5dcdd55561" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1168.852395] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a0f01393-835f-469a-9262-6cf53e2e257e tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Acquiring lock "79f4ef22-a589-4d5c-8832-5d5dcdd55561-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1168.852613] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a0f01393-835f-469a-9262-6cf53e2e257e tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Lock "79f4ef22-a589-4d5c-8832-5d5dcdd55561-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1168.852789] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a0f01393-835f-469a-9262-6cf53e2e257e tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Lock "79f4ef22-a589-4d5c-8832-5d5dcdd55561-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1168.855111] env[63028]: INFO nova.compute.manager [None req-a0f01393-835f-469a-9262-6cf53e2e257e tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 79f4ef22-a589-4d5c-8832-5d5dcdd55561] Terminating instance [ 1169.122584] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5fcf499f-4c6c-4b87-bb3e-166990c5bf04 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1169.123080] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5fcf499f-4c6c-4b87-bb3e-166990c5bf04 
tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1169.123293] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5fcf499f-4c6c-4b87-bb3e-166990c5bf04 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1169.146511] env[63028]: INFO nova.scheduler.client.report [None req-5fcf499f-4c6c-4b87-bb3e-166990c5bf04 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Deleted allocations for instance f96d4bcd-a032-4e4d-94e4-12d7013d5e3f [ 1169.300285] env[63028]: DEBUG nova.network.neutron [None req-a3046b25-04bd-41d4-9b7e-9d1f4ec87c0d tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] [instance: 5c63c2bb-4725-4722-98e2-cd5c71944c4e] Instance cache missing network info. {{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1169.359136] env[63028]: DEBUG nova.compute.manager [None req-a0f01393-835f-469a-9262-6cf53e2e257e tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 79f4ef22-a589-4d5c-8832-5d5dcdd55561] Start destroying the instance on the hypervisor. {{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1169.359387] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-a0f01393-835f-469a-9262-6cf53e2e257e tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 79f4ef22-a589-4d5c-8832-5d5dcdd55561] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1169.360340] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-629aa76a-2776-4eb3-97de-a75ba639f0bc {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.368104] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0f01393-835f-469a-9262-6cf53e2e257e tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 79f4ef22-a589-4d5c-8832-5d5dcdd55561] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1169.368335] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2b39edff-8b88-4400-b7e1-14fe88e2368f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.375061] env[63028]: DEBUG oslo_vmware.api [None req-a0f01393-835f-469a-9262-6cf53e2e257e tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Waiting for the task: (returnval){ [ 1169.375061] env[63028]: value = "task-2736448" [ 1169.375061] env[63028]: _type = "Task" [ 1169.375061] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1169.383124] env[63028]: DEBUG oslo_vmware.api [None req-a0f01393-835f-469a-9262-6cf53e2e257e tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2736448, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1169.442057] env[63028]: DEBUG nova.network.neutron [None req-a3046b25-04bd-41d4-9b7e-9d1f4ec87c0d tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] [instance: 5c63c2bb-4725-4722-98e2-cd5c71944c4e] Updating instance_info_cache with network_info: [{"id": "8f7d6b80-4998-4030-85eb-bc1f302e96b3", "address": "fa:16:3e:33:55:74", "network": {"id": "c7cae25e-24fc-44a2-b21f-c89a09b84c93", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-1672057628-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c42bfaec8cf54fc696cf89562c6dc2a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cae70d41-6ebf-472a-8504-6530eb37ea41", "external-id": "nsx-vlan-transportzone-576", "segmentation_id": 576, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8f7d6b80-49", "ovs_interfaceid": "8f7d6b80-4998-4030-85eb-bc1f302e96b3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1169.658732] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5fcf499f-4c6c-4b87-bb3e-166990c5bf04 tempest-DeleteServersTestJSON-1541253569 tempest-DeleteServersTestJSON-1541253569-project-member] Lock "f96d4bcd-a032-4e4d-94e4-12d7013d5e3f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 4.007s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1169.736961] env[63028]: DEBUG nova.compute.manager [req-6b15a351-8065-441d-b18d-fceee56c862b req-d60884f0-2ce4-4e58-b586-acacbb172404 service nova] [instance: 5c63c2bb-4725-4722-98e2-cd5c71944c4e] Received event network-vif-plugged-8f7d6b80-4998-4030-85eb-bc1f302e96b3 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1169.737226] env[63028]: DEBUG oslo_concurrency.lockutils [req-6b15a351-8065-441d-b18d-fceee56c862b req-d60884f0-2ce4-4e58-b586-acacbb172404 service nova] Acquiring lock "5c63c2bb-4725-4722-98e2-cd5c71944c4e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1169.737454] env[63028]: DEBUG oslo_concurrency.lockutils [req-6b15a351-8065-441d-b18d-fceee56c862b req-d60884f0-2ce4-4e58-b586-acacbb172404 service nova] Lock "5c63c2bb-4725-4722-98e2-cd5c71944c4e-events" acquired by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1169.737678] env[63028]: DEBUG oslo_concurrency.lockutils [req-6b15a351-8065-441d-b18d-fceee56c862b req-d60884f0-2ce4-4e58-b586-acacbb172404 service nova] Lock "5c63c2bb-4725-4722-98e2-cd5c71944c4e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1169.737865] env[63028]: DEBUG nova.compute.manager [req-6b15a351-8065-441d-b18d-fceee56c862b req-d60884f0-2ce4-4e58-b586-acacbb172404 service nova] [instance: 5c63c2bb-4725-4722-98e2-cd5c71944c4e] No waiting events found dispatching network-vif-plugged-8f7d6b80-4998-4030-85eb-bc1f302e96b3 {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1169.738214] env[63028]: WARNING nova.compute.manager [req-6b15a351-8065-441d-b18d-fceee56c862b req-d60884f0-2ce4-4e58-b586-acacbb172404 service nova] [instance: 5c63c2bb-4725-4722-98e2-cd5c71944c4e] Received unexpected event network-vif-plugged-8f7d6b80-4998-4030-85eb-bc1f302e96b3 for instance with vm_state building and task_state spawning. [ 1169.738454] env[63028]: DEBUG nova.compute.manager [req-6b15a351-8065-441d-b18d-fceee56c862b req-d60884f0-2ce4-4e58-b586-acacbb172404 service nova] [instance: 5c63c2bb-4725-4722-98e2-cd5c71944c4e] Received event network-changed-8f7d6b80-4998-4030-85eb-bc1f302e96b3 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1169.738628] env[63028]: DEBUG nova.compute.manager [req-6b15a351-8065-441d-b18d-fceee56c862b req-d60884f0-2ce4-4e58-b586-acacbb172404 service nova] [instance: 5c63c2bb-4725-4722-98e2-cd5c71944c4e] Refreshing instance network info cache due to event network-changed-8f7d6b80-4998-4030-85eb-bc1f302e96b3. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1169.738800] env[63028]: DEBUG oslo_concurrency.lockutils [req-6b15a351-8065-441d-b18d-fceee56c862b req-d60884f0-2ce4-4e58-b586-acacbb172404 service nova] Acquiring lock "refresh_cache-5c63c2bb-4725-4722-98e2-cd5c71944c4e" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1169.887583] env[63028]: DEBUG oslo_vmware.api [None req-a0f01393-835f-469a-9262-6cf53e2e257e tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2736448, 'name': PowerOffVM_Task, 'duration_secs': 0.235183} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1169.887992] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0f01393-835f-469a-9262-6cf53e2e257e tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 79f4ef22-a589-4d5c-8832-5d5dcdd55561] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1169.888064] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-a0f01393-835f-469a-9262-6cf53e2e257e tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 79f4ef22-a589-4d5c-8832-5d5dcdd55561] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1169.888350] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f1b41f23-12b1-4dba-90fb-bf0b6c5ad06d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.909055] env[63028]: DEBUG nova.network.neutron [None req-129a564d-abec-4885-a271-64c432fc3918 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7] Updating instance_info_cache with network_info: [{"id": "6ecb125b-389c-4dce-8446-368a7298e497", "address": "fa:16:3e:f4:06:c4", "network": {"id": "ec7daf90-9857-4c67-8588-1d8404499409", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-574413795-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.163", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e85128c5c889438bbb1df571b7756c6a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e1a5c1-4ae7-409b-8de7-d401684ef60d", "external-id": "nsx-vlan-transportzone-740", "segmentation_id": 740, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6ecb125b-38", "ovs_interfaceid": "6ecb125b-389c-4dce-8446-368a7298e497", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1169.944761] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a3046b25-04bd-41d4-9b7e-9d1f4ec87c0d tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] Releasing lock "refresh_cache-5c63c2bb-4725-4722-98e2-cd5c71944c4e" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1169.945501] env[63028]: DEBUG nova.compute.manager [None req-a3046b25-04bd-41d4-9b7e-9d1f4ec87c0d tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] [instance: 5c63c2bb-4725-4722-98e2-cd5c71944c4e] Instance network_info: |[{"id": "8f7d6b80-4998-4030-85eb-bc1f302e96b3", "address": "fa:16:3e:33:55:74", "network": {"id": "c7cae25e-24fc-44a2-b21f-c89a09b84c93", "bridge": "br-int", "label": 
"tempest-ServerAddressesNegativeTestJSON-1672057628-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c42bfaec8cf54fc696cf89562c6dc2a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cae70d41-6ebf-472a-8504-6530eb37ea41", "external-id": "nsx-vlan-transportzone-576", "segmentation_id": 576, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8f7d6b80-49", "ovs_interfaceid": "8f7d6b80-4998-4030-85eb-bc1f302e96b3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1169.945877] env[63028]: DEBUG oslo_concurrency.lockutils [req-6b15a351-8065-441d-b18d-fceee56c862b req-d60884f0-2ce4-4e58-b586-acacbb172404 service nova] Acquired lock "refresh_cache-5c63c2bb-4725-4722-98e2-cd5c71944c4e" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1169.946129] env[63028]: DEBUG nova.network.neutron [req-6b15a351-8065-441d-b18d-fceee56c862b req-d60884f0-2ce4-4e58-b586-acacbb172404 service nova] [instance: 5c63c2bb-4725-4722-98e2-cd5c71944c4e] Refreshing network info cache for port 8f7d6b80-4998-4030-85eb-bc1f302e96b3 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1169.947841] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-a3046b25-04bd-41d4-9b7e-9d1f4ec87c0d tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] [instance: 5c63c2bb-4725-4722-98e2-cd5c71944c4e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:33:55:74', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cae70d41-6ebf-472a-8504-6530eb37ea41', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8f7d6b80-4998-4030-85eb-bc1f302e96b3', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1169.957685] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3046b25-04bd-41d4-9b7e-9d1f4ec87c0d tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] Creating folder: Project (c42bfaec8cf54fc696cf89562c6dc2a3). Parent ref: group-v550570. 
{{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1169.963366] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fd51390d-0e5a-468e-a805-450b4ce99435 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.966206] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-a0f01393-835f-469a-9262-6cf53e2e257e tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 79f4ef22-a589-4d5c-8832-5d5dcdd55561] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1169.966505] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-a0f01393-835f-469a-9262-6cf53e2e257e tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 79f4ef22-a589-4d5c-8832-5d5dcdd55561] Deleting contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1169.966779] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-a0f01393-835f-469a-9262-6cf53e2e257e tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Deleting the datastore file [datastore1] 79f4ef22-a589-4d5c-8832-5d5dcdd55561 {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1169.967682] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8cc0b059-d321-4f6a-a830-c154431d3197 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.977761] env[63028]: DEBUG oslo_vmware.api [None req-a0f01393-835f-469a-9262-6cf53e2e257e tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Waiting for the task: (returnval){ [ 1169.977761] env[63028]: value = "task-2736452" [ 1169.977761] env[63028]: _type = "Task" [ 1169.977761] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1169.983318] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-a3046b25-04bd-41d4-9b7e-9d1f4ec87c0d tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] Created folder: Project (c42bfaec8cf54fc696cf89562c6dc2a3) in parent group-v550570. [ 1169.983470] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3046b25-04bd-41d4-9b7e-9d1f4ec87c0d tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] Creating folder: Instances. Parent ref: group-v550888. {{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1169.983676] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-68b9469e-eb32-4340-b1d5-a0bef21e1978 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.990694] env[63028]: DEBUG oslo_vmware.api [None req-a0f01393-835f-469a-9262-6cf53e2e257e tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2736452, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1169.993429] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-a3046b25-04bd-41d4-9b7e-9d1f4ec87c0d tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] Created folder: Instances in parent group-v550888. [ 1169.993699] env[63028]: DEBUG oslo.service.loopingcall [None req-a3046b25-04bd-41d4-9b7e-9d1f4ec87c0d tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1169.993905] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5c63c2bb-4725-4722-98e2-cd5c71944c4e] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1169.994158] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a3a6208c-a868-4490-8463-5a556e3777ca {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.016244] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1170.016244] env[63028]: value = "task-2736454" [ 1170.016244] env[63028]: _type = "Task" [ 1170.016244] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1170.025140] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2736454, 'name': CreateVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1170.188568] env[63028]: DEBUG nova.network.neutron [req-6b15a351-8065-441d-b18d-fceee56c862b req-d60884f0-2ce4-4e58-b586-acacbb172404 service nova] [instance: 5c63c2bb-4725-4722-98e2-cd5c71944c4e] Updated VIF entry in instance network info cache for port 8f7d6b80-4998-4030-85eb-bc1f302e96b3. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1170.189057] env[63028]: DEBUG nova.network.neutron [req-6b15a351-8065-441d-b18d-fceee56c862b req-d60884f0-2ce4-4e58-b586-acacbb172404 service nova] [instance: 5c63c2bb-4725-4722-98e2-cd5c71944c4e] Updating instance_info_cache with network_info: [{"id": "8f7d6b80-4998-4030-85eb-bc1f302e96b3", "address": "fa:16:3e:33:55:74", "network": {"id": "c7cae25e-24fc-44a2-b21f-c89a09b84c93", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-1672057628-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c42bfaec8cf54fc696cf89562c6dc2a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cae70d41-6ebf-472a-8504-6530eb37ea41", "external-id": "nsx-vlan-transportzone-576", "segmentation_id": 576, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8f7d6b80-49", "ovs_interfaceid": "8f7d6b80-4998-4030-85eb-bc1f302e96b3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1170.411749] env[63028]: DEBUG oslo_concurrency.lockutils [None req-129a564d-abec-4885-a271-64c432fc3918 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Releasing lock "refresh_cache-c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1170.412107] env[63028]: DEBUG nova.objects.instance [None req-129a564d-abec-4885-a271-64c432fc3918 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Lazy-loading 'migration_context' on Instance uuid c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1170.487586] env[63028]: DEBUG oslo_vmware.api [None req-a0f01393-835f-469a-9262-6cf53e2e257e tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Task: {'id': task-2736452, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.269086} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1170.487848] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-a0f01393-835f-469a-9262-6cf53e2e257e tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1170.488050] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-a0f01393-835f-469a-9262-6cf53e2e257e tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 79f4ef22-a589-4d5c-8832-5d5dcdd55561] Deleted contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1170.488241] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-a0f01393-835f-469a-9262-6cf53e2e257e tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 79f4ef22-a589-4d5c-8832-5d5dcdd55561] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1170.488419] env[63028]: INFO nova.compute.manager [None req-a0f01393-835f-469a-9262-6cf53e2e257e tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] [instance: 79f4ef22-a589-4d5c-8832-5d5dcdd55561] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1170.488691] env[63028]: DEBUG oslo.service.loopingcall [None req-a0f01393-835f-469a-9262-6cf53e2e257e tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1170.488881] env[63028]: DEBUG nova.compute.manager [-] [instance: 79f4ef22-a589-4d5c-8832-5d5dcdd55561] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1170.488972] env[63028]: DEBUG nova.network.neutron [-] [instance: 79f4ef22-a589-4d5c-8832-5d5dcdd55561] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1170.527364] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2736454, 'name': CreateVM_Task, 'duration_secs': 0.452975} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1170.527544] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5c63c2bb-4725-4722-98e2-cd5c71944c4e] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1170.538781] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a3046b25-04bd-41d4-9b7e-9d1f4ec87c0d tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1170.538994] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a3046b25-04bd-41d4-9b7e-9d1f4ec87c0d tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1170.539365] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a3046b25-04bd-41d4-9b7e-9d1f4ec87c0d tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1170.539612] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e9396bb1-2091-421a-959a-d2faf786dc60 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.544346] env[63028]: DEBUG oslo_vmware.api [None req-a3046b25-04bd-41d4-9b7e-9d1f4ec87c0d tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] Waiting for the task: (returnval){ [ 1170.544346] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52658bea-ae73-53a3-5e7c-6f23fcca3b19" [ 1170.544346] env[63028]: _type = "Task" [ 1170.544346] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1170.552365] env[63028]: DEBUG oslo_vmware.api [None req-a3046b25-04bd-41d4-9b7e-9d1f4ec87c0d tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52658bea-ae73-53a3-5e7c-6f23fcca3b19, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1170.692773] env[63028]: DEBUG oslo_concurrency.lockutils [req-6b15a351-8065-441d-b18d-fceee56c862b req-d60884f0-2ce4-4e58-b586-acacbb172404 service nova] Releasing lock "refresh_cache-5c63c2bb-4725-4722-98e2-cd5c71944c4e" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1170.915616] env[63028]: DEBUG nova.objects.base [None req-129a564d-abec-4885-a271-64c432fc3918 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=63028) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1170.917383] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ba97bc6-a2c6-4579-9bda-b300c1098674 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.936532] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-026230fc-e442-4ac6-865b-82846916d5e6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.942128] env[63028]: DEBUG oslo_vmware.api [None req-129a564d-abec-4885-a271-64c432fc3918 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Waiting for the task: (returnval){ [ 1170.942128] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52746285-ee2e-fd60-cd98-86640b80e69e" [ 1170.942128] env[63028]: _type = "Task" [ 1170.942128] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1170.951949] env[63028]: DEBUG oslo_vmware.api [None req-129a564d-abec-4885-a271-64c432fc3918 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52746285-ee2e-fd60-cd98-86640b80e69e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1171.054340] env[63028]: DEBUG oslo_vmware.api [None req-a3046b25-04bd-41d4-9b7e-9d1f4ec87c0d tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52658bea-ae73-53a3-5e7c-6f23fcca3b19, 'name': SearchDatastore_Task, 'duration_secs': 0.024984} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1171.054652] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a3046b25-04bd-41d4-9b7e-9d1f4ec87c0d tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1171.055399] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-a3046b25-04bd-41d4-9b7e-9d1f4ec87c0d tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] [instance: 5c63c2bb-4725-4722-98e2-cd5c71944c4e] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1171.055399] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a3046b25-04bd-41d4-9b7e-9d1f4ec87c0d tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1171.055399] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a3046b25-04bd-41d4-9b7e-9d1f4ec87c0d tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1171.055583] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-a3046b25-04bd-41d4-9b7e-9d1f4ec87c0d tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1171.055697] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-73826962-f73f-474c-9c51-323435d83ba5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.064860] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-a3046b25-04bd-41d4-9b7e-9d1f4ec87c0d tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1171.065077] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-a3046b25-04bd-41d4-9b7e-9d1f4ec87c0d tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1171.065833] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dda56a8e-b853-4f4f-abf5-e2cc91c5c722 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.070866] env[63028]: DEBUG oslo_vmware.api [None req-a3046b25-04bd-41d4-9b7e-9d1f4ec87c0d tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] Waiting for the task: (returnval){ [ 1171.070866] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52d66a06-5613-dcd8-808f-0ef5afe7c5cf" [ 1171.070866] env[63028]: _type = "Task" [ 1171.070866] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1171.078541] env[63028]: DEBUG oslo_vmware.api [None req-a3046b25-04bd-41d4-9b7e-9d1f4ec87c0d tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52d66a06-5613-dcd8-808f-0ef5afe7c5cf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1171.277739] env[63028]: DEBUG nova.network.neutron [-] [instance: 79f4ef22-a589-4d5c-8832-5d5dcdd55561] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1171.453569] env[63028]: DEBUG oslo_vmware.api [None req-129a564d-abec-4885-a271-64c432fc3918 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52746285-ee2e-fd60-cd98-86640b80e69e, 'name': SearchDatastore_Task, 'duration_secs': 0.012986} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1171.453867] env[63028]: DEBUG oslo_concurrency.lockutils [None req-129a564d-abec-4885-a271-64c432fc3918 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1171.454119] env[63028]: DEBUG oslo_concurrency.lockutils [None req-129a564d-abec-4885-a271-64c432fc3918 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1171.581680] env[63028]: DEBUG oslo_vmware.api [None req-a3046b25-04bd-41d4-9b7e-9d1f4ec87c0d tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52d66a06-5613-dcd8-808f-0ef5afe7c5cf, 'name': SearchDatastore_Task, 'duration_secs': 0.017527} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1171.582513] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a45dc6e7-578d-4c10-8e67-e5c7b471aa46 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.588926] env[63028]: DEBUG oslo_vmware.api [None req-a3046b25-04bd-41d4-9b7e-9d1f4ec87c0d tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] Waiting for the task: (returnval){ [ 1171.588926] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]520b12e5-3c87-4e2b-faf3-a6c4179ef716" [ 1171.588926] env[63028]: _type = "Task" [ 1171.588926] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1171.597009] env[63028]: DEBUG oslo_vmware.api [None req-a3046b25-04bd-41d4-9b7e-9d1f4ec87c0d tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]520b12e5-3c87-4e2b-faf3-a6c4179ef716, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1171.772645] env[63028]: DEBUG nova.compute.manager [req-4b76ae3b-70db-4ac2-8767-d6d35a8c1dad req-a385cf14-1e3f-49d2-ab65-bc69acfae225 service nova] [instance: 79f4ef22-a589-4d5c-8832-5d5dcdd55561] Received event network-vif-deleted-0b5a99ec-110c-4325-b36a-92007f8e9e6a {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1171.782153] env[63028]: INFO nova.compute.manager [-] [instance: 79f4ef22-a589-4d5c-8832-5d5dcdd55561] Took 1.29 seconds to deallocate network for instance. [ 1172.103343] env[63028]: DEBUG oslo_vmware.api [None req-a3046b25-04bd-41d4-9b7e-9d1f4ec87c0d tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]520b12e5-3c87-4e2b-faf3-a6c4179ef716, 'name': SearchDatastore_Task, 'duration_secs': 0.019737} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1172.103476] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a3046b25-04bd-41d4-9b7e-9d1f4ec87c0d tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1172.103794] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3046b25-04bd-41d4-9b7e-9d1f4ec87c0d tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] 5c63c2bb-4725-4722-98e2-cd5c71944c4e/5c63c2bb-4725-4722-98e2-cd5c71944c4e.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1172.104962] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4e1504b4-b16e-4cb5-9bb5-80619cf83eba {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.107429] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50593476-ce81-46e0-ae41-04b04b92da1f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.115624] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de70c865-acae-4698-80cf-8df535000bf2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.119049] env[63028]: DEBUG oslo_vmware.api [None req-a3046b25-04bd-41d4-9b7e-9d1f4ec87c0d tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] Waiting for the task: (returnval){ [ 1172.119049] env[63028]: value = "task-2736455" [ 1172.119049] env[63028]: _type = "Task" [ 1172.119049] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1172.149249] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aab5df7c-7b93-470c-a76e-f1df4c308b36 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.154996] env[63028]: DEBUG oslo_vmware.api [None req-a3046b25-04bd-41d4-9b7e-9d1f4ec87c0d tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] Task: {'id': task-2736455, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1172.159831] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f701f3db-5c29-4f7a-bd12-f52155e53c53 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.173908] env[63028]: DEBUG nova.compute.provider_tree [None req-129a564d-abec-4885-a271-64c432fc3918 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1172.290810] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a0f01393-835f-469a-9262-6cf53e2e257e tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1172.421758] env[63028]: DEBUG oslo_vmware.rw_handles [None req-9e5e516c-c3e0-4d72-adb4-2e39cf34e4a2 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52202ffe-fe5c-e793-14c5-7304a439cfc7/disk-0.vmdk. {{(pid=63028) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1172.422675] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49b5a2e3-9eee-4743-a65b-ecd0fc575e2e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.429205] env[63028]: DEBUG oslo_vmware.rw_handles [None req-9e5e516c-c3e0-4d72-adb4-2e39cf34e4a2 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52202ffe-fe5c-e793-14c5-7304a439cfc7/disk-0.vmdk is in state: ready. {{(pid=63028) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1172.429375] env[63028]: ERROR oslo_vmware.rw_handles [None req-9e5e516c-c3e0-4d72-adb4-2e39cf34e4a2 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52202ffe-fe5c-e793-14c5-7304a439cfc7/disk-0.vmdk due to incomplete transfer. [ 1172.429590] env[63028]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-23b08b86-259b-4275-83ea-b001ed689b80 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.436420] env[63028]: DEBUG oslo_vmware.rw_handles [None req-9e5e516c-c3e0-4d72-adb4-2e39cf34e4a2 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52202ffe-fe5c-e793-14c5-7304a439cfc7/disk-0.vmdk. 
{{(pid=63028) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1172.436618] env[63028]: DEBUG nova.virt.vmwareapi.images [None req-9e5e516c-c3e0-4d72-adb4-2e39cf34e4a2 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Uploaded image 792ecd02-edc1-4227-9a1b-93345040c770 to the Glance image server {{(pid=63028) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1172.438912] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e5e516c-c3e0-4d72-adb4-2e39cf34e4a2 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Destroying the VM {{(pid=63028) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1172.439179] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-efd146dc-8b05-417d-b450-6e13f2fdf186 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.444254] env[63028]: DEBUG oslo_vmware.api [None req-9e5e516c-c3e0-4d72-adb4-2e39cf34e4a2 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Waiting for the task: (returnval){ [ 1172.444254] env[63028]: value = "task-2736456" [ 1172.444254] env[63028]: _type = "Task" [ 1172.444254] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1172.451735] env[63028]: DEBUG oslo_vmware.api [None req-9e5e516c-c3e0-4d72-adb4-2e39cf34e4a2 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2736456, 'name': Destroy_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1172.629911] env[63028]: DEBUG oslo_vmware.api [None req-a3046b25-04bd-41d4-9b7e-9d1f4ec87c0d tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] Task: {'id': task-2736455, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1172.677244] env[63028]: DEBUG nova.scheduler.client.report [None req-129a564d-abec-4885-a271-64c432fc3918 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1172.954814] env[63028]: DEBUG oslo_vmware.api [None req-9e5e516c-c3e0-4d72-adb4-2e39cf34e4a2 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2736456, 'name': Destroy_Task, 'duration_secs': 0.340094} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1172.955054] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-9e5e516c-c3e0-4d72-adb4-2e39cf34e4a2 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Destroyed the VM [ 1172.955340] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-9e5e516c-c3e0-4d72-adb4-2e39cf34e4a2 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Deleting Snapshot of the VM instance {{(pid=63028) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1172.955640] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-03f18b44-0e78-408f-93d6-4a74eb7bdba6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.962212] env[63028]: DEBUG oslo_vmware.api [None req-9e5e516c-c3e0-4d72-adb4-2e39cf34e4a2 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Waiting for the task: (returnval){ [ 1172.962212] env[63028]: value = "task-2736457" [ 1172.962212] env[63028]: _type = "Task" [ 1172.962212] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1172.971315] env[63028]: DEBUG oslo_vmware.api [None req-9e5e516c-c3e0-4d72-adb4-2e39cf34e4a2 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2736457, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1173.018849] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1173.019125] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1173.132139] env[63028]: DEBUG oslo_vmware.api [None req-a3046b25-04bd-41d4-9b7e-9d1f4ec87c0d tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] Task: {'id': task-2736455, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.955464} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1173.132695] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3046b25-04bd-41d4-9b7e-9d1f4ec87c0d tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] 5c63c2bb-4725-4722-98e2-cd5c71944c4e/5c63c2bb-4725-4722-98e2-cd5c71944c4e.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1173.132922] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-a3046b25-04bd-41d4-9b7e-9d1f4ec87c0d tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] [instance: 5c63c2bb-4725-4722-98e2-cd5c71944c4e] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1173.133198] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0d1f014c-45af-4cb9-9e3f-0418211f9167 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.140548] env[63028]: DEBUG oslo_vmware.api [None req-a3046b25-04bd-41d4-9b7e-9d1f4ec87c0d tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] Waiting for the task: (returnval){ [ 1173.140548] env[63028]: value = "task-2736458" [ 1173.140548] env[63028]: _type = "Task" [ 1173.140548] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1173.151168] env[63028]: DEBUG oslo_vmware.api [None req-a3046b25-04bd-41d4-9b7e-9d1f4ec87c0d tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] Task: {'id': task-2736458, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1173.471355] env[63028]: DEBUG oslo_vmware.api [None req-9e5e516c-c3e0-4d72-adb4-2e39cf34e4a2 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2736457, 'name': RemoveSnapshot_Task, 'duration_secs': 0.348123} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1173.471644] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-9e5e516c-c3e0-4d72-adb4-2e39cf34e4a2 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Deleted Snapshot of the VM instance {{(pid=63028) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1173.471917] env[63028]: DEBUG nova.compute.manager [None req-9e5e516c-c3e0-4d72-adb4-2e39cf34e4a2 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1173.472694] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f11428c6-8433-4503-9d22-bd7d76b2bfcd {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.531654] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1173.531860] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1173.532020] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1173.532180] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1173.532379] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1173.532495] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager._sync_power_states {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1173.651735] env[63028]: DEBUG oslo_vmware.api [None req-a3046b25-04bd-41d4-9b7e-9d1f4ec87c0d tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] Task: {'id': task-2736458, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.324742} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1173.651735] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-a3046b25-04bd-41d4-9b7e-9d1f4ec87c0d tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] [instance: 5c63c2bb-4725-4722-98e2-cd5c71944c4e] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1173.652347] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc7b486c-222c-4599-89aa-c252c59372b5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.675935] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-a3046b25-04bd-41d4-9b7e-9d1f4ec87c0d tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] [instance: 5c63c2bb-4725-4722-98e2-cd5c71944c4e] Reconfiguring VM instance instance-00000077 to attach disk [datastore1] 5c63c2bb-4725-4722-98e2-cd5c71944c4e/5c63c2bb-4725-4722-98e2-cd5c71944c4e.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1173.676557] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c4393d55-487d-4cdd-bb08-8d8f20e558d3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.691769] env[63028]: DEBUG oslo_concurrency.lockutils [None req-129a564d-abec-4885-a271-64c432fc3918 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.237s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1173.694478] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a0f01393-835f-469a-9262-6cf53e2e257e tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.404s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1173.694703] env[63028]: DEBUG nova.objects.instance [None req-a0f01393-835f-469a-9262-6cf53e2e257e tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Lazy-loading 'resources' on Instance uuid 79f4ef22-a589-4d5c-8832-5d5dcdd55561 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1173.701671] env[63028]: DEBUG oslo_vmware.api [None req-a3046b25-04bd-41d4-9b7e-9d1f4ec87c0d tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] Waiting for the task: (returnval){ [ 1173.701671] env[63028]: value = "task-2736459" [ 1173.701671] env[63028]: _type = "Task" [ 1173.701671] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1173.711248] env[63028]: DEBUG oslo_vmware.api [None req-a3046b25-04bd-41d4-9b7e-9d1f4ec87c0d tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] Task: {'id': task-2736459, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1173.985716] env[63028]: INFO nova.compute.manager [None req-9e5e516c-c3e0-4d72-adb4-2e39cf34e4a2 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Shelve offloading [ 1174.037309] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Getting list of instances from cluster (obj){ [ 1174.037309] env[63028]: value = "domain-c8" [ 1174.037309] env[63028]: _type = "ClusterComputeResource" [ 1174.037309] env[63028]: } {{(pid=63028) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 1174.038562] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b2c8e49-58dc-4cd0-b657-72912179d7d3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.056514] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Got total of 8 instances {{(pid=63028) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 1174.056709] env[63028]: WARNING nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] While synchronizing instance power states, found 9 instances in the database and 8 instances on the hypervisor. [ 1174.056901] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Triggering sync for uuid d6137c80-0c09-4655-b264-472753b4fa9c {{(pid=63028) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10855}} [ 1174.057132] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Triggering sync for uuid c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7 {{(pid=63028) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10855}} [ 1174.057298] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Triggering sync for uuid 79f4ef22-a589-4d5c-8832-5d5dcdd55561 {{(pid=63028) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10855}} [ 1174.057452] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Triggering sync for uuid 4ec96b68-2fdb-4150-8d26-53fdf79c8e26 {{(pid=63028) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10855}} [ 1174.058023] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Triggering sync for uuid e5767896-8203-4b18-826f-dcb2fe02268e {{(pid=63028) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10855}} [ 1174.058023] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Triggering sync for uuid 092c7673-97fb-4085-852c-04a7c19a73e7 {{(pid=63028) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10855}} [ 1174.058023] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Triggering sync for uuid b438b12e-874a-4883-b606-c28258e5a01a {{(pid=63028) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10855}} [ 1174.058179] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Triggering sync for uuid e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc {{(pid=63028) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10855}} [ 1174.058179] env[63028]: DEBUG 
nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Triggering sync for uuid 5c63c2bb-4725-4722-98e2-cd5c71944c4e {{(pid=63028) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10855}} [ 1174.058521] env[63028]: DEBUG oslo_concurrency.lockutils [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Acquiring lock "d6137c80-0c09-4655-b264-472753b4fa9c" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1174.058742] env[63028]: DEBUG oslo_concurrency.lockutils [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Lock "d6137c80-0c09-4655-b264-472753b4fa9c" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1174.059016] env[63028]: DEBUG oslo_concurrency.lockutils [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Acquiring lock "c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1174.059245] env[63028]: DEBUG oslo_concurrency.lockutils [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Acquiring lock "79f4ef22-a589-4d5c-8832-5d5dcdd55561" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1174.059423] env[63028]: DEBUG oslo_concurrency.lockutils [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Acquiring lock "4ec96b68-2fdb-4150-8d26-53fdf79c8e26" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1174.059618] env[63028]: DEBUG oslo_concurrency.lockutils [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Lock "4ec96b68-2fdb-4150-8d26-53fdf79c8e26" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1174.059894] env[63028]: DEBUG oslo_concurrency.lockutils [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Acquiring lock "e5767896-8203-4b18-826f-dcb2fe02268e" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1174.060112] env[63028]: DEBUG oslo_concurrency.lockutils [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Acquiring lock "092c7673-97fb-4085-852c-04a7c19a73e7" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1174.060338] env[63028]: DEBUG oslo_concurrency.lockutils [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Lock "092c7673-97fb-4085-852c-04a7c19a73e7" acquired by 
"nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1174.060624] env[63028]: DEBUG oslo_concurrency.lockutils [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Acquiring lock "b438b12e-874a-4883-b606-c28258e5a01a" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1174.060807] env[63028]: DEBUG oslo_concurrency.lockutils [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Lock "b438b12e-874a-4883-b606-c28258e5a01a" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1174.061039] env[63028]: DEBUG oslo_concurrency.lockutils [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Acquiring lock "e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1174.061219] env[63028]: DEBUG oslo_concurrency.lockutils [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Lock "e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1174.061485] env[63028]: DEBUG oslo_concurrency.lockutils [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Acquiring lock "5c63c2bb-4725-4722-98e2-cd5c71944c4e" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1174.061678] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1174.061837] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=63028) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}} [ 1174.062636] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb50f9c2-167c-42b8-8635-9c1171f52c55 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.065826] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f0fb2d2-243d-4a54-bf28-8ea7b626849f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.068516] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4086ada9-dc97-4fc2-bf23-c11613510e7c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.071299] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-388fde7f-35c0-44fd-af7e-0f7203910b56 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.073933] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ada22427-657b-423b-9e39-6379dc4e25f2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.078245] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1174.213669] env[63028]: DEBUG oslo_vmware.api [None req-a3046b25-04bd-41d4-9b7e-9d1f4ec87c0d tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] Task: {'id': task-2736459, 'name': ReconfigVM_Task, 'duration_secs': 0.407525} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1174.214511] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-a3046b25-04bd-41d4-9b7e-9d1f4ec87c0d tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] [instance: 5c63c2bb-4725-4722-98e2-cd5c71944c4e] Reconfigured VM instance instance-00000077 to attach disk [datastore1] 5c63c2bb-4725-4722-98e2-cd5c71944c4e/5c63c2bb-4725-4722-98e2-cd5c71944c4e.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1174.214812] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8bbd4b32-5a1f-43e5-9bf9-4a0c2b57406c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.223463] env[63028]: DEBUG oslo_vmware.api [None req-a3046b25-04bd-41d4-9b7e-9d1f4ec87c0d tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] Waiting for the task: (returnval){ [ 1174.223463] env[63028]: value = "task-2736460" [ 1174.223463] env[63028]: _type = "Task" [ 1174.223463] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1174.231464] env[63028]: DEBUG oslo_vmware.api [None req-a3046b25-04bd-41d4-9b7e-9d1f4ec87c0d tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] Task: {'id': task-2736460, 'name': Rename_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1174.252909] env[63028]: INFO nova.scheduler.client.report [None req-129a564d-abec-4885-a271-64c432fc3918 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Deleted allocation for migration 58c26ff3-a56c-425c-bc63-1f1d728e3998 [ 1174.347513] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe9b53a3-eb1b-44e7-9123-cfdd5ac32a91 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.355392] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3142120-dd6f-46dd-8419-4c2034a9030b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.388980] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a46affe4-e244-46f2-a95d-4a7d076cf589 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.397138] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c27c91b5-61f4-4a7c-b3d4-3da82e7374d6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.411309] env[63028]: DEBUG nova.compute.provider_tree [None req-a0f01393-835f-469a-9262-6cf53e2e257e tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1174.490815] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e5e516c-c3e0-4d72-adb4-2e39cf34e4a2 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1174.491280] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e2eb6e02-fe90-4e1c-a777-83f5dcbd9ed2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.497866] env[63028]: DEBUG oslo_vmware.api [None req-9e5e516c-c3e0-4d72-adb4-2e39cf34e4a2 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Waiting for the task: (returnval){ [ 1174.497866] env[63028]: value = "task-2736461" [ 1174.497866] env[63028]: _type = "Task" [ 1174.497866] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1174.509049] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e5e516c-c3e0-4d72-adb4-2e39cf34e4a2 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] VM already powered off {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1174.509166] env[63028]: DEBUG nova.compute.manager [None req-9e5e516c-c3e0-4d72-adb4-2e39cf34e4a2 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1174.510032] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d461df4-4a32-4aef-8748-2190c6b714a5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.515674] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9e5e516c-c3e0-4d72-adb4-2e39cf34e4a2 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Acquiring lock "refresh_cache-e5767896-8203-4b18-826f-dcb2fe02268e" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1174.515840] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9e5e516c-c3e0-4d72-adb4-2e39cf34e4a2 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Acquired lock "refresh_cache-e5767896-8203-4b18-826f-dcb2fe02268e" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1174.516013] env[63028]: DEBUG nova.network.neutron [None req-9e5e516c-c3e0-4d72-adb4-2e39cf34e4a2 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1174.581969] env[63028]: DEBUG oslo_concurrency.lockutils [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1174.595100] env[63028]: DEBUG oslo_concurrency.lockutils [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Lock "092c7673-97fb-4085-852c-04a7c19a73e7" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.535s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1174.595477] env[63028]: DEBUG oslo_concurrency.lockutils [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Lock "d6137c80-0c09-4655-b264-472753b4fa9c" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.537s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1174.596139] env[63028]: DEBUG oslo_concurrency.lockutils [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Lock "4ec96b68-2fdb-4150-8d26-53fdf79c8e26" 
"released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.536s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1174.601407] env[63028]: DEBUG oslo_concurrency.lockutils [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Lock "e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.540s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1174.603852] env[63028]: DEBUG oslo_concurrency.lockutils [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Lock "b438b12e-874a-4883-b606-c28258e5a01a" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.543s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1174.734217] env[63028]: DEBUG oslo_vmware.api [None req-a3046b25-04bd-41d4-9b7e-9d1f4ec87c0d tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] Task: {'id': task-2736460, 'name': Rename_Task, 'duration_secs': 0.148656} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1174.734559] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3046b25-04bd-41d4-9b7e-9d1f4ec87c0d tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] [instance: 5c63c2bb-4725-4722-98e2-cd5c71944c4e] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1174.734823] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3e31575c-0786-4e16-8e2e-228cf951af49 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.741268] env[63028]: DEBUG oslo_vmware.api [None req-a3046b25-04bd-41d4-9b7e-9d1f4ec87c0d tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] Waiting for the task: (returnval){ [ 1174.741268] env[63028]: value = "task-2736462" [ 1174.741268] env[63028]: _type = "Task" [ 1174.741268] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1174.752790] env[63028]: DEBUG oslo_vmware.api [None req-a3046b25-04bd-41d4-9b7e-9d1f4ec87c0d tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] Task: {'id': task-2736462, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1174.761285] env[63028]: DEBUG oslo_concurrency.lockutils [None req-129a564d-abec-4885-a271-64c432fc3918 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Lock "c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 6.664s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1174.762292] env[63028]: DEBUG oslo_concurrency.lockutils [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Lock "c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.703s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1174.763741] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36497076-8eb2-4f16-bbf7-5f9ed96e2309 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.915406] env[63028]: DEBUG nova.scheduler.client.report [None req-a0f01393-835f-469a-9262-6cf53e2e257e tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1175.249146] env[63028]: DEBUG nova.network.neutron [None req-9e5e516c-c3e0-4d72-adb4-2e39cf34e4a2 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Updating instance_info_cache with network_info: [{"id": "f16f5758-9834-448c-8002-199fff053deb", "address": "fa:16:3e:95:4e:63", "network": {"id": "2b335013-49f6-4f84-b6b6-33d71818b2b9", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-2106845332-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.251", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "11332c2adbdc41928d4bf084435e2037", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f01bbee7-8b9a-46be-891e-59d8142fb359", "external-id": "nsx-vlan-transportzone-145", "segmentation_id": 145, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf16f5758-98", "ovs_interfaceid": "f16f5758-9834-448c-8002-199fff053deb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1175.253891] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4128eba6-8c11-46c3-a80d-97d6a632c077 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Acquiring lock "c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1175.259953] env[63028]: DEBUG oslo_vmware.api [None req-a3046b25-04bd-41d4-9b7e-9d1f4ec87c0d tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] Task: {'id': task-2736462, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1175.282085] env[63028]: INFO nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7] During sync_power_state the instance has a pending task (deleting). Skip. [ 1175.282085] env[63028]: DEBUG oslo_concurrency.lockutils [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Lock "c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.518s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1175.282085] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4128eba6-8c11-46c3-a80d-97d6a632c077 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Lock "c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.031s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1175.282085] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4128eba6-8c11-46c3-a80d-97d6a632c077 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Acquiring lock "c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1175.282085] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4128eba6-8c11-46c3-a80d-97d6a632c077 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Lock "c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1175.282085] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4128eba6-8c11-46c3-a80d-97d6a632c077 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Lock "c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1175.283220] env[63028]: INFO nova.compute.manager [None req-4128eba6-8c11-46c3-a80d-97d6a632c077 tempest-ServerActionsTestJSON-83366727 
tempest-ServerActionsTestJSON-83366727-project-member] [instance: c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7] Terminating instance [ 1175.420145] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a0f01393-835f-469a-9262-6cf53e2e257e tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.726s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1175.428116] env[63028]: DEBUG oslo_concurrency.lockutils [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.843s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1175.428116] env[63028]: DEBUG oslo_concurrency.lockutils [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1175.428116] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63028) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1175.428116] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad019f39-d9e4-430e-8d1c-f10c652f45ac {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.435831] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84b867b8-6cbb-4067-9e67-770f2394a82e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.453802] env[63028]: INFO nova.scheduler.client.report [None req-a0f01393-835f-469a-9262-6cf53e2e257e tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Deleted allocations for instance 79f4ef22-a589-4d5c-8832-5d5dcdd55561 [ 1175.455995] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cda7d050-1bea-476c-8521-9a6125ca1e6f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.468605] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d05efdf-b0fe-444c-992b-358f308d80c5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.504927] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179275MB free_disk=111GB free_vcpus=48 pci_devices=None {{(pid=63028) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1175.505317] env[63028]: DEBUG oslo_concurrency.lockutils [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63028) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1175.505317] env[63028]: DEBUG oslo_concurrency.lockutils [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1175.751368] env[63028]: DEBUG oslo_vmware.api [None req-a3046b25-04bd-41d4-9b7e-9d1f4ec87c0d tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] Task: {'id': task-2736462, 'name': PowerOnVM_Task, 'duration_secs': 0.956797} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1175.751873] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3046b25-04bd-41d4-9b7e-9d1f4ec87c0d tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] [instance: 5c63c2bb-4725-4722-98e2-cd5c71944c4e] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1175.752117] env[63028]: INFO nova.compute.manager [None req-a3046b25-04bd-41d4-9b7e-9d1f4ec87c0d tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] [instance: 5c63c2bb-4725-4722-98e2-cd5c71944c4e] Took 8.18 seconds to spawn the instance on the hypervisor. [ 1175.752308] env[63028]: DEBUG nova.compute.manager [None req-a3046b25-04bd-41d4-9b7e-9d1f4ec87c0d tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] [instance: 5c63c2bb-4725-4722-98e2-cd5c71944c4e] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1175.753382] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-507bf4ad-3b6d-4ab2-91a1-a6abbb2f4724 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.757222] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9e5e516c-c3e0-4d72-adb4-2e39cf34e4a2 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Releasing lock "refresh_cache-e5767896-8203-4b18-826f-dcb2fe02268e" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1175.787657] env[63028]: DEBUG nova.compute.manager [None req-4128eba6-8c11-46c3-a80d-97d6a632c077 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7] Start destroying the instance on the hypervisor. 
{{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1175.787995] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-4128eba6-8c11-46c3-a80d-97d6a632c077 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1175.788981] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06d06568-1b32-4ec9-aafd-b9cec9dd95c1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.800582] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-4128eba6-8c11-46c3-a80d-97d6a632c077 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1175.800582] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1b136b66-6351-418f-891b-9a3fea4542ef {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.807126] env[63028]: DEBUG oslo_vmware.api [None req-4128eba6-8c11-46c3-a80d-97d6a632c077 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Waiting for the task: (returnval){ [ 1175.807126] env[63028]: value = "task-2736463" [ 1175.807126] env[63028]: _type = "Task" [ 1175.807126] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1175.817385] env[63028]: DEBUG oslo_vmware.api [None req-4128eba6-8c11-46c3-a80d-97d6a632c077 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736463, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1175.970949] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a0f01393-835f-469a-9262-6cf53e2e257e tempest-ServersTestJSON-699810364 tempest-ServersTestJSON-699810364-project-member] Lock "79f4ef22-a589-4d5c-8832-5d5dcdd55561" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.119s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1175.971995] env[63028]: DEBUG oslo_concurrency.lockutils [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Lock "79f4ef22-a589-4d5c-8832-5d5dcdd55561" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 1.913s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1175.972392] env[63028]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-28830852-4789-47c7-918c-fde839a60ff7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.981995] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b42c43a-a255-45fa-859e-cdbfd1f500ba {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.066715] env[63028]: DEBUG nova.compute.manager [req-7d303797-a9e4-4ff5-a584-3fae47973057 req-65082739-fb98-42ea-992a-07f908fdaaca service nova] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Received event network-vif-unplugged-f16f5758-9834-448c-8002-199fff053deb {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1176.066949] env[63028]: DEBUG oslo_concurrency.lockutils [req-7d303797-a9e4-4ff5-a584-3fae47973057 req-65082739-fb98-42ea-992a-07f908fdaaca service nova] Acquiring lock "e5767896-8203-4b18-826f-dcb2fe02268e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1176.067363] env[63028]: DEBUG oslo_concurrency.lockutils [req-7d303797-a9e4-4ff5-a584-3fae47973057 req-65082739-fb98-42ea-992a-07f908fdaaca service nova] Lock "e5767896-8203-4b18-826f-dcb2fe02268e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1176.067604] env[63028]: DEBUG oslo_concurrency.lockutils [req-7d303797-a9e4-4ff5-a584-3fae47973057 req-65082739-fb98-42ea-992a-07f908fdaaca service nova] Lock "e5767896-8203-4b18-826f-dcb2fe02268e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1176.068294] env[63028]: DEBUG nova.compute.manager [req-7d303797-a9e4-4ff5-a584-3fae47973057 req-65082739-fb98-42ea-992a-07f908fdaaca service nova] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] No waiting events found dispatching network-vif-unplugged-f16f5758-9834-448c-8002-199fff053deb {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1176.068294] env[63028]: WARNING nova.compute.manager [req-7d303797-a9e4-4ff5-a584-3fae47973057 
req-65082739-fb98-42ea-992a-07f908fdaaca service nova] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Received unexpected event network-vif-unplugged-f16f5758-9834-448c-8002-199fff053deb for instance with vm_state shelved and task_state shelving_offloading. [ 1176.157293] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-9e5e516c-c3e0-4d72-adb4-2e39cf34e4a2 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1176.158257] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4be1769-6b9a-4ba7-9233-14202fbc328a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.168112] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-9e5e516c-c3e0-4d72-adb4-2e39cf34e4a2 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1176.168387] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c8a37f30-1231-4977-8e12-1e09259faccf {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.231824] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-9e5e516c-c3e0-4d72-adb4-2e39cf34e4a2 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1176.231824] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-9e5e516c-c3e0-4d72-adb4-2e39cf34e4a2 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Deleting contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1176.231824] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-9e5e516c-c3e0-4d72-adb4-2e39cf34e4a2 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Deleting the datastore file [datastore2] e5767896-8203-4b18-826f-dcb2fe02268e {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1176.231824] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0b485201-dfc7-4e41-8c22-47d02ab872e1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.239177] env[63028]: DEBUG oslo_vmware.api [None req-9e5e516c-c3e0-4d72-adb4-2e39cf34e4a2 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Waiting for the task: (returnval){ [ 1176.239177] env[63028]: value = "task-2736465" [ 1176.239177] env[63028]: _type = "Task" [ 1176.239177] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1176.247665] env[63028]: DEBUG oslo_vmware.api [None req-9e5e516c-c3e0-4d72-adb4-2e39cf34e4a2 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2736465, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1176.276331] env[63028]: INFO nova.compute.manager [None req-a3046b25-04bd-41d4-9b7e-9d1f4ec87c0d tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] [instance: 5c63c2bb-4725-4722-98e2-cd5c71944c4e] Took 14.89 seconds to build instance. [ 1176.315640] env[63028]: DEBUG oslo_vmware.api [None req-4128eba6-8c11-46c3-a80d-97d6a632c077 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736463, 'name': PowerOffVM_Task, 'duration_secs': 0.189929} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1176.315911] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-4128eba6-8c11-46c3-a80d-97d6a632c077 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1176.316159] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-4128eba6-8c11-46c3-a80d-97d6a632c077 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1176.318095] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e129e14a-7d76-4dd5-8929-029762b9cb10 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.380031] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-4128eba6-8c11-46c3-a80d-97d6a632c077 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1176.380031] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-4128eba6-8c11-46c3-a80d-97d6a632c077 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7] Deleting contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1176.380031] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-4128eba6-8c11-46c3-a80d-97d6a632c077 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Deleting the datastore file [datastore2] c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7 {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1176.380254] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f4b3c6bc-5fcb-42ab-b5b2-5d8224303ec2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.386770] env[63028]: 
DEBUG oslo_vmware.api [None req-4128eba6-8c11-46c3-a80d-97d6a632c077 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Waiting for the task: (returnval){ [ 1176.386770] env[63028]: value = "task-2736467" [ 1176.386770] env[63028]: _type = "Task" [ 1176.386770] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1176.395774] env[63028]: DEBUG oslo_vmware.api [None req-4128eba6-8c11-46c3-a80d-97d6a632c077 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736467, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1176.525663] env[63028]: DEBUG oslo_concurrency.lockutils [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Lock "79f4ef22-a589-4d5c-8832-5d5dcdd55561" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.554s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1176.544749] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance d6137c80-0c09-4655-b264-472753b4fa9c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1176.544921] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance 4ec96b68-2fdb-4150-8d26-53fdf79c8e26 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1176.545323] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance e5767896-8203-4b18-826f-dcb2fe02268e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1176.545505] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance 092c7673-97fb-4085-852c-04a7c19a73e7 actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1176.545634] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance b438b12e-874a-4883-b606-c28258e5a01a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1176.545752] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1176.545867] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1176.545979] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance 5c63c2bb-4725-4722-98e2-cd5c71944c4e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1176.546194] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Total usable vcpus: 48, total allocated vcpus: 8 {{(pid=63028) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1176.546334] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2176MB phys_disk=200GB used_disk=7GB total_vcpus=48 used_vcpus=8 pci_stats=[] {{(pid=63028) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1176.689887] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f37e511d-bc25-46d1-ac51-ffe5922c9c1e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.702393] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd5c7238-efea-4b4d-a7f0-e3e04f125ea8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.736087] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b498b85d-ca33-4f74-8e28-11dfb513c459 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.746988] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbba1712-6440-4ff7-8d2b-36ac789c7d8f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.754859] env[63028]: DEBUG oslo_vmware.api [None req-9e5e516c-c3e0-4d72-adb4-2e39cf34e4a2 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2736465, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.200777} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1176.755810] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-9e5e516c-c3e0-4d72-adb4-2e39cf34e4a2 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1176.755810] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-9e5e516c-c3e0-4d72-adb4-2e39cf34e4a2 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Deleted contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1176.755810] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-9e5e516c-c3e0-4d72-adb4-2e39cf34e4a2 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1176.765397] env[63028]: DEBUG nova.compute.provider_tree [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1176.775513] env[63028]: INFO nova.scheduler.client.report [None req-9e5e516c-c3e0-4d72-adb4-2e39cf34e4a2 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Deleted allocations for instance e5767896-8203-4b18-826f-dcb2fe02268e [ 1176.778478] env[63028]: DEBUG oslo_concurrency.lockutils [None req-a3046b25-04bd-41d4-9b7e-9d1f4ec87c0d tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] Lock "5c63c2bb-4725-4722-98e2-cd5c71944c4e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.405s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1176.779110] env[63028]: DEBUG oslo_concurrency.lockutils [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Lock "5c63c2bb-4725-4722-98e2-cd5c71944c4e" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 2.717s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1176.779110] env[63028]: INFO nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: 5c63c2bb-4725-4722-98e2-cd5c71944c4e] During sync_power_state the instance has a pending task (spawning). Skip. 
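The resource tracker audit above ends with used_ram=2176MB, used_disk=7GB, used_vcpus=8; those figures follow from summing the per-instance placement allocations it just listed and adding the 512MB the inventory reports as reserved memory. A minimal sketch of that arithmetic, with the allocation dicts copied from this log (illustrative only, not the ResourceTracker code itself):

    # Reproduce the "Final resource view" numbers from the per-instance
    # allocations printed by the resource tracker audit above. Values are
    # copied from this log; 512 MB is the 'reserved' field of the MEMORY_MB
    # inventory reported to placement.
    allocations = [
        {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1},  # d6137c80-...
        {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1},  # 4ec96b68-...
        {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1},  # e5767896-...
        {'MEMORY_MB': 256, 'VCPU': 1},                # 092c7673-... (no DISK_GB in its allocation)
        {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1},  # b438b12e-...
        {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1},  # c2d7d7f4-...
        {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1},  # e3e121c9-...
        {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1},  # 5c63c2bb-...
    ]

    reserved_ram_mb = 512  # MEMORY_MB 'reserved' in the inventory above

    used_ram = sum(a.get('MEMORY_MB', 0) for a in allocations) + reserved_ram_mb
    used_disk = sum(a.get('DISK_GB', 0) for a in allocations)
    used_vcpus = sum(a.get('VCPU', 0) for a in allocations)

    print(used_ram, used_disk, used_vcpus)  # 2176 7 8, matching the final resource view
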
[ 1176.779110] env[63028]: DEBUG oslo_concurrency.lockutils [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Lock "5c63c2bb-4725-4722-98e2-cd5c71944c4e" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1176.896925] env[63028]: DEBUG oslo_vmware.api [None req-4128eba6-8c11-46c3-a80d-97d6a632c077 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736467, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.205439} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1176.899061] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-4128eba6-8c11-46c3-a80d-97d6a632c077 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1176.899689] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-4128eba6-8c11-46c3-a80d-97d6a632c077 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7] Deleted contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1176.900128] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-4128eba6-8c11-46c3-a80d-97d6a632c077 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1176.900286] env[63028]: INFO nova.compute.manager [None req-4128eba6-8c11-46c3-a80d-97d6a632c077 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1176.901450] env[63028]: DEBUG oslo.service.loopingcall [None req-4128eba6-8c11-46c3-a80d-97d6a632c077 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1176.901450] env[63028]: DEBUG nova.compute.manager [-] [instance: c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1176.901450] env[63028]: DEBUG nova.network.neutron [-] [instance: c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1177.232265] env[63028]: DEBUG oslo_concurrency.lockutils [None req-75e2161c-64b3-4ad9-b7fe-fdf4fadf47ea tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] Acquiring lock "9e3ac23e-16ef-4626-817b-24683fd89b1d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1177.232265] env[63028]: DEBUG oslo_concurrency.lockutils [None req-75e2161c-64b3-4ad9-b7fe-fdf4fadf47ea tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] Lock "9e3ac23e-16ef-4626-817b-24683fd89b1d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1177.268697] env[63028]: DEBUG nova.scheduler.client.report [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1177.280548] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9e5e516c-c3e0-4d72-adb4-2e39cf34e4a2 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1177.735736] env[63028]: DEBUG nova.compute.manager [None req-75e2161c-64b3-4ad9-b7fe-fdf4fadf47ea tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] [instance: 9e3ac23e-16ef-4626-817b-24683fd89b1d] Starting instance... 
{{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1177.774229] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63028) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1177.774443] env[63028]: DEBUG oslo_concurrency.lockutils [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.269s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1177.775100] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9e5e516c-c3e0-4d72-adb4-2e39cf34e4a2 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.495s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1177.775461] env[63028]: DEBUG nova.objects.instance [None req-9e5e516c-c3e0-4d72-adb4-2e39cf34e4a2 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Lazy-loading 'resources' on Instance uuid e5767896-8203-4b18-826f-dcb2fe02268e {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1177.844025] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8558b6d6-1286-489b-a0f2-6f310eb41f1f tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] Acquiring lock "5c63c2bb-4725-4722-98e2-cd5c71944c4e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1177.844025] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8558b6d6-1286-489b-a0f2-6f310eb41f1f tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] Lock "5c63c2bb-4725-4722-98e2-cd5c71944c4e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1177.844366] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8558b6d6-1286-489b-a0f2-6f310eb41f1f tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] Acquiring lock "5c63c2bb-4725-4722-98e2-cd5c71944c4e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1177.844624] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8558b6d6-1286-489b-a0f2-6f310eb41f1f tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] Lock "5c63c2bb-4725-4722-98e2-cd5c71944c4e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1177.846159] env[63028]: DEBUG oslo_concurrency.lockutils [None 
req-8558b6d6-1286-489b-a0f2-6f310eb41f1f tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] Lock "5c63c2bb-4725-4722-98e2-cd5c71944c4e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1177.847644] env[63028]: INFO nova.compute.manager [None req-8558b6d6-1286-489b-a0f2-6f310eb41f1f tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] [instance: 5c63c2bb-4725-4722-98e2-cd5c71944c4e] Terminating instance [ 1177.935616] env[63028]: DEBUG nova.network.neutron [-] [instance: c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1178.139160] env[63028]: DEBUG nova.compute.manager [req-43142370-99fb-4bba-9702-d1d4470618a2 req-097ea199-65d3-4056-b167-c6a096409765 service nova] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Received event network-changed-f16f5758-9834-448c-8002-199fff053deb {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1178.139160] env[63028]: DEBUG nova.compute.manager [req-43142370-99fb-4bba-9702-d1d4470618a2 req-097ea199-65d3-4056-b167-c6a096409765 service nova] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Refreshing instance network info cache due to event network-changed-f16f5758-9834-448c-8002-199fff053deb. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1178.139352] env[63028]: DEBUG oslo_concurrency.lockutils [req-43142370-99fb-4bba-9702-d1d4470618a2 req-097ea199-65d3-4056-b167-c6a096409765 service nova] Acquiring lock "refresh_cache-e5767896-8203-4b18-826f-dcb2fe02268e" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1178.142221] env[63028]: DEBUG oslo_concurrency.lockutils [req-43142370-99fb-4bba-9702-d1d4470618a2 req-097ea199-65d3-4056-b167-c6a096409765 service nova] Acquired lock "refresh_cache-e5767896-8203-4b18-826f-dcb2fe02268e" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1178.142221] env[63028]: DEBUG nova.network.neutron [req-43142370-99fb-4bba-9702-d1d4470618a2 req-097ea199-65d3-4056-b167-c6a096409765 service nova] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Refreshing network info cache for port f16f5758-9834-448c-8002-199fff053deb {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1178.265336] env[63028]: DEBUG oslo_concurrency.lockutils [None req-75e2161c-64b3-4ad9-b7fe-fdf4fadf47ea tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1178.280162] env[63028]: DEBUG nova.objects.instance [None req-9e5e516c-c3e0-4d72-adb4-2e39cf34e4a2 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Lazy-loading 'numa_topology' on Instance uuid e5767896-8203-4b18-826f-dcb2fe02268e {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1178.351207] env[63028]: 
DEBUG nova.compute.manager [None req-8558b6d6-1286-489b-a0f2-6f310eb41f1f tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] [instance: 5c63c2bb-4725-4722-98e2-cd5c71944c4e] Start destroying the instance on the hypervisor. {{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1178.351476] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-8558b6d6-1286-489b-a0f2-6f310eb41f1f tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] [instance: 5c63c2bb-4725-4722-98e2-cd5c71944c4e] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1178.352463] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf37afa9-dc06-4773-9a89-91023c03699e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.363873] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-8558b6d6-1286-489b-a0f2-6f310eb41f1f tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] [instance: 5c63c2bb-4725-4722-98e2-cd5c71944c4e] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1178.364019] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7e8e3c8d-34dc-4bba-9a77-0cf9a690a67e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.372700] env[63028]: DEBUG oslo_vmware.api [None req-8558b6d6-1286-489b-a0f2-6f310eb41f1f tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] Waiting for the task: (returnval){ [ 1178.372700] env[63028]: value = "task-2736468" [ 1178.372700] env[63028]: _type = "Task" [ 1178.372700] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1178.383152] env[63028]: DEBUG oslo_vmware.api [None req-8558b6d6-1286-489b-a0f2-6f310eb41f1f tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] Task: {'id': task-2736468, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1178.439082] env[63028]: INFO nova.compute.manager [-] [instance: c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7] Took 1.54 seconds to deallocate network for instance. [ 1178.782228] env[63028]: DEBUG nova.objects.base [None req-9e5e516c-c3e0-4d72-adb4-2e39cf34e4a2 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Object Instance lazy-loaded attributes: resources,numa_topology {{(pid=63028) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1178.888581] env[63028]: DEBUG oslo_vmware.api [None req-8558b6d6-1286-489b-a0f2-6f310eb41f1f tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] Task: {'id': task-2736468, 'name': PowerOffVM_Task, 'duration_secs': 0.187921} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1178.888581] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-8558b6d6-1286-489b-a0f2-6f310eb41f1f tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] [instance: 5c63c2bb-4725-4722-98e2-cd5c71944c4e] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1178.888708] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-8558b6d6-1286-489b-a0f2-6f310eb41f1f tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] [instance: 5c63c2bb-4725-4722-98e2-cd5c71944c4e] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1178.888927] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-aec20483-200c-461f-9e03-58c925fce7f5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.902613] env[63028]: DEBUG nova.network.neutron [req-43142370-99fb-4bba-9702-d1d4470618a2 req-097ea199-65d3-4056-b167-c6a096409765 service nova] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Updated VIF entry in instance network info cache for port f16f5758-9834-448c-8002-199fff053deb. {{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1178.902990] env[63028]: DEBUG nova.network.neutron [req-43142370-99fb-4bba-9702-d1d4470618a2 req-097ea199-65d3-4056-b167-c6a096409765 service nova] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Updating instance_info_cache with network_info: [{"id": "f16f5758-9834-448c-8002-199fff053deb", "address": "fa:16:3e:95:4e:63", "network": {"id": "2b335013-49f6-4f84-b6b6-33d71818b2b9", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-2106845332-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.251", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "11332c2adbdc41928d4bf084435e2037", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tapf16f5758-98", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1178.923826] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1bde25e-1cb9-4cf2-ad4f-251e2d348dd4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.932586] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45b492fc-d4b3-4d39-8cd6-3a8f73517c3b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.966804] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4128eba6-8c11-46c3-a80d-97d6a632c077 
tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1178.970862] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c162dd2-6d2e-4a08-82c5-9d6f5344ade8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.974899] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-8558b6d6-1286-489b-a0f2-6f310eb41f1f tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] [instance: 5c63c2bb-4725-4722-98e2-cd5c71944c4e] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1178.975146] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-8558b6d6-1286-489b-a0f2-6f310eb41f1f tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] [instance: 5c63c2bb-4725-4722-98e2-cd5c71944c4e] Deleting contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1178.975333] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-8558b6d6-1286-489b-a0f2-6f310eb41f1f tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] Deleting the datastore file [datastore1] 5c63c2bb-4725-4722-98e2-cd5c71944c4e {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1178.975589] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-06fbb329-be5d-4ca0-bcd1-26196f0dc0ba {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.984188] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad607bab-b3f0-42a7-80fc-d12c4a6496cc {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.988403] env[63028]: DEBUG oslo_vmware.api [None req-8558b6d6-1286-489b-a0f2-6f310eb41f1f tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] Waiting for the task: (returnval){ [ 1178.988403] env[63028]: value = "task-2736470" [ 1178.988403] env[63028]: _type = "Task" [ 1178.988403] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1179.002255] env[63028]: DEBUG nova.compute.provider_tree [None req-9e5e516c-c3e0-4d72-adb4-2e39cf34e4a2 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1179.008435] env[63028]: DEBUG oslo_vmware.api [None req-8558b6d6-1286-489b-a0f2-6f310eb41f1f tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] Task: {'id': task-2736470, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1179.406358] env[63028]: DEBUG oslo_concurrency.lockutils [req-43142370-99fb-4bba-9702-d1d4470618a2 req-097ea199-65d3-4056-b167-c6a096409765 service nova] Releasing lock "refresh_cache-e5767896-8203-4b18-826f-dcb2fe02268e" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1179.406607] env[63028]: DEBUG nova.compute.manager [req-43142370-99fb-4bba-9702-d1d4470618a2 req-097ea199-65d3-4056-b167-c6a096409765 service nova] [instance: c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7] Received event network-vif-deleted-6ecb125b-389c-4dce-8446-368a7298e497 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1179.498442] env[63028]: DEBUG oslo_vmware.api [None req-8558b6d6-1286-489b-a0f2-6f310eb41f1f tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] Task: {'id': task-2736470, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.174823} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1179.498701] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-8558b6d6-1286-489b-a0f2-6f310eb41f1f tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1179.498892] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-8558b6d6-1286-489b-a0f2-6f310eb41f1f tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] [instance: 5c63c2bb-4725-4722-98e2-cd5c71944c4e] Deleted contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1179.499082] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-8558b6d6-1286-489b-a0f2-6f310eb41f1f tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] [instance: 5c63c2bb-4725-4722-98e2-cd5c71944c4e] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1179.499261] env[63028]: INFO nova.compute.manager [None req-8558b6d6-1286-489b-a0f2-6f310eb41f1f tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] [instance: 5c63c2bb-4725-4722-98e2-cd5c71944c4e] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1179.499499] env[63028]: DEBUG oslo.service.loopingcall [None req-8558b6d6-1286-489b-a0f2-6f310eb41f1f tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1179.499686] env[63028]: DEBUG nova.compute.manager [-] [instance: 5c63c2bb-4725-4722-98e2-cd5c71944c4e] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1179.499778] env[63028]: DEBUG nova.network.neutron [-] [instance: 5c63c2bb-4725-4722-98e2-cd5c71944c4e] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1179.505315] env[63028]: DEBUG nova.scheduler.client.report [None req-9e5e516c-c3e0-4d72-adb4-2e39cf34e4a2 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1180.010363] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9e5e516c-c3e0-4d72-adb4-2e39cf34e4a2 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.235s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1180.012990] env[63028]: DEBUG oslo_concurrency.lockutils [None req-75e2161c-64b3-4ad9-b7fe-fdf4fadf47ea tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.748s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1180.014436] env[63028]: INFO nova.compute.claims [None req-75e2161c-64b3-4ad9-b7fe-fdf4fadf47ea tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] [instance: 9e3ac23e-16ef-4626-817b-24683fd89b1d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1180.082029] env[63028]: DEBUG oslo_concurrency.lockutils [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Acquiring lock "e5767896-8203-4b18-826f-dcb2fe02268e" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1180.159225] env[63028]: DEBUG nova.compute.manager [req-cd268479-eba4-4b94-9f44-1b61f1e6632e req-09ce9501-e7df-48d3-bed4-c03cab3891c6 service nova] [instance: 5c63c2bb-4725-4722-98e2-cd5c71944c4e] Received event network-vif-deleted-8f7d6b80-4998-4030-85eb-bc1f302e96b3 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1180.159225] env[63028]: INFO nova.compute.manager [req-cd268479-eba4-4b94-9f44-1b61f1e6632e req-09ce9501-e7df-48d3-bed4-c03cab3891c6 service nova] [instance: 5c63c2bb-4725-4722-98e2-cd5c71944c4e] Neutron 
deleted interface 8f7d6b80-4998-4030-85eb-bc1f302e96b3; detaching it from the instance and deleting it from the info cache [ 1180.159225] env[63028]: DEBUG nova.network.neutron [req-cd268479-eba4-4b94-9f44-1b61f1e6632e req-09ce9501-e7df-48d3-bed4-c03cab3891c6 service nova] [instance: 5c63c2bb-4725-4722-98e2-cd5c71944c4e] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1180.210352] env[63028]: DEBUG nova.network.neutron [-] [instance: 5c63c2bb-4725-4722-98e2-cd5c71944c4e] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1180.524275] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9e5e516c-c3e0-4d72-adb4-2e39cf34e4a2 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Lock "e5767896-8203-4b18-826f-dcb2fe02268e" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 21.122s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1180.525264] env[63028]: DEBUG oslo_concurrency.lockutils [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Lock "e5767896-8203-4b18-826f-dcb2fe02268e" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 6.465s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1180.525458] env[63028]: INFO nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] During sync_power_state the instance has a pending task (shelving_offloading). Skip. 
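The "Acquiring lock ... / acquired ... waited Ns / released ... held Ns" lines throughout this log come from oslo.concurrency's named in-process locks, used here both for the shared "compute_resources" lock and for per-instance locks keyed by the instance UUID. A simplified sketch of that pattern, assuming an existing oslo.concurrency install (illustrative, not nova's own wrappers):

    # Named in-process locks as used by the resource tracker and compute
    # manager above; lockutils logs the acquire/wait/hold timings at DEBUG,
    # which is exactly what the lockutils.py:402/407/421 lines record.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def update_usage(instance):
        # Runs with the "compute_resources" lock held; concurrent callers
        # block, and their wait time shows up as "waited N.NNNs".
        pass

    def do_terminate_instance(instance_uuid):
        # Per-instance serialization uses the same mechanism with the
        # instance UUID as the lock name (e.g. "5c63c2bb-4725-...").
        with lockutils.lock(instance_uuid):
            pass
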
[ 1180.525942] env[63028]: DEBUG oslo_concurrency.lockutils [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Lock "e5767896-8203-4b18-826f-dcb2fe02268e" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1180.528689] env[63028]: DEBUG oslo_concurrency.lockutils [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Lock "e5767896-8203-4b18-826f-dcb2fe02268e" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 0.444s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1180.529046] env[63028]: INFO nova.compute.manager [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Unshelving [ 1180.661878] env[63028]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-063d1561-fca1-4048-8d4a-7c8dd18ae9a3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.671521] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a3f7050-18a0-44c5-adf6-7d81c25c8783 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.700597] env[63028]: DEBUG nova.compute.manager [req-cd268479-eba4-4b94-9f44-1b61f1e6632e req-09ce9501-e7df-48d3-bed4-c03cab3891c6 service nova] [instance: 5c63c2bb-4725-4722-98e2-cd5c71944c4e] Detach interface failed, port_id=8f7d6b80-4998-4030-85eb-bc1f302e96b3, reason: Instance 5c63c2bb-4725-4722-98e2-cd5c71944c4e could not be found. {{(pid=63028) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1180.712301] env[63028]: INFO nova.compute.manager [-] [instance: 5c63c2bb-4725-4722-98e2-cd5c71944c4e] Took 1.21 seconds to deallocate network for instance. 
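The destroy sequence recorded above for c2d7d7f4-... and 5c63c2bb-... (PowerOffVM_Task, then UnregisterVM, then DeleteDatastoreFile_Task, each polled until "completed successfully") is driven through the oslo.vmware session. A rough sketch of that call pattern, assuming an already-established VMwareAPISession; nova's vm_util/vmops add error handling and state checks that are omitted here:

    # Illustrative version of the power-off / unregister / delete-files flow
    # seen in this log, using oslo.vmware's session API.
    from oslo_vmware import api as vmware_api

    def destroy_vm(session: vmware_api.VMwareAPISession, vm_ref,
                   datacenter_ref, ds_path):
        # Submit the power-off task and poll it; wait_for_task is what emits
        # the "progress is 0%" / "completed successfully" lines above.
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        session.wait_for_task(task)

        # UnregisterVM returns no task object.
        session.invoke_api(session.vim, 'UnregisterVM', vm_ref)

        # Delete the instance directory from the datastore, e.g.
        # "[datastore2] c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7".
        file_manager = session.vim.service_content.fileManager
        task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                                  file_manager, name=ds_path,
                                  datacenter=datacenter_ref)
        session.wait_for_task(task)
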
[ 1181.134671] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bde7597d-8324-4e49-b6e5-6e0dd6fe6e18 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.142737] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3198ed8e-397a-49ac-a6f4-2a4c786d96bb {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.171095] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0602260e-e666-43f6-9e1f-f1723166201f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.178250] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a45357a6-5dd8-456d-a5e3-d3e2f5bab7ac {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.191286] env[63028]: DEBUG nova.compute.provider_tree [None req-75e2161c-64b3-4ad9-b7fe-fdf4fadf47ea tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1181.218510] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8558b6d6-1286-489b-a0f2-6f310eb41f1f tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1181.552768] env[63028]: DEBUG oslo_concurrency.lockutils [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1181.695222] env[63028]: DEBUG nova.scheduler.client.report [None req-75e2161c-64b3-4ad9-b7fe-fdf4fadf47ea tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1182.200150] env[63028]: DEBUG oslo_concurrency.lockutils [None req-75e2161c-64b3-4ad9-b7fe-fdf4fadf47ea tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.187s {{(pid=63028) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1182.200751] env[63028]: DEBUG nova.compute.manager [None req-75e2161c-64b3-4ad9-b7fe-fdf4fadf47ea tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] [instance: 9e3ac23e-16ef-4626-817b-24683fd89b1d] Start building networks asynchronously for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1182.203323] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4128eba6-8c11-46c3-a80d-97d6a632c077 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.237s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1182.203534] env[63028]: DEBUG nova.objects.instance [None req-4128eba6-8c11-46c3-a80d-97d6a632c077 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Lazy-loading 'resources' on Instance uuid c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1182.707138] env[63028]: DEBUG nova.compute.utils [None req-75e2161c-64b3-4ad9-b7fe-fdf4fadf47ea tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1182.711675] env[63028]: DEBUG nova.compute.manager [None req-75e2161c-64b3-4ad9-b7fe-fdf4fadf47ea tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] [instance: 9e3ac23e-16ef-4626-817b-24683fd89b1d] Allocating IP information in the background. 
{{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1182.711675] env[63028]: DEBUG nova.network.neutron [None req-75e2161c-64b3-4ad9-b7fe-fdf4fadf47ea tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] [instance: 9e3ac23e-16ef-4626-817b-24683fd89b1d] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1182.749903] env[63028]: DEBUG nova.policy [None req-75e2161c-64b3-4ad9-b7fe-fdf4fadf47ea tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '505f89d7c82b4795a89733002c67f36b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1e5e7f6381c340d9b307742d4ec6b0ed', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 1182.841768] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68165376-7e8c-4f74-af7c-2022b1045488 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.849848] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc6d935f-ca0a-45e7-b44a-051f473e5d0f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.881960] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dd0c276-04d3-4298-b9bc-2679097af1ff {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.891462] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3582e3f8-9d76-483b-8398-54f958f45bd1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.906130] env[63028]: DEBUG nova.compute.provider_tree [None req-4128eba6-8c11-46c3-a80d-97d6a632c077 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1183.212427] env[63028]: DEBUG nova.compute.manager [None req-75e2161c-64b3-4ad9-b7fe-fdf4fadf47ea tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] [instance: 9e3ac23e-16ef-4626-817b-24683fd89b1d] Start building block device mappings for instance. 
{{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1183.285164] env[63028]: DEBUG nova.network.neutron [None req-75e2161c-64b3-4ad9-b7fe-fdf4fadf47ea tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] [instance: 9e3ac23e-16ef-4626-817b-24683fd89b1d] Successfully created port: 26601205-e781-4ca7-852b-4a1f23fdbe75 {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1183.410601] env[63028]: DEBUG nova.scheduler.client.report [None req-4128eba6-8c11-46c3-a80d-97d6a632c077 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1183.915312] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4128eba6-8c11-46c3-a80d-97d6a632c077 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.712s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1183.917653] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8558b6d6-1286-489b-a0f2-6f310eb41f1f tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.699s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1183.918210] env[63028]: DEBUG nova.objects.instance [None req-8558b6d6-1286-489b-a0f2-6f310eb41f1f tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] Lazy-loading 'resources' on Instance uuid 5c63c2bb-4725-4722-98e2-cd5c71944c4e {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1183.939714] env[63028]: INFO nova.scheduler.client.report [None req-4128eba6-8c11-46c3-a80d-97d6a632c077 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Deleted allocations for instance c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7 [ 1184.222074] env[63028]: DEBUG nova.compute.manager [None req-75e2161c-64b3-4ad9-b7fe-fdf4fadf47ea tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] [instance: 9e3ac23e-16ef-4626-817b-24683fd89b1d] Start spawning the instance on the hypervisor. 
{{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1184.247699] env[63028]: DEBUG nova.virt.hardware [None req-75e2161c-64b3-4ad9-b7fe-fdf4fadf47ea tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1184.247955] env[63028]: DEBUG nova.virt.hardware [None req-75e2161c-64b3-4ad9-b7fe-fdf4fadf47ea tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1184.248133] env[63028]: DEBUG nova.virt.hardware [None req-75e2161c-64b3-4ad9-b7fe-fdf4fadf47ea tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1184.248319] env[63028]: DEBUG nova.virt.hardware [None req-75e2161c-64b3-4ad9-b7fe-fdf4fadf47ea tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1184.248466] env[63028]: DEBUG nova.virt.hardware [None req-75e2161c-64b3-4ad9-b7fe-fdf4fadf47ea tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1184.248668] env[63028]: DEBUG nova.virt.hardware [None req-75e2161c-64b3-4ad9-b7fe-fdf4fadf47ea tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1184.248895] env[63028]: DEBUG nova.virt.hardware [None req-75e2161c-64b3-4ad9-b7fe-fdf4fadf47ea tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1184.249071] env[63028]: DEBUG nova.virt.hardware [None req-75e2161c-64b3-4ad9-b7fe-fdf4fadf47ea tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] Build topologies for 1 
vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1184.249242] env[63028]: DEBUG nova.virt.hardware [None req-75e2161c-64b3-4ad9-b7fe-fdf4fadf47ea tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1184.249407] env[63028]: DEBUG nova.virt.hardware [None req-75e2161c-64b3-4ad9-b7fe-fdf4fadf47ea tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1184.249579] env[63028]: DEBUG nova.virt.hardware [None req-75e2161c-64b3-4ad9-b7fe-fdf4fadf47ea tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1184.250450] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13903a74-4dee-4b46-86d4-e5e323fcd0f8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.258537] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50dd3bee-655e-4ccc-837b-331d17bede0d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.449117] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4128eba6-8c11-46c3-a80d-97d6a632c077 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Lock "c2d7d7f4-0bb0-4407-ac60-9c69b0e39ec7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.169s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1184.493972] env[63028]: DEBUG oslo_concurrency.lockutils [None req-787ada8a-f311-4d03-8fcb-45d2db2fbef3 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Acquiring lock "092c7673-97fb-4085-852c-04a7c19a73e7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1184.494227] env[63028]: DEBUG oslo_concurrency.lockutils [None req-787ada8a-f311-4d03-8fcb-45d2db2fbef3 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Lock "092c7673-97fb-4085-852c-04a7c19a73e7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1184.494432] env[63028]: DEBUG oslo_concurrency.lockutils [None req-787ada8a-f311-4d03-8fcb-45d2db2fbef3 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Acquiring lock "092c7673-97fb-4085-852c-04a7c19a73e7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1184.494646] env[63028]: DEBUG oslo_concurrency.lockutils [None req-787ada8a-f311-4d03-8fcb-45d2db2fbef3 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Lock "092c7673-97fb-4085-852c-04a7c19a73e7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1184.494823] env[63028]: DEBUG oslo_concurrency.lockutils [None req-787ada8a-f311-4d03-8fcb-45d2db2fbef3 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Lock "092c7673-97fb-4085-852c-04a7c19a73e7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1184.496578] env[63028]: INFO nova.compute.manager [None req-787ada8a-f311-4d03-8fcb-45d2db2fbef3 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 092c7673-97fb-4085-852c-04a7c19a73e7] Terminating instance [ 1184.523559] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-032d9867-b866-4093-bc9d-4bb71a89c221 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.531548] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a115ceb-8931-45b9-9f39-1e2907117199 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.562608] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d3edf3f-611a-421c-bcdb-07525da70f1a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.570341] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dda7d38-172f-4fa9-915a-811e7c99cdb8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.584982] env[63028]: DEBUG nova.compute.provider_tree [None req-8558b6d6-1286-489b-a0f2-6f310eb41f1f tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1184.685567] env[63028]: DEBUG nova.compute.manager [req-2a9cc854-ac37-446d-90d2-027517a0d58a req-41d15807-9d03-45ac-b736-1fa631a717f6 service nova] [instance: 9e3ac23e-16ef-4626-817b-24683fd89b1d] Received event network-vif-plugged-26601205-e781-4ca7-852b-4a1f23fdbe75 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1184.685799] env[63028]: DEBUG oslo_concurrency.lockutils [req-2a9cc854-ac37-446d-90d2-027517a0d58a req-41d15807-9d03-45ac-b736-1fa631a717f6 service nova] Acquiring lock "9e3ac23e-16ef-4626-817b-24683fd89b1d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
1184.686083] env[63028]: DEBUG oslo_concurrency.lockutils [req-2a9cc854-ac37-446d-90d2-027517a0d58a req-41d15807-9d03-45ac-b736-1fa631a717f6 service nova] Lock "9e3ac23e-16ef-4626-817b-24683fd89b1d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1184.686202] env[63028]: DEBUG oslo_concurrency.lockutils [req-2a9cc854-ac37-446d-90d2-027517a0d58a req-41d15807-9d03-45ac-b736-1fa631a717f6 service nova] Lock "9e3ac23e-16ef-4626-817b-24683fd89b1d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1184.686354] env[63028]: DEBUG nova.compute.manager [req-2a9cc854-ac37-446d-90d2-027517a0d58a req-41d15807-9d03-45ac-b736-1fa631a717f6 service nova] [instance: 9e3ac23e-16ef-4626-817b-24683fd89b1d] No waiting events found dispatching network-vif-plugged-26601205-e781-4ca7-852b-4a1f23fdbe75 {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1184.686511] env[63028]: WARNING nova.compute.manager [req-2a9cc854-ac37-446d-90d2-027517a0d58a req-41d15807-9d03-45ac-b736-1fa631a717f6 service nova] [instance: 9e3ac23e-16ef-4626-817b-24683fd89b1d] Received unexpected event network-vif-plugged-26601205-e781-4ca7-852b-4a1f23fdbe75 for instance with vm_state building and task_state spawning. [ 1184.817386] env[63028]: DEBUG nova.network.neutron [None req-75e2161c-64b3-4ad9-b7fe-fdf4fadf47ea tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] [instance: 9e3ac23e-16ef-4626-817b-24683fd89b1d] Successfully updated port: 26601205-e781-4ca7-852b-4a1f23fdbe75 {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1185.001730] env[63028]: DEBUG nova.compute.manager [None req-787ada8a-f311-4d03-8fcb-45d2db2fbef3 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 092c7673-97fb-4085-852c-04a7c19a73e7] Start destroying the instance on the hypervisor. {{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1185.002072] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-787ada8a-f311-4d03-8fcb-45d2db2fbef3 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 092c7673-97fb-4085-852c-04a7c19a73e7] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1185.002393] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-14557f9f-0da6-4534-b11b-b11da812df8d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.009998] env[63028]: DEBUG oslo_vmware.api [None req-787ada8a-f311-4d03-8fcb-45d2db2fbef3 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Waiting for the task: (returnval){ [ 1185.009998] env[63028]: value = "task-2736471" [ 1185.009998] env[63028]: _type = "Task" [ 1185.009998] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1185.017864] env[63028]: DEBUG oslo_vmware.api [None req-787ada8a-f311-4d03-8fcb-45d2db2fbef3 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2736471, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1185.088251] env[63028]: DEBUG nova.scheduler.client.report [None req-8558b6d6-1286-489b-a0f2-6f310eb41f1f tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1185.320720] env[63028]: DEBUG oslo_concurrency.lockutils [None req-75e2161c-64b3-4ad9-b7fe-fdf4fadf47ea tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] Acquiring lock "refresh_cache-9e3ac23e-16ef-4626-817b-24683fd89b1d" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1185.320959] env[63028]: DEBUG oslo_concurrency.lockutils [None req-75e2161c-64b3-4ad9-b7fe-fdf4fadf47ea tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] Acquired lock "refresh_cache-9e3ac23e-16ef-4626-817b-24683fd89b1d" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1185.321052] env[63028]: DEBUG nova.network.neutron [None req-75e2161c-64b3-4ad9-b7fe-fdf4fadf47ea tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] [instance: 9e3ac23e-16ef-4626-817b-24683fd89b1d] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1185.519075] env[63028]: DEBUG oslo_vmware.api [None req-787ada8a-f311-4d03-8fcb-45d2db2fbef3 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2736471, 'name': PowerOffVM_Task, 'duration_secs': 0.185886} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1185.519342] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-787ada8a-f311-4d03-8fcb-45d2db2fbef3 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 092c7673-97fb-4085-852c-04a7c19a73e7] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1185.519540] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-787ada8a-f311-4d03-8fcb-45d2db2fbef3 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 092c7673-97fb-4085-852c-04a7c19a73e7] Volume detach. 
Driver type: vmdk {{(pid=63028) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1185.519730] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-787ada8a-f311-4d03-8fcb-45d2db2fbef3 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 092c7673-97fb-4085-852c-04a7c19a73e7] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-550871', 'volume_id': 'bc407df1-2bc3-4054-b5d6-b4b6863f2c75', 'name': 'volume-bc407df1-2bc3-4054-b5d6-b4b6863f2c75', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': '092c7673-97fb-4085-852c-04a7c19a73e7', 'attached_at': '2025-02-20T18:04:09.000000', 'detached_at': '', 'volume_id': 'bc407df1-2bc3-4054-b5d6-b4b6863f2c75', 'serial': 'bc407df1-2bc3-4054-b5d6-b4b6863f2c75'} {{(pid=63028) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1185.520464] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9683d73f-e963-462b-85da-cd48c2bbd4a5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.539225] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91418076-7c79-4eb7-bbfd-1e97e22af224 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.545384] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e2bca8c-6eca-4e93-8ff0-6b313baaac01 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.562123] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13129d2e-10de-4b8d-9368-837ab2f633d9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.575965] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-787ada8a-f311-4d03-8fcb-45d2db2fbef3 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] The volume has not been displaced from its original location: [datastore2] volume-bc407df1-2bc3-4054-b5d6-b4b6863f2c75/volume-bc407df1-2bc3-4054-b5d6-b4b6863f2c75.vmdk. No consolidation needed. 
{{(pid=63028) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1185.581008] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-787ada8a-f311-4d03-8fcb-45d2db2fbef3 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 092c7673-97fb-4085-852c-04a7c19a73e7] Reconfiguring VM instance instance-00000071 to detach disk 2000 {{(pid=63028) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1185.581271] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-00176c3b-262d-4e5d-9b7d-8781e52b40e6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.593908] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8558b6d6-1286-489b-a0f2-6f310eb41f1f tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.676s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1185.596078] env[63028]: DEBUG oslo_concurrency.lockutils [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.043s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1185.596293] env[63028]: DEBUG nova.objects.instance [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Lazy-loading 'pci_requests' on Instance uuid e5767896-8203-4b18-826f-dcb2fe02268e {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1185.602259] env[63028]: DEBUG oslo_vmware.api [None req-787ada8a-f311-4d03-8fcb-45d2db2fbef3 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Waiting for the task: (returnval){ [ 1185.602259] env[63028]: value = "task-2736472" [ 1185.602259] env[63028]: _type = "Task" [ 1185.602259] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1185.611220] env[63028]: DEBUG oslo_vmware.api [None req-787ada8a-f311-4d03-8fcb-45d2db2fbef3 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2736472, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1185.612141] env[63028]: INFO nova.scheduler.client.report [None req-8558b6d6-1286-489b-a0f2-6f310eb41f1f tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] Deleted allocations for instance 5c63c2bb-4725-4722-98e2-cd5c71944c4e [ 1185.728056] env[63028]: DEBUG oslo_concurrency.lockutils [None req-adf86c43-6e9c-4142-8088-5c5fd04a98e2 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Acquiring lock "670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1185.728056] env[63028]: DEBUG oslo_concurrency.lockutils [None req-adf86c43-6e9c-4142-8088-5c5fd04a98e2 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Lock "670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1185.831603] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ae0a6144-7800-4f89-9008-8ca5f7f7cec1 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Acquiring lock "b438b12e-874a-4883-b606-c28258e5a01a" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1185.831603] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ae0a6144-7800-4f89-9008-8ca5f7f7cec1 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Lock "b438b12e-874a-4883-b606-c28258e5a01a" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1185.831603] env[63028]: INFO nova.compute.manager [None req-ae0a6144-7800-4f89-9008-8ca5f7f7cec1 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Shelving [ 1185.850139] env[63028]: DEBUG nova.network.neutron [None req-75e2161c-64b3-4ad9-b7fe-fdf4fadf47ea tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] [instance: 9e3ac23e-16ef-4626-817b-24683fd89b1d] Instance cache missing network info. 
{{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1185.963890] env[63028]: DEBUG nova.network.neutron [None req-75e2161c-64b3-4ad9-b7fe-fdf4fadf47ea tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] [instance: 9e3ac23e-16ef-4626-817b-24683fd89b1d] Updating instance_info_cache with network_info: [{"id": "26601205-e781-4ca7-852b-4a1f23fdbe75", "address": "fa:16:3e:1e:df:c6", "network": {"id": "b04f468f-0dad-4e0e-ac6f-aa6f540c5aaf", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-1254291045-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1e5e7f6381c340d9b307742d4ec6b0ed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bde2f6cc-fb26-4d71-95a6-57d1ae1c4afd", "external-id": "nsx-vlan-transportzone-206", "segmentation_id": 206, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap26601205-e7", "ovs_interfaceid": "26601205-e781-4ca7-852b-4a1f23fdbe75", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1186.099879] env[63028]: DEBUG nova.objects.instance [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Lazy-loading 'numa_topology' on Instance uuid e5767896-8203-4b18-826f-dcb2fe02268e {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1186.112718] env[63028]: DEBUG oslo_vmware.api [None req-787ada8a-f311-4d03-8fcb-45d2db2fbef3 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2736472, 'name': ReconfigVM_Task, 'duration_secs': 0.169581} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1186.113123] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-787ada8a-f311-4d03-8fcb-45d2db2fbef3 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 092c7673-97fb-4085-852c-04a7c19a73e7] Reconfigured VM instance instance-00000071 to detach disk 2000 {{(pid=63028) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1186.119532] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-34bd82b6-ac14-4502-b792-77053577a65b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.129356] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8558b6d6-1286-489b-a0f2-6f310eb41f1f tempest-ServerAddressesNegativeTestJSON-951822499 tempest-ServerAddressesNegativeTestJSON-951822499-project-member] Lock "5c63c2bb-4725-4722-98e2-cd5c71944c4e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.285s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1186.135652] env[63028]: DEBUG oslo_vmware.api [None req-787ada8a-f311-4d03-8fcb-45d2db2fbef3 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Waiting for the task: (returnval){ [ 1186.135652] env[63028]: value = "task-2736473" [ 1186.135652] env[63028]: _type = "Task" [ 1186.135652] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1186.145559] env[63028]: DEBUG oslo_vmware.api [None req-787ada8a-f311-4d03-8fcb-45d2db2fbef3 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2736473, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.230168] env[63028]: DEBUG nova.compute.manager [None req-adf86c43-6e9c-4142-8088-5c5fd04a98e2 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4] Starting instance... 
{{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1186.466249] env[63028]: DEBUG oslo_concurrency.lockutils [None req-75e2161c-64b3-4ad9-b7fe-fdf4fadf47ea tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] Releasing lock "refresh_cache-9e3ac23e-16ef-4626-817b-24683fd89b1d" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1186.466636] env[63028]: DEBUG nova.compute.manager [None req-75e2161c-64b3-4ad9-b7fe-fdf4fadf47ea tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] [instance: 9e3ac23e-16ef-4626-817b-24683fd89b1d] Instance network_info: |[{"id": "26601205-e781-4ca7-852b-4a1f23fdbe75", "address": "fa:16:3e:1e:df:c6", "network": {"id": "b04f468f-0dad-4e0e-ac6f-aa6f540c5aaf", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-1254291045-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1e5e7f6381c340d9b307742d4ec6b0ed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bde2f6cc-fb26-4d71-95a6-57d1ae1c4afd", "external-id": "nsx-vlan-transportzone-206", "segmentation_id": 206, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap26601205-e7", "ovs_interfaceid": "26601205-e781-4ca7-852b-4a1f23fdbe75", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1186.467862] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-75e2161c-64b3-4ad9-b7fe-fdf4fadf47ea tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] [instance: 9e3ac23e-16ef-4626-817b-24683fd89b1d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1e:df:c6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'bde2f6cc-fb26-4d71-95a6-57d1ae1c4afd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '26601205-e781-4ca7-852b-4a1f23fdbe75', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1186.474956] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-75e2161c-64b3-4ad9-b7fe-fdf4fadf47ea tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] Creating folder: Project (1e5e7f6381c340d9b307742d4ec6b0ed). Parent ref: group-v550570. 
{{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1186.475223] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3040e8be-06e9-43fa-970d-c4a1d446f9a4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.486406] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-75e2161c-64b3-4ad9-b7fe-fdf4fadf47ea tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] Created folder: Project (1e5e7f6381c340d9b307742d4ec6b0ed) in parent group-v550570. [ 1186.486662] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-75e2161c-64b3-4ad9-b7fe-fdf4fadf47ea tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] Creating folder: Instances. Parent ref: group-v550891. {{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1186.487515] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3c4ffce6-9262-49f2-8463-c5f2dcf89983 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.496377] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-75e2161c-64b3-4ad9-b7fe-fdf4fadf47ea tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] Created folder: Instances in parent group-v550891. [ 1186.496681] env[63028]: DEBUG oslo.service.loopingcall [None req-75e2161c-64b3-4ad9-b7fe-fdf4fadf47ea tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1186.496925] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9e3ac23e-16ef-4626-817b-24683fd89b1d] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1186.497185] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0ab352de-420c-4f76-ae1d-f5a7144b027f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.520127] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1186.520127] env[63028]: value = "task-2736476" [ 1186.520127] env[63028]: _type = "Task" [ 1186.520127] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1186.527840] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2736476, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.602649] env[63028]: INFO nova.compute.claims [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1186.646915] env[63028]: DEBUG oslo_vmware.api [None req-787ada8a-f311-4d03-8fcb-45d2db2fbef3 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2736473, 'name': ReconfigVM_Task, 'duration_secs': 0.293655} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1186.648017] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-787ada8a-f311-4d03-8fcb-45d2db2fbef3 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 092c7673-97fb-4085-852c-04a7c19a73e7] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-550871', 'volume_id': 'bc407df1-2bc3-4054-b5d6-b4b6863f2c75', 'name': 'volume-bc407df1-2bc3-4054-b5d6-b4b6863f2c75', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': '092c7673-97fb-4085-852c-04a7c19a73e7', 'attached_at': '2025-02-20T18:04:09.000000', 'detached_at': '', 'volume_id': 'bc407df1-2bc3-4054-b5d6-b4b6863f2c75', 'serial': 'bc407df1-2bc3-4054-b5d6-b4b6863f2c75'} {{(pid=63028) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1186.648017] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-787ada8a-f311-4d03-8fcb-45d2db2fbef3 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 092c7673-97fb-4085-852c-04a7c19a73e7] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1186.648339] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6935c593-857d-4095-b0cb-f1fe104d187b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.655434] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-787ada8a-f311-4d03-8fcb-45d2db2fbef3 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 092c7673-97fb-4085-852c-04a7c19a73e7] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1186.655658] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-13e8e575-d20e-4ea8-ad22-c3f07008b365 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.716021] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-787ada8a-f311-4d03-8fcb-45d2db2fbef3 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 092c7673-97fb-4085-852c-04a7c19a73e7] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1186.716021] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-787ada8a-f311-4d03-8fcb-45d2db2fbef3 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 
092c7673-97fb-4085-852c-04a7c19a73e7] Deleting contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1186.716021] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-787ada8a-f311-4d03-8fcb-45d2db2fbef3 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Deleting the datastore file [datastore2] 092c7673-97fb-4085-852c-04a7c19a73e7 {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1186.716021] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-07f7d3df-f727-406c-8863-7a1482df3aed {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.720132] env[63028]: DEBUG nova.compute.manager [req-2f4efd90-04f9-47c2-aa62-a21f1953354b req-42dee676-7630-44f6-a522-3cfa950ef27e service nova] [instance: 9e3ac23e-16ef-4626-817b-24683fd89b1d] Received event network-changed-26601205-e781-4ca7-852b-4a1f23fdbe75 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1186.721178] env[63028]: DEBUG nova.compute.manager [req-2f4efd90-04f9-47c2-aa62-a21f1953354b req-42dee676-7630-44f6-a522-3cfa950ef27e service nova] [instance: 9e3ac23e-16ef-4626-817b-24683fd89b1d] Refreshing instance network info cache due to event network-changed-26601205-e781-4ca7-852b-4a1f23fdbe75. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1186.721178] env[63028]: DEBUG oslo_concurrency.lockutils [req-2f4efd90-04f9-47c2-aa62-a21f1953354b req-42dee676-7630-44f6-a522-3cfa950ef27e service nova] Acquiring lock "refresh_cache-9e3ac23e-16ef-4626-817b-24683fd89b1d" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1186.721178] env[63028]: DEBUG oslo_concurrency.lockutils [req-2f4efd90-04f9-47c2-aa62-a21f1953354b req-42dee676-7630-44f6-a522-3cfa950ef27e service nova] Acquired lock "refresh_cache-9e3ac23e-16ef-4626-817b-24683fd89b1d" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1186.721425] env[63028]: DEBUG nova.network.neutron [req-2f4efd90-04f9-47c2-aa62-a21f1953354b req-42dee676-7630-44f6-a522-3cfa950ef27e service nova] [instance: 9e3ac23e-16ef-4626-817b-24683fd89b1d] Refreshing network info cache for port 26601205-e781-4ca7-852b-4a1f23fdbe75 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1186.724196] env[63028]: DEBUG oslo_vmware.api [None req-787ada8a-f311-4d03-8fcb-45d2db2fbef3 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Waiting for the task: (returnval){ [ 1186.724196] env[63028]: value = "task-2736478" [ 1186.724196] env[63028]: _type = "Task" [ 1186.724196] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1186.736790] env[63028]: DEBUG oslo_vmware.api [None req-787ada8a-f311-4d03-8fcb-45d2db2fbef3 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2736478, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.756894] env[63028]: DEBUG oslo_concurrency.lockutils [None req-adf86c43-6e9c-4142-8088-5c5fd04a98e2 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1186.844261] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae0a6144-7800-4f89-9008-8ca5f7f7cec1 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1186.844261] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fd5261c1-7b68-456c-9b17-3809244fe2ff {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.849973] env[63028]: DEBUG oslo_vmware.api [None req-ae0a6144-7800-4f89-9008-8ca5f7f7cec1 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Waiting for the task: (returnval){ [ 1186.849973] env[63028]: value = "task-2736479" [ 1186.849973] env[63028]: _type = "Task" [ 1186.849973] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1186.861216] env[63028]: DEBUG oslo_vmware.api [None req-ae0a6144-7800-4f89-9008-8ca5f7f7cec1 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2736479, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.029753] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2736476, 'name': CreateVM_Task, 'duration_secs': 0.471686} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1187.029934] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9e3ac23e-16ef-4626-817b-24683fd89b1d] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1187.030664] env[63028]: DEBUG oslo_concurrency.lockutils [None req-75e2161c-64b3-4ad9-b7fe-fdf4fadf47ea tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1187.030855] env[63028]: DEBUG oslo_concurrency.lockutils [None req-75e2161c-64b3-4ad9-b7fe-fdf4fadf47ea tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1187.031168] env[63028]: DEBUG oslo_concurrency.lockutils [None req-75e2161c-64b3-4ad9-b7fe-fdf4fadf47ea tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1187.031420] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-265fa7c3-2d91-44f3-a2b3-764ca5dc72d1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.035848] env[63028]: DEBUG oslo_vmware.api [None req-75e2161c-64b3-4ad9-b7fe-fdf4fadf47ea tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] Waiting for the task: (returnval){ [ 1187.035848] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52ee8140-f1d4-4e05-20ce-71490c83a941" [ 1187.035848] env[63028]: _type = "Task" [ 1187.035848] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1187.043444] env[63028]: DEBUG oslo_vmware.api [None req-75e2161c-64b3-4ad9-b7fe-fdf4fadf47ea tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52ee8140-f1d4-4e05-20ce-71490c83a941, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.237769] env[63028]: DEBUG oslo_vmware.api [None req-787ada8a-f311-4d03-8fcb-45d2db2fbef3 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2736478, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.079156} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1187.238057] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-787ada8a-f311-4d03-8fcb-45d2db2fbef3 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1187.238253] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-787ada8a-f311-4d03-8fcb-45d2db2fbef3 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 092c7673-97fb-4085-852c-04a7c19a73e7] Deleted contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1187.238461] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-787ada8a-f311-4d03-8fcb-45d2db2fbef3 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 092c7673-97fb-4085-852c-04a7c19a73e7] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1187.238674] env[63028]: INFO nova.compute.manager [None req-787ada8a-f311-4d03-8fcb-45d2db2fbef3 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 092c7673-97fb-4085-852c-04a7c19a73e7] Took 2.24 seconds to destroy the instance on the hypervisor. [ 1187.238926] env[63028]: DEBUG oslo.service.loopingcall [None req-787ada8a-f311-4d03-8fcb-45d2db2fbef3 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1187.239132] env[63028]: DEBUG nova.compute.manager [-] [instance: 092c7673-97fb-4085-852c-04a7c19a73e7] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1187.239223] env[63028]: DEBUG nova.network.neutron [-] [instance: 092c7673-97fb-4085-852c-04a7c19a73e7] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1187.361957] env[63028]: DEBUG oslo_vmware.api [None req-ae0a6144-7800-4f89-9008-8ca5f7f7cec1 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2736479, 'name': PowerOffVM_Task, 'duration_secs': 0.175326} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1187.361957] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae0a6144-7800-4f89-9008-8ca5f7f7cec1 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1187.361957] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-770326d2-7f2c-44fa-893b-77d8960c5e2e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.386416] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28451208-31f8-4a95-8ac8-98bc4ffbfbaf {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.549835] env[63028]: DEBUG oslo_vmware.api [None req-75e2161c-64b3-4ad9-b7fe-fdf4fadf47ea tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52ee8140-f1d4-4e05-20ce-71490c83a941, 'name': SearchDatastore_Task, 'duration_secs': 0.027953} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1187.550185] env[63028]: DEBUG oslo_concurrency.lockutils [None req-75e2161c-64b3-4ad9-b7fe-fdf4fadf47ea tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1187.550381] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-75e2161c-64b3-4ad9-b7fe-fdf4fadf47ea tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] [instance: 9e3ac23e-16ef-4626-817b-24683fd89b1d] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1187.550615] env[63028]: DEBUG oslo_concurrency.lockutils [None req-75e2161c-64b3-4ad9-b7fe-fdf4fadf47ea tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1187.550761] env[63028]: DEBUG oslo_concurrency.lockutils [None req-75e2161c-64b3-4ad9-b7fe-fdf4fadf47ea tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1187.550939] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-75e2161c-64b3-4ad9-b7fe-fdf4fadf47ea tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] Creating directory with path [datastore1] 
devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1187.551224] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-de80a4ad-257a-4967-83a3-430089ad0feb {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.559677] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-75e2161c-64b3-4ad9-b7fe-fdf4fadf47ea tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1187.559913] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-75e2161c-64b3-4ad9-b7fe-fdf4fadf47ea tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1187.560425] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-864ecfaa-11fb-441e-bd10-5601e3175954 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.566133] env[63028]: DEBUG oslo_vmware.api [None req-75e2161c-64b3-4ad9-b7fe-fdf4fadf47ea tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] Waiting for the task: (returnval){ [ 1187.566133] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52a38992-703f-09ff-d49c-dc5bfb1c402b" [ 1187.566133] env[63028]: _type = "Task" [ 1187.566133] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1187.574078] env[63028]: DEBUG oslo_vmware.api [None req-75e2161c-64b3-4ad9-b7fe-fdf4fadf47ea tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52a38992-703f-09ff-d49c-dc5bfb1c402b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.629554] env[63028]: DEBUG nova.network.neutron [req-2f4efd90-04f9-47c2-aa62-a21f1953354b req-42dee676-7630-44f6-a522-3cfa950ef27e service nova] [instance: 9e3ac23e-16ef-4626-817b-24683fd89b1d] Updated VIF entry in instance network info cache for port 26601205-e781-4ca7-852b-4a1f23fdbe75. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1187.629554] env[63028]: DEBUG nova.network.neutron [req-2f4efd90-04f9-47c2-aa62-a21f1953354b req-42dee676-7630-44f6-a522-3cfa950ef27e service nova] [instance: 9e3ac23e-16ef-4626-817b-24683fd89b1d] Updating instance_info_cache with network_info: [{"id": "26601205-e781-4ca7-852b-4a1f23fdbe75", "address": "fa:16:3e:1e:df:c6", "network": {"id": "b04f468f-0dad-4e0e-ac6f-aa6f540c5aaf", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-1254291045-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1e5e7f6381c340d9b307742d4ec6b0ed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bde2f6cc-fb26-4d71-95a6-57d1ae1c4afd", "external-id": "nsx-vlan-transportzone-206", "segmentation_id": 206, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap26601205-e7", "ovs_interfaceid": "26601205-e781-4ca7-852b-4a1f23fdbe75", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1187.753428] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e5043cf-25ad-4138-9357-780315471107 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.761148] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98c819d4-97e6-49b4-ba33-b09d11a825de {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.793547] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09300a46-f8c1-4f8a-8559-75fc2a4ed988 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.801077] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a520f6d4-fb8b-44a6-b4d4-50ea0d57cdcd {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.816393] env[63028]: DEBUG nova.compute.provider_tree [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1187.899502] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-ae0a6144-7800-4f89-9008-8ca5f7f7cec1 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Creating Snapshot of the VM instance {{(pid=63028) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1187.899822] env[63028]: DEBUG oslo_vmware.service [-] Invoking 
VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-b8dd001f-228b-4bd3-bfd8-f908c5b153dd {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.908280] env[63028]: DEBUG oslo_vmware.api [None req-ae0a6144-7800-4f89-9008-8ca5f7f7cec1 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Waiting for the task: (returnval){ [ 1187.908280] env[63028]: value = "task-2736480" [ 1187.908280] env[63028]: _type = "Task" [ 1187.908280] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1187.918298] env[63028]: DEBUG oslo_vmware.api [None req-ae0a6144-7800-4f89-9008-8ca5f7f7cec1 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2736480, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.075565] env[63028]: DEBUG oslo_vmware.api [None req-75e2161c-64b3-4ad9-b7fe-fdf4fadf47ea tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52a38992-703f-09ff-d49c-dc5bfb1c402b, 'name': SearchDatastore_Task, 'duration_secs': 0.010433} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1188.076403] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8729c339-245b-4b89-84cb-e7f908fac813 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.081463] env[63028]: DEBUG oslo_vmware.api [None req-75e2161c-64b3-4ad9-b7fe-fdf4fadf47ea tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] Waiting for the task: (returnval){ [ 1188.081463] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52c738fe-d399-e729-3ffa-528b9e6eaa75" [ 1188.081463] env[63028]: _type = "Task" [ 1188.081463] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1188.089132] env[63028]: DEBUG oslo_vmware.api [None req-75e2161c-64b3-4ad9-b7fe-fdf4fadf47ea tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52c738fe-d399-e729-3ffa-528b9e6eaa75, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.131866] env[63028]: DEBUG oslo_concurrency.lockutils [req-2f4efd90-04f9-47c2-aa62-a21f1953354b req-42dee676-7630-44f6-a522-3cfa950ef27e service nova] Releasing lock "refresh_cache-9e3ac23e-16ef-4626-817b-24683fd89b1d" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1188.321025] env[63028]: DEBUG nova.scheduler.client.report [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1188.369145] env[63028]: DEBUG nova.network.neutron [-] [instance: 092c7673-97fb-4085-852c-04a7c19a73e7] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1188.418762] env[63028]: DEBUG oslo_vmware.api [None req-ae0a6144-7800-4f89-9008-8ca5f7f7cec1 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2736480, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.593626] env[63028]: DEBUG oslo_vmware.api [None req-75e2161c-64b3-4ad9-b7fe-fdf4fadf47ea tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52c738fe-d399-e729-3ffa-528b9e6eaa75, 'name': SearchDatastore_Task, 'duration_secs': 0.012303} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1188.593626] env[63028]: DEBUG oslo_concurrency.lockutils [None req-75e2161c-64b3-4ad9-b7fe-fdf4fadf47ea tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1188.594106] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-75e2161c-64b3-4ad9-b7fe-fdf4fadf47ea tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] 9e3ac23e-16ef-4626-817b-24683fd89b1d/9e3ac23e-16ef-4626-817b-24683fd89b1d.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1188.594106] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-61b1ad67-afc8-4010-8072-4b86b45a45ca {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.601157] env[63028]: DEBUG oslo_vmware.api [None req-75e2161c-64b3-4ad9-b7fe-fdf4fadf47ea tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] Waiting for the task: (returnval){ [ 1188.601157] env[63028]: value = "task-2736481" [ 1188.601157] env[63028]: _type = "Task" [ 1188.601157] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1188.608715] env[63028]: DEBUG oslo_vmware.api [None req-75e2161c-64b3-4ad9-b7fe-fdf4fadf47ea tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] Task: {'id': task-2736481, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.800633] env[63028]: DEBUG nova.compute.manager [req-573ee5ff-e1dd-48f8-80ec-87243eeaf5ab req-ad882298-3697-41e8-a259-bdca03064bba service nova] [instance: 092c7673-97fb-4085-852c-04a7c19a73e7] Received event network-vif-deleted-eda2613a-55b1-4516-80ce-192d52a6abe6 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1188.826550] env[63028]: DEBUG oslo_concurrency.lockutils [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.230s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1188.829161] env[63028]: DEBUG oslo_concurrency.lockutils [None req-adf86c43-6e9c-4142-8088-5c5fd04a98e2 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.072s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1188.830811] env[63028]: INFO nova.compute.claims [None req-adf86c43-6e9c-4142-8088-5c5fd04a98e2 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1188.865302] env[63028]: INFO nova.network.neutron [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Updating port f16f5758-9834-448c-8002-199fff053deb with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1188.871679] env[63028]: INFO nova.compute.manager [-] [instance: 092c7673-97fb-4085-852c-04a7c19a73e7] Took 1.63 seconds to deallocate network for instance. [ 1188.920884] env[63028]: DEBUG oslo_vmware.api [None req-ae0a6144-7800-4f89-9008-8ca5f7f7cec1 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2736480, 'name': CreateSnapshot_Task, 'duration_secs': 0.80677} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1188.921183] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-ae0a6144-7800-4f89-9008-8ca5f7f7cec1 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Created Snapshot of the VM instance {{(pid=63028) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1188.921963] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d330ae0-66b4-425c-9004-ab69eda02023 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.112603] env[63028]: DEBUG oslo_vmware.api [None req-75e2161c-64b3-4ad9-b7fe-fdf4fadf47ea tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] Task: {'id': task-2736481, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.484835} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1189.112900] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-75e2161c-64b3-4ad9-b7fe-fdf4fadf47ea tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] 9e3ac23e-16ef-4626-817b-24683fd89b1d/9e3ac23e-16ef-4626-817b-24683fd89b1d.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1189.113321] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-75e2161c-64b3-4ad9-b7fe-fdf4fadf47ea tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] [instance: 9e3ac23e-16ef-4626-817b-24683fd89b1d] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1189.113608] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-885c96f2-e20a-4423-937b-523b92e20449 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.120082] env[63028]: DEBUG oslo_vmware.api [None req-75e2161c-64b3-4ad9-b7fe-fdf4fadf47ea tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] Waiting for the task: (returnval){ [ 1189.120082] env[63028]: value = "task-2736482" [ 1189.120082] env[63028]: _type = "Task" [ 1189.120082] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1189.128302] env[63028]: DEBUG oslo_vmware.api [None req-75e2161c-64b3-4ad9-b7fe-fdf4fadf47ea tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] Task: {'id': task-2736482, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.419195] env[63028]: INFO nova.compute.manager [None req-787ada8a-f311-4d03-8fcb-45d2db2fbef3 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 092c7673-97fb-4085-852c-04a7c19a73e7] Took 0.55 seconds to detach 1 volumes for instance. 
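The repeated "Task: {'id': ..., 'name': ...} progress is N%" records above (SearchDatastore_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, CreateSnapshot_Task) come from oslo.vmware polling a vCenter task object until it reaches a terminal state. The sketch below is a minimal illustration of that polling idiom, not the oslo.vmware implementation; get_task_info is a hypothetical callable standing in for the PropertyCollector lookup that fetches the task's info property.

    import time


    class TaskFailed(Exception):
        """Raised when vCenter reports the task in an 'error' state."""


    def wait_for_task(get_task_info, poll_interval=0.5, log=print):
        """Poll a vCenter task until it succeeds or fails.

        get_task_info is a hypothetical callable returning an object with
        'state' ('queued' | 'running' | 'success' | 'error'), 'progress'
        (0-100) and, on failure, an 'error' attribute -- roughly the shape
        of the TaskInfo object the progress records above are printed from.
        """
        while True:
            info = get_task_info()
            if info.state in ("queued", "running"):
                log("progress is %s%%" % (info.progress or 0))
                time.sleep(poll_interval)
                continue
            if info.state == "success":
                log("completed successfully")
                return info
            raise TaskFailed(getattr(info, "error", "unknown error"))

In the trace above, the "progress is 0%" records and the "completed successfully ... duration_secs" records are both emitted from _poll_task (api.py:434 and api.py:444), which is the loop this sketch approximates.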
[ 1189.421392] env[63028]: DEBUG nova.compute.manager [None req-787ada8a-f311-4d03-8fcb-45d2db2fbef3 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 092c7673-97fb-4085-852c-04a7c19a73e7] Deleting volume: bc407df1-2bc3-4054-b5d6-b4b6863f2c75 {{(pid=63028) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3282}} [ 1189.440145] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-ae0a6144-7800-4f89-9008-8ca5f7f7cec1 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Creating linked-clone VM from snapshot {{(pid=63028) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1189.440253] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-d2df1af1-e6cc-48f4-b346-68e1fe330eaa {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.448408] env[63028]: DEBUG oslo_vmware.api [None req-ae0a6144-7800-4f89-9008-8ca5f7f7cec1 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Waiting for the task: (returnval){ [ 1189.448408] env[63028]: value = "task-2736483" [ 1189.448408] env[63028]: _type = "Task" [ 1189.448408] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1189.456823] env[63028]: DEBUG oslo_vmware.api [None req-ae0a6144-7800-4f89-9008-8ca5f7f7cec1 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2736483, 'name': CloneVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.629377] env[63028]: DEBUG oslo_vmware.api [None req-75e2161c-64b3-4ad9-b7fe-fdf4fadf47ea tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] Task: {'id': task-2736482, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070403} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1189.629678] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-75e2161c-64b3-4ad9-b7fe-fdf4fadf47ea tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] [instance: 9e3ac23e-16ef-4626-817b-24683fd89b1d] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1189.630403] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb80b6b0-98b2-4caa-92e4-ac00d578074e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.652047] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-75e2161c-64b3-4ad9-b7fe-fdf4fadf47ea tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] [instance: 9e3ac23e-16ef-4626-817b-24683fd89b1d] Reconfiguring VM instance instance-00000078 to attach disk [datastore1] 9e3ac23e-16ef-4626-817b-24683fd89b1d/9e3ac23e-16ef-4626-817b-24683fd89b1d.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1189.652328] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8406c438-c118-44ca-9f90-d73e4857bee9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.670901] env[63028]: DEBUG oslo_vmware.api [None req-75e2161c-64b3-4ad9-b7fe-fdf4fadf47ea tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] Waiting for the task: (returnval){ [ 1189.670901] env[63028]: value = "task-2736485" [ 1189.670901] env[63028]: _type = "Task" [ 1189.670901] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1189.678880] env[63028]: DEBUG oslo_vmware.api [None req-75e2161c-64b3-4ad9-b7fe-fdf4fadf47ea tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] Task: {'id': task-2736485, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.960126] env[63028]: DEBUG oslo_vmware.api [None req-ae0a6144-7800-4f89-9008-8ca5f7f7cec1 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2736483, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.962164] env[63028]: DEBUG oslo_concurrency.lockutils [None req-787ada8a-f311-4d03-8fcb-45d2db2fbef3 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1189.962992] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9867dceb-adc1-4d2a-bcaf-04b298463b96 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.970342] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b023c077-9f0e-411b-a28e-b966fd543747 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.001846] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a62f7749-3942-45ac-9d61-a9705144da19 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.010163] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53a448a6-1292-4eea-87aa-b84bee19ebb6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.024010] env[63028]: DEBUG nova.compute.provider_tree [None req-adf86c43-6e9c-4142-8088-5c5fd04a98e2 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1190.180652] env[63028]: DEBUG oslo_vmware.api [None req-75e2161c-64b3-4ad9-b7fe-fdf4fadf47ea tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] Task: {'id': task-2736485, 'name': ReconfigVM_Task, 'duration_secs': 0.313794} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1190.180946] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-75e2161c-64b3-4ad9-b7fe-fdf4fadf47ea tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] [instance: 9e3ac23e-16ef-4626-817b-24683fd89b1d] Reconfigured VM instance instance-00000078 to attach disk [datastore1] 9e3ac23e-16ef-4626-817b-24683fd89b1d/9e3ac23e-16ef-4626-817b-24683fd89b1d.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1190.181599] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d8a7ec63-7462-411c-8482-bd6342abcb88 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.189055] env[63028]: DEBUG oslo_vmware.api [None req-75e2161c-64b3-4ad9-b7fe-fdf4fadf47ea tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] Waiting for the task: (returnval){ [ 1190.189055] env[63028]: value = "task-2736486" [ 1190.189055] env[63028]: _type = "Task" [ 1190.189055] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1190.197118] env[63028]: DEBUG oslo_vmware.api [None req-75e2161c-64b3-4ad9-b7fe-fdf4fadf47ea tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] Task: {'id': task-2736486, 'name': Rename_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.397499] env[63028]: DEBUG oslo_concurrency.lockutils [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Acquiring lock "refresh_cache-e5767896-8203-4b18-826f-dcb2fe02268e" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1190.397787] env[63028]: DEBUG oslo_concurrency.lockutils [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Acquired lock "refresh_cache-e5767896-8203-4b18-826f-dcb2fe02268e" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1190.397994] env[63028]: DEBUG nova.network.neutron [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1190.460274] env[63028]: DEBUG oslo_vmware.api [None req-ae0a6144-7800-4f89-9008-8ca5f7f7cec1 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2736483, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.526964] env[63028]: DEBUG nova.scheduler.client.report [None req-adf86c43-6e9c-4142-8088-5c5fd04a98e2 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1190.699742] env[63028]: DEBUG oslo_vmware.api [None req-75e2161c-64b3-4ad9-b7fe-fdf4fadf47ea tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] Task: {'id': task-2736486, 'name': Rename_Task, 'duration_secs': 0.142558} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1190.700053] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-75e2161c-64b3-4ad9-b7fe-fdf4fadf47ea tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] [instance: 9e3ac23e-16ef-4626-817b-24683fd89b1d] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1190.700292] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6aec68c4-ac77-4cd9-b7c2-8e425528019d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.706917] env[63028]: DEBUG oslo_vmware.api [None req-75e2161c-64b3-4ad9-b7fe-fdf4fadf47ea tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] Waiting for the task: (returnval){ [ 1190.706917] env[63028]: value = "task-2736487" [ 1190.706917] env[63028]: _type = "Task" [ 1190.706917] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1190.714613] env[63028]: DEBUG oslo_vmware.api [None req-75e2161c-64b3-4ad9-b7fe-fdf4fadf47ea tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] Task: {'id': task-2736487, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.829939] env[63028]: DEBUG nova.compute.manager [req-d7e90b4f-f135-4def-ba12-5c5bf5e5f4c4 req-4738f8f1-a0bf-4670-9313-3c3f26a44ba0 service nova] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Received event network-vif-plugged-f16f5758-9834-448c-8002-199fff053deb {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1190.830320] env[63028]: DEBUG oslo_concurrency.lockutils [req-d7e90b4f-f135-4def-ba12-5c5bf5e5f4c4 req-4738f8f1-a0bf-4670-9313-3c3f26a44ba0 service nova] Acquiring lock "e5767896-8203-4b18-826f-dcb2fe02268e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1190.830606] env[63028]: DEBUG oslo_concurrency.lockutils [req-d7e90b4f-f135-4def-ba12-5c5bf5e5f4c4 req-4738f8f1-a0bf-4670-9313-3c3f26a44ba0 service nova] Lock "e5767896-8203-4b18-826f-dcb2fe02268e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1190.830853] env[63028]: DEBUG oslo_concurrency.lockutils [req-d7e90b4f-f135-4def-ba12-5c5bf5e5f4c4 req-4738f8f1-a0bf-4670-9313-3c3f26a44ba0 service nova] Lock "e5767896-8203-4b18-826f-dcb2fe02268e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1190.831117] env[63028]: DEBUG nova.compute.manager [req-d7e90b4f-f135-4def-ba12-5c5bf5e5f4c4 req-4738f8f1-a0bf-4670-9313-3c3f26a44ba0 service nova] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] No waiting events found dispatching network-vif-plugged-f16f5758-9834-448c-8002-199fff053deb {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1190.831364] env[63028]: WARNING nova.compute.manager [req-d7e90b4f-f135-4def-ba12-5c5bf5e5f4c4 req-4738f8f1-a0bf-4670-9313-3c3f26a44ba0 service nova] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Received unexpected event network-vif-plugged-f16f5758-9834-448c-8002-199fff053deb for instance with vm_state shelved_offloaded and task_state spawning. [ 1190.831613] env[63028]: DEBUG nova.compute.manager [req-d7e90b4f-f135-4def-ba12-5c5bf5e5f4c4 req-4738f8f1-a0bf-4670-9313-3c3f26a44ba0 service nova] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Received event network-changed-f16f5758-9834-448c-8002-199fff053deb {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1190.831858] env[63028]: DEBUG nova.compute.manager [req-d7e90b4f-f135-4def-ba12-5c5bf5e5f4c4 req-4738f8f1-a0bf-4670-9313-3c3f26a44ba0 service nova] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Refreshing instance network info cache due to event network-changed-f16f5758-9834-448c-8002-199fff053deb. 
{{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1190.832123] env[63028]: DEBUG oslo_concurrency.lockutils [req-d7e90b4f-f135-4def-ba12-5c5bf5e5f4c4 req-4738f8f1-a0bf-4670-9313-3c3f26a44ba0 service nova] Acquiring lock "refresh_cache-e5767896-8203-4b18-826f-dcb2fe02268e" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1190.963315] env[63028]: DEBUG oslo_vmware.api [None req-ae0a6144-7800-4f89-9008-8ca5f7f7cec1 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2736483, 'name': CloneVM_Task, 'duration_secs': 1.462944} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1190.963582] env[63028]: INFO nova.virt.vmwareapi.vmops [None req-ae0a6144-7800-4f89-9008-8ca5f7f7cec1 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Created linked-clone VM from snapshot [ 1190.964320] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39ec2bf6-b231-4176-977b-331085fe5c18 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.971539] env[63028]: DEBUG nova.virt.vmwareapi.images [None req-ae0a6144-7800-4f89-9008-8ca5f7f7cec1 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Uploading image 52137ff1-e088-4c15-85e2-e0e3091166a2 {{(pid=63028) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1190.997613] env[63028]: DEBUG oslo_vmware.rw_handles [None req-ae0a6144-7800-4f89-9008-8ca5f7f7cec1 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1190.997613] env[63028]: value = "vm-550895" [ 1190.997613] env[63028]: _type = "VirtualMachine" [ 1190.997613] env[63028]: }. {{(pid=63028) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1190.997881] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-67bda1a9-be51-4c70-a848-b24a20d10b2d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.004442] env[63028]: DEBUG oslo_vmware.rw_handles [None req-ae0a6144-7800-4f89-9008-8ca5f7f7cec1 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Lease: (returnval){ [ 1191.004442] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]528050e9-3dcf-0241-339c-727d396c3464" [ 1191.004442] env[63028]: _type = "HttpNfcLease" [ 1191.004442] env[63028]: } obtained for exporting VM: (result){ [ 1191.004442] env[63028]: value = "vm-550895" [ 1191.004442] env[63028]: _type = "VirtualMachine" [ 1191.004442] env[63028]: }. 
{{(pid=63028) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1191.004798] env[63028]: DEBUG oslo_vmware.api [None req-ae0a6144-7800-4f89-9008-8ca5f7f7cec1 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Waiting for the lease: (returnval){ [ 1191.004798] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]528050e9-3dcf-0241-339c-727d396c3464" [ 1191.004798] env[63028]: _type = "HttpNfcLease" [ 1191.004798] env[63028]: } to be ready. {{(pid=63028) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1191.011277] env[63028]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1191.011277] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]528050e9-3dcf-0241-339c-727d396c3464" [ 1191.011277] env[63028]: _type = "HttpNfcLease" [ 1191.011277] env[63028]: } is initializing. {{(pid=63028) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1191.032466] env[63028]: DEBUG oslo_concurrency.lockutils [None req-adf86c43-6e9c-4142-8088-5c5fd04a98e2 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.203s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1191.032762] env[63028]: DEBUG nova.compute.manager [None req-adf86c43-6e9c-4142-8088-5c5fd04a98e2 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4] Start building networks asynchronously for instance. 
{{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1191.038204] env[63028]: DEBUG oslo_concurrency.lockutils [None req-787ada8a-f311-4d03-8fcb-45d2db2fbef3 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.076s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1191.038204] env[63028]: DEBUG nova.objects.instance [None req-787ada8a-f311-4d03-8fcb-45d2db2fbef3 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Lazy-loading 'resources' on Instance uuid 092c7673-97fb-4085-852c-04a7c19a73e7 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1191.143628] env[63028]: DEBUG nova.network.neutron [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Updating instance_info_cache with network_info: [{"id": "f16f5758-9834-448c-8002-199fff053deb", "address": "fa:16:3e:95:4e:63", "network": {"id": "2b335013-49f6-4f84-b6b6-33d71818b2b9", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-2106845332-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.251", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "11332c2adbdc41928d4bf084435e2037", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f01bbee7-8b9a-46be-891e-59d8142fb359", "external-id": "nsx-vlan-transportzone-145", "segmentation_id": 145, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf16f5758-98", "ovs_interfaceid": "f16f5758-9834-448c-8002-199fff053deb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1191.216755] env[63028]: DEBUG oslo_vmware.api [None req-75e2161c-64b3-4ad9-b7fe-fdf4fadf47ea tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] Task: {'id': task-2736487, 'name': PowerOnVM_Task, 'duration_secs': 0.463887} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1191.216964] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-75e2161c-64b3-4ad9-b7fe-fdf4fadf47ea tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] [instance: 9e3ac23e-16ef-4626-817b-24683fd89b1d] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1191.217193] env[63028]: INFO nova.compute.manager [None req-75e2161c-64b3-4ad9-b7fe-fdf4fadf47ea tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] [instance: 9e3ac23e-16ef-4626-817b-24683fd89b1d] Took 6.99 seconds to spawn the instance on the hypervisor. [ 1191.217377] env[63028]: DEBUG nova.compute.manager [None req-75e2161c-64b3-4ad9-b7fe-fdf4fadf47ea tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] [instance: 9e3ac23e-16ef-4626-817b-24683fd89b1d] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1191.218115] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3af943a7-252b-45dc-9308-361d031e6068 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.512608] env[63028]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1191.512608] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]528050e9-3dcf-0241-339c-727d396c3464" [ 1191.512608] env[63028]: _type = "HttpNfcLease" [ 1191.512608] env[63028]: } is ready. {{(pid=63028) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1191.512926] env[63028]: DEBUG oslo_vmware.rw_handles [None req-ae0a6144-7800-4f89-9008-8ca5f7f7cec1 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1191.512926] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]528050e9-3dcf-0241-339c-727d396c3464" [ 1191.512926] env[63028]: _type = "HttpNfcLease" [ 1191.512926] env[63028]: }. {{(pid=63028) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1191.514925] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-742d34ed-9f90-4c0b-accc-ae306fd9055a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.521152] env[63028]: DEBUG oslo_vmware.rw_handles [None req-ae0a6144-7800-4f89-9008-8ca5f7f7cec1 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52e5ef95-691f-f337-91c4-9ce68f8e6830/disk-0.vmdk from lease info. {{(pid=63028) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1191.521325] env[63028]: DEBUG oslo_vmware.rw_handles [None req-ae0a6144-7800-4f89-9008-8ca5f7f7cec1 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52e5ef95-691f-f337-91c4-9ce68f8e6830/disk-0.vmdk for reading. 
{{(pid=63028) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1191.579343] env[63028]: DEBUG nova.compute.utils [None req-adf86c43-6e9c-4142-8088-5c5fd04a98e2 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1191.585327] env[63028]: DEBUG nova.compute.manager [None req-adf86c43-6e9c-4142-8088-5c5fd04a98e2 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4] Allocating IP information in the background. {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1191.585515] env[63028]: DEBUG nova.network.neutron [None req-adf86c43-6e9c-4142-8088-5c5fd04a98e2 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1191.618892] env[63028]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-23aa1d66-d6b9-4a04-ab85-584f1a3198d2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.623480] env[63028]: DEBUG nova.policy [None req-adf86c43-6e9c-4142-8088-5c5fd04a98e2 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '25218cd4756d409c9fee41c970fb2d32', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e85128c5c889438bbb1df571b7756c6a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 1191.652102] env[63028]: DEBUG oslo_concurrency.lockutils [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Releasing lock "refresh_cache-e5767896-8203-4b18-826f-dcb2fe02268e" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1191.656944] env[63028]: DEBUG oslo_concurrency.lockutils [req-d7e90b4f-f135-4def-ba12-5c5bf5e5f4c4 req-4738f8f1-a0bf-4670-9313-3c3f26a44ba0 service nova] Acquired lock "refresh_cache-e5767896-8203-4b18-826f-dcb2fe02268e" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1191.657159] env[63028]: DEBUG nova.network.neutron [req-d7e90b4f-f135-4def-ba12-5c5bf5e5f4c4 req-4738f8f1-a0bf-4670-9313-3c3f26a44ba0 service nova] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Refreshing network info cache for port f16f5758-9834-448c-8002-199fff053deb {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1191.678350] env[63028]: DEBUG nova.virt.hardware [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Getting desirable topologies for flavor 
Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='485498f5f48c3bdf581dc842a508bd58',container_format='bare',created_at=2025-02-20T18:04:21Z,direct_url=,disk_format='vmdk',id=792ecd02-edc1-4227-9a1b-93345040c770,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-266816550-shelved',owner='11332c2adbdc41928d4bf084435e2037',properties=ImageMetaProps,protected=,size=31667200,status='active',tags=,updated_at=2025-02-20T18:04:35Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1191.678595] env[63028]: DEBUG nova.virt.hardware [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1191.678757] env[63028]: DEBUG nova.virt.hardware [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1191.678936] env[63028]: DEBUG nova.virt.hardware [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1191.679093] env[63028]: DEBUG nova.virt.hardware [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1191.679246] env[63028]: DEBUG nova.virt.hardware [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1191.679456] env[63028]: DEBUG nova.virt.hardware [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1191.679619] env[63028]: DEBUG nova.virt.hardware [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1191.679787] env[63028]: DEBUG nova.virt.hardware [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:505}} [ 1191.679948] env[63028]: DEBUG nova.virt.hardware [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1191.680138] env[63028]: DEBUG nova.virt.hardware [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1191.680991] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8583bdca-4216-4111-bb22-156c11c57ac6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.690729] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-785c2a0a-c433-48c0-bf34-9e1073be6715 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.706068] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:95:4e:63', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f01bbee7-8b9a-46be-891e-59d8142fb359', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f16f5758-9834-448c-8002-199fff053deb', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1191.713199] env[63028]: DEBUG oslo.service.loopingcall [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1191.714208] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1191.714883] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2acefbc-f91d-4d60-8c29-bcd3b9196347 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.717227] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c39f75df-864a-4f62-a882-cc1edd4660f0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.741632] env[63028]: INFO nova.compute.manager [None req-75e2161c-64b3-4ad9-b7fe-fdf4fadf47ea tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] [instance: 9e3ac23e-16ef-4626-817b-24683fd89b1d] Took 13.50 seconds to build instance. 
[ 1191.743246] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63f94a91-699c-475d-8999-dabeed20246d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.746445] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1191.746445] env[63028]: value = "task-2736489" [ 1191.746445] env[63028]: _type = "Task" [ 1191.746445] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1191.776716] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4472b471-a068-4ad1-9f82-4fe1c409eac3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.783515] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2736489, 'name': CreateVM_Task} progress is 25%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.789382] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-844abb60-aca3-4e7c-8a1f-bca0cc2e0151 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.804388] env[63028]: DEBUG nova.compute.provider_tree [None req-787ada8a-f311-4d03-8fcb-45d2db2fbef3 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1191.939154] env[63028]: DEBUG nova.network.neutron [None req-adf86c43-6e9c-4142-8088-5c5fd04a98e2 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4] Successfully created port: a4f28208-8404-4dcc-a133-bd9f94ad027c {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1192.083336] env[63028]: DEBUG nova.compute.manager [None req-adf86c43-6e9c-4142-8088-5c5fd04a98e2 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4] Start building block device mappings for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1192.248190] env[63028]: DEBUG oslo_concurrency.lockutils [None req-75e2161c-64b3-4ad9-b7fe-fdf4fadf47ea tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] Lock "9e3ac23e-16ef-4626-817b-24683fd89b1d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.016s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1192.262277] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2736489, 'name': CreateVM_Task, 'duration_secs': 0.294342} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1192.262685] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1192.263357] env[63028]: DEBUG oslo_concurrency.lockutils [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/792ecd02-edc1-4227-9a1b-93345040c770" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1192.263590] env[63028]: DEBUG oslo_concurrency.lockutils [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Acquired lock "[datastore2] devstack-image-cache_base/792ecd02-edc1-4227-9a1b-93345040c770" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1192.264231] env[63028]: DEBUG oslo_concurrency.lockutils [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/792ecd02-edc1-4227-9a1b-93345040c770" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1192.265041] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3c0502e1-51f9-4e5e-84ef-54f7ea63b96f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.274251] env[63028]: DEBUG oslo_vmware.api [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Waiting for the task: (returnval){ [ 1192.274251] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52dbaa29-3a37-d805-1b57-b41d39aa2210" [ 1192.274251] env[63028]: _type = "Task" [ 1192.274251] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1192.286282] env[63028]: DEBUG oslo_concurrency.lockutils [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Releasing lock "[datastore2] devstack-image-cache_base/792ecd02-edc1-4227-9a1b-93345040c770" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1192.286617] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Processing image 792ecd02-edc1-4227-9a1b-93345040c770 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1192.287089] env[63028]: DEBUG oslo_concurrency.lockutils [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/792ecd02-edc1-4227-9a1b-93345040c770/792ecd02-edc1-4227-9a1b-93345040c770.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1192.287089] env[63028]: DEBUG oslo_concurrency.lockutils [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Acquired lock "[datastore2] devstack-image-cache_base/792ecd02-edc1-4227-9a1b-93345040c770/792ecd02-edc1-4227-9a1b-93345040c770.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1192.287217] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1192.287489] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4ee35b45-951c-4876-93bc-3c379912e02f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.295120] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1192.295305] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1192.296376] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9bf8d230-6b57-45b0-9140-b798e8bfae39 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.301558] env[63028]: DEBUG oslo_vmware.api [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Waiting for the task: (returnval){ [ 1192.301558] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52c58556-a4d1-3f6b-8fd2-3c4b5e83baf9" [ 1192.301558] env[63028]: _type = "Task" [ 1192.301558] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1192.309649] env[63028]: DEBUG nova.scheduler.client.report [None req-787ada8a-f311-4d03-8fcb-45d2db2fbef3 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1192.312925] env[63028]: DEBUG oslo_vmware.api [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52c58556-a4d1-3f6b-8fd2-3c4b5e83baf9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1192.474071] env[63028]: DEBUG nova.network.neutron [req-d7e90b4f-f135-4def-ba12-5c5bf5e5f4c4 req-4738f8f1-a0bf-4670-9313-3c3f26a44ba0 service nova] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Updated VIF entry in instance network info cache for port f16f5758-9834-448c-8002-199fff053deb. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1192.474810] env[63028]: DEBUG nova.network.neutron [req-d7e90b4f-f135-4def-ba12-5c5bf5e5f4c4 req-4738f8f1-a0bf-4670-9313-3c3f26a44ba0 service nova] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Updating instance_info_cache with network_info: [{"id": "f16f5758-9834-448c-8002-199fff053deb", "address": "fa:16:3e:95:4e:63", "network": {"id": "2b335013-49f6-4f84-b6b6-33d71818b2b9", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-2106845332-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.251", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "11332c2adbdc41928d4bf084435e2037", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f01bbee7-8b9a-46be-891e-59d8142fb359", "external-id": "nsx-vlan-transportzone-145", "segmentation_id": 145, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf16f5758-98", "ovs_interfaceid": "f16f5758-9834-448c-8002-199fff053deb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1192.812597] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Preparing fetch location {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1192.812982] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Fetch image to [datastore2] OSTACK_IMG_9e500391-77c0-4b6a-97fa-ca074a1545e6/OSTACK_IMG_9e500391-77c0-4b6a-97fa-ca074a1545e6.vmdk {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1192.813128] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Downloading stream optimized image 792ecd02-edc1-4227-9a1b-93345040c770 to [datastore2] OSTACK_IMG_9e500391-77c0-4b6a-97fa-ca074a1545e6/OSTACK_IMG_9e500391-77c0-4b6a-97fa-ca074a1545e6.vmdk on the data store datastore2 as vApp {{(pid=63028) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1192.813277] env[63028]: DEBUG nova.virt.vmwareapi.images [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Downloading image file data 792ecd02-edc1-4227-9a1b-93345040c770 to the ESX as VM named 'OSTACK_IMG_9e500391-77c0-4b6a-97fa-ca074a1545e6' {{(pid=63028) 
fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1192.815974] env[63028]: DEBUG oslo_concurrency.lockutils [None req-787ada8a-f311-4d03-8fcb-45d2db2fbef3 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.778s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1192.853521] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7c329cf8-3b31-420e-ae9b-3580381c8a5f tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] Acquiring lock "9e3ac23e-16ef-4626-817b-24683fd89b1d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1192.853981] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7c329cf8-3b31-420e-ae9b-3580381c8a5f tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] Lock "9e3ac23e-16ef-4626-817b-24683fd89b1d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1192.854316] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7c329cf8-3b31-420e-ae9b-3580381c8a5f tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] Acquiring lock "9e3ac23e-16ef-4626-817b-24683fd89b1d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1192.854625] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7c329cf8-3b31-420e-ae9b-3580381c8a5f tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] Lock "9e3ac23e-16ef-4626-817b-24683fd89b1d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1192.854865] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7c329cf8-3b31-420e-ae9b-3580381c8a5f tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] Lock "9e3ac23e-16ef-4626-817b-24683fd89b1d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1192.857435] env[63028]: INFO nova.scheduler.client.report [None req-787ada8a-f311-4d03-8fcb-45d2db2fbef3 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Deleted allocations for instance 092c7673-97fb-4085-852c-04a7c19a73e7 [ 1192.861659] env[63028]: INFO nova.compute.manager [None req-7c329cf8-3b31-420e-ae9b-3580381c8a5f tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] [instance: 9e3ac23e-16ef-4626-817b-24683fd89b1d] Terminating instance [ 1192.900667] env[63028]: DEBUG oslo_vmware.rw_handles [None 
req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1192.900667] env[63028]: value = "resgroup-9" [ 1192.900667] env[63028]: _type = "ResourcePool" [ 1192.900667] env[63028]: }. {{(pid=63028) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1192.901134] env[63028]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-367e2def-0533-4d66-b0a1-672efbc5a9d0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.925446] env[63028]: DEBUG oslo_vmware.rw_handles [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Lease: (returnval){ [ 1192.925446] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]527dc415-74fb-b172-5056-17d3e4a4b810" [ 1192.925446] env[63028]: _type = "HttpNfcLease" [ 1192.925446] env[63028]: } obtained for vApp import into resource pool (val){ [ 1192.925446] env[63028]: value = "resgroup-9" [ 1192.925446] env[63028]: _type = "ResourcePool" [ 1192.925446] env[63028]: }. {{(pid=63028) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1192.926529] env[63028]: DEBUG oslo_vmware.api [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Waiting for the lease: (returnval){ [ 1192.926529] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]527dc415-74fb-b172-5056-17d3e4a4b810" [ 1192.926529] env[63028]: _type = "HttpNfcLease" [ 1192.926529] env[63028]: } to be ready. {{(pid=63028) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1192.933097] env[63028]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1192.933097] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]527dc415-74fb-b172-5056-17d3e4a4b810" [ 1192.933097] env[63028]: _type = "HttpNfcLease" [ 1192.933097] env[63028]: } is initializing. {{(pid=63028) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1192.977875] env[63028]: DEBUG oslo_concurrency.lockutils [req-d7e90b4f-f135-4def-ba12-5c5bf5e5f4c4 req-4738f8f1-a0bf-4670-9313-3c3f26a44ba0 service nova] Releasing lock "refresh_cache-e5767896-8203-4b18-826f-dcb2fe02268e" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1193.097166] env[63028]: DEBUG nova.compute.manager [None req-adf86c43-6e9c-4142-8088-5c5fd04a98e2 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4] Start spawning the instance on the hypervisor. {{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1193.367990] env[63028]: DEBUG nova.compute.manager [None req-7c329cf8-3b31-420e-ae9b-3580381c8a5f tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] [instance: 9e3ac23e-16ef-4626-817b-24683fd89b1d] Start destroying the instance on the hypervisor. 
{{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1193.368299] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-7c329cf8-3b31-420e-ae9b-3580381c8a5f tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] [instance: 9e3ac23e-16ef-4626-817b-24683fd89b1d] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1193.368906] env[63028]: DEBUG oslo_concurrency.lockutils [None req-787ada8a-f311-4d03-8fcb-45d2db2fbef3 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Lock "092c7673-97fb-4085-852c-04a7c19a73e7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.875s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1193.370546] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-062b66ad-c664-49ff-8b8b-4704b0f18de1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.379518] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c329cf8-3b31-420e-ae9b-3580381c8a5f tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] [instance: 9e3ac23e-16ef-4626-817b-24683fd89b1d] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1193.380259] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3b04051f-1922-457a-bbda-808c35a9f2e1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.386372] env[63028]: DEBUG oslo_vmware.api [None req-7c329cf8-3b31-420e-ae9b-3580381c8a5f tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] Waiting for the task: (returnval){ [ 1193.386372] env[63028]: value = "task-2736491" [ 1193.386372] env[63028]: _type = "Task" [ 1193.386372] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1193.397311] env[63028]: DEBUG oslo_vmware.api [None req-7c329cf8-3b31-420e-ae9b-3580381c8a5f tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] Task: {'id': task-2736491, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1193.433891] env[63028]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1193.433891] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]527dc415-74fb-b172-5056-17d3e4a4b810" [ 1193.433891] env[63028]: _type = "HttpNfcLease" [ 1193.433891] env[63028]: } is ready. {{(pid=63028) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1193.434301] env[63028]: DEBUG oslo_vmware.rw_handles [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1193.434301] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]527dc415-74fb-b172-5056-17d3e4a4b810" [ 1193.434301] env[63028]: _type = "HttpNfcLease" [ 1193.434301] env[63028]: }. 
{{(pid=63028) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1193.435207] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8e10ee8-51fb-4c48-887d-89a3c90c3061 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.442822] env[63028]: DEBUG oslo_vmware.rw_handles [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/522dc19d-ea13-dffe-da6e-641cd5d8ee48/disk-0.vmdk from lease info. {{(pid=63028) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1193.443125] env[63028]: DEBUG oslo_vmware.rw_handles [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Creating HTTP connection to write to file with size = 31667200 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/522dc19d-ea13-dffe-da6e-641cd5d8ee48/disk-0.vmdk. {{(pid=63028) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1193.504150] env[63028]: DEBUG nova.compute.manager [req-18a72346-c355-4af4-b6b5-5810e3f436fa req-87a851ff-b4ef-4e54-b4b2-10b0b08f4cfd service nova] [instance: 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4] Received event network-vif-plugged-a4f28208-8404-4dcc-a133-bd9f94ad027c {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1193.504150] env[63028]: DEBUG oslo_concurrency.lockutils [req-18a72346-c355-4af4-b6b5-5810e3f436fa req-87a851ff-b4ef-4e54-b4b2-10b0b08f4cfd service nova] Acquiring lock "670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1193.504627] env[63028]: DEBUG oslo_concurrency.lockutils [req-18a72346-c355-4af4-b6b5-5810e3f436fa req-87a851ff-b4ef-4e54-b4b2-10b0b08f4cfd service nova] Lock "670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1193.504832] env[63028]: DEBUG oslo_concurrency.lockutils [req-18a72346-c355-4af4-b6b5-5810e3f436fa req-87a851ff-b4ef-4e54-b4b2-10b0b08f4cfd service nova] Lock "670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1193.505015] env[63028]: DEBUG nova.compute.manager [req-18a72346-c355-4af4-b6b5-5810e3f436fa req-87a851ff-b4ef-4e54-b4b2-10b0b08f4cfd service nova] [instance: 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4] No waiting events found dispatching network-vif-plugged-a4f28208-8404-4dcc-a133-bd9f94ad027c {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1193.505188] env[63028]: WARNING nova.compute.manager [req-18a72346-c355-4af4-b6b5-5810e3f436fa req-87a851ff-b4ef-4e54-b4b2-10b0b08f4cfd service nova] [instance: 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4] Received unexpected event 
network-vif-plugged-a4f28208-8404-4dcc-a133-bd9f94ad027c for instance with vm_state building and task_state spawning. [ 1193.512598] env[63028]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-f663b04f-cc94-4508-ac1d-9cfcf1e3327c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.611298] env[63028]: DEBUG nova.network.neutron [None req-adf86c43-6e9c-4142-8088-5c5fd04a98e2 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4] Successfully updated port: a4f28208-8404-4dcc-a133-bd9f94ad027c {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1193.901858] env[63028]: DEBUG oslo_vmware.api [None req-7c329cf8-3b31-420e-ae9b-3580381c8a5f tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] Task: {'id': task-2736491, 'name': PowerOffVM_Task, 'duration_secs': 0.240719} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1193.903359] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c329cf8-3b31-420e-ae9b-3580381c8a5f tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] [instance: 9e3ac23e-16ef-4626-817b-24683fd89b1d] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1193.903480] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-7c329cf8-3b31-420e-ae9b-3580381c8a5f tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] [instance: 9e3ac23e-16ef-4626-817b-24683fd89b1d] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1193.903735] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a72c2fd6-dc66-42d8-b45e-3ffb3db5e21c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.971987] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-7c329cf8-3b31-420e-ae9b-3580381c8a5f tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] [instance: 9e3ac23e-16ef-4626-817b-24683fd89b1d] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1193.972317] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-7c329cf8-3b31-420e-ae9b-3580381c8a5f tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] [instance: 9e3ac23e-16ef-4626-817b-24683fd89b1d] Deleting contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1193.972587] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-7c329cf8-3b31-420e-ae9b-3580381c8a5f tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] Deleting the datastore file [datastore1] 9e3ac23e-16ef-4626-817b-24683fd89b1d {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1193.972881] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ac25c90e-53e9-4df2-989d-233669dd4a32 {{(pid=63028) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.979895] env[63028]: DEBUG oslo_vmware.api [None req-7c329cf8-3b31-420e-ae9b-3580381c8a5f tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] Waiting for the task: (returnval){ [ 1193.979895] env[63028]: value = "task-2736493" [ 1193.979895] env[63028]: _type = "Task" [ 1193.979895] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1193.989672] env[63028]: DEBUG oslo_vmware.api [None req-7c329cf8-3b31-420e-ae9b-3580381c8a5f tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] Task: {'id': task-2736493, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.113913] env[63028]: DEBUG oslo_concurrency.lockutils [None req-adf86c43-6e9c-4142-8088-5c5fd04a98e2 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Acquiring lock "refresh_cache-670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1194.114192] env[63028]: DEBUG oslo_concurrency.lockutils [None req-adf86c43-6e9c-4142-8088-5c5fd04a98e2 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Acquired lock "refresh_cache-670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1194.114368] env[63028]: DEBUG nova.network.neutron [None req-adf86c43-6e9c-4142-8088-5c5fd04a98e2 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1194.299936] env[63028]: DEBUG nova.virt.hardware [None req-adf86c43-6e9c-4142-8088-5c5fd04a98e2 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1194.300216] env[63028]: DEBUG nova.virt.hardware [None req-adf86c43-6e9c-4142-8088-5c5fd04a98e2 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1194.300391] env[63028]: DEBUG nova.virt.hardware [None 
req-adf86c43-6e9c-4142-8088-5c5fd04a98e2 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1194.300595] env[63028]: DEBUG nova.virt.hardware [None req-adf86c43-6e9c-4142-8088-5c5fd04a98e2 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1194.300757] env[63028]: DEBUG nova.virt.hardware [None req-adf86c43-6e9c-4142-8088-5c5fd04a98e2 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1194.300928] env[63028]: DEBUG nova.virt.hardware [None req-adf86c43-6e9c-4142-8088-5c5fd04a98e2 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1194.301170] env[63028]: DEBUG nova.virt.hardware [None req-adf86c43-6e9c-4142-8088-5c5fd04a98e2 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1194.301333] env[63028]: DEBUG nova.virt.hardware [None req-adf86c43-6e9c-4142-8088-5c5fd04a98e2 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1194.301511] env[63028]: DEBUG nova.virt.hardware [None req-adf86c43-6e9c-4142-8088-5c5fd04a98e2 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1194.301690] env[63028]: DEBUG nova.virt.hardware [None req-adf86c43-6e9c-4142-8088-5c5fd04a98e2 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1194.301892] env[63028]: DEBUG nova.virt.hardware [None req-adf86c43-6e9c-4142-8088-5c5fd04a98e2 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1194.302943] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a907e35e-77d5-4ae7-a0f5-b3fedec5540b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.315711] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7f44783-e05a-48a1-98b6-5c61e300592b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.490054] env[63028]: DEBUG oslo_vmware.api [None 
req-7c329cf8-3b31-420e-ae9b-3580381c8a5f tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] Task: {'id': task-2736493, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.17907} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1194.492346] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-7c329cf8-3b31-420e-ae9b-3580381c8a5f tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1194.492584] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-7c329cf8-3b31-420e-ae9b-3580381c8a5f tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] [instance: 9e3ac23e-16ef-4626-817b-24683fd89b1d] Deleted contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1194.492757] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-7c329cf8-3b31-420e-ae9b-3580381c8a5f tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] [instance: 9e3ac23e-16ef-4626-817b-24683fd89b1d] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1194.492989] env[63028]: INFO nova.compute.manager [None req-7c329cf8-3b31-420e-ae9b-3580381c8a5f tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] [instance: 9e3ac23e-16ef-4626-817b-24683fd89b1d] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1194.493289] env[63028]: DEBUG oslo.service.loopingcall [None req-7c329cf8-3b31-420e-ae9b-3580381c8a5f tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1194.493492] env[63028]: DEBUG nova.compute.manager [-] [instance: 9e3ac23e-16ef-4626-817b-24683fd89b1d] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1194.493611] env[63028]: DEBUG nova.network.neutron [-] [instance: 9e3ac23e-16ef-4626-817b-24683fd89b1d] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1194.549282] env[63028]: DEBUG oslo_concurrency.lockutils [None req-dc974c83-da05-4962-8e99-8cbd6651f433 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Acquiring lock "4ec96b68-2fdb-4150-8d26-53fdf79c8e26" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1194.549553] env[63028]: DEBUG oslo_concurrency.lockutils [None req-dc974c83-da05-4962-8e99-8cbd6651f433 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Lock "4ec96b68-2fdb-4150-8d26-53fdf79c8e26" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1194.549767] env[63028]: DEBUG oslo_concurrency.lockutils [None req-dc974c83-da05-4962-8e99-8cbd6651f433 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Acquiring lock "4ec96b68-2fdb-4150-8d26-53fdf79c8e26-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1194.550090] env[63028]: DEBUG oslo_concurrency.lockutils [None req-dc974c83-da05-4962-8e99-8cbd6651f433 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Lock "4ec96b68-2fdb-4150-8d26-53fdf79c8e26-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1194.550338] env[63028]: DEBUG oslo_concurrency.lockutils [None req-dc974c83-da05-4962-8e99-8cbd6651f433 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Lock "4ec96b68-2fdb-4150-8d26-53fdf79c8e26-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1194.554132] env[63028]: INFO nova.compute.manager [None req-dc974c83-da05-4962-8e99-8cbd6651f433 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 4ec96b68-2fdb-4150-8d26-53fdf79c8e26] Terminating instance [ 1194.661248] env[63028]: DEBUG nova.network.neutron [None req-adf86c43-6e9c-4142-8088-5c5fd04a98e2 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4] Instance cache missing network info. 
{{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1194.667702] env[63028]: DEBUG oslo_vmware.rw_handles [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Completed reading data from the image iterator. {{(pid=63028) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1194.667942] env[63028]: DEBUG oslo_vmware.rw_handles [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/522dc19d-ea13-dffe-da6e-641cd5d8ee48/disk-0.vmdk. {{(pid=63028) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1194.668850] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6f62094-77ea-4b5c-a814-205b78c92251 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.675611] env[63028]: DEBUG oslo_vmware.rw_handles [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/522dc19d-ea13-dffe-da6e-641cd5d8ee48/disk-0.vmdk is in state: ready. {{(pid=63028) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1194.675802] env[63028]: DEBUG oslo_vmware.rw_handles [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Releasing lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/522dc19d-ea13-dffe-da6e-641cd5d8ee48/disk-0.vmdk. {{(pid=63028) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1194.676055] env[63028]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-d968c682-2090-4f67-a329-5b1e74aca3e2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.911902] env[63028]: DEBUG oslo_vmware.rw_handles [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Closed VMDK write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/522dc19d-ea13-dffe-da6e-641cd5d8ee48/disk-0.vmdk. 
{{(pid=63028) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1194.911902] env[63028]: INFO nova.virt.vmwareapi.images [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Downloaded image file data 792ecd02-edc1-4227-9a1b-93345040c770 [ 1194.912330] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71f9deb0-2eb6-4aec-ab28-8a3c6e4d45a0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.928443] env[63028]: DEBUG nova.network.neutron [None req-adf86c43-6e9c-4142-8088-5c5fd04a98e2 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4] Updating instance_info_cache with network_info: [{"id": "a4f28208-8404-4dcc-a133-bd9f94ad027c", "address": "fa:16:3e:2d:98:bb", "network": {"id": "ec7daf90-9857-4c67-8588-1d8404499409", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-574413795-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e85128c5c889438bbb1df571b7756c6a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e1a5c1-4ae7-409b-8de7-d401684ef60d", "external-id": "nsx-vlan-transportzone-740", "segmentation_id": 740, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa4f28208-84", "ovs_interfaceid": "a4f28208-8404-4dcc-a133-bd9f94ad027c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1194.929669] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-109358e8-7300-4eaf-bc28-fbb2e2c85de2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.953472] env[63028]: INFO nova.virt.vmwareapi.images [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] The imported VM was unregistered [ 1194.955372] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Caching image {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1194.955678] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Creating directory with path [datastore2] devstack-image-cache_base/792ecd02-edc1-4227-9a1b-93345040c770 {{(pid=63028) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1194.955954] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7e5a129e-1f89-43e1-a530-814658a54735 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.968190] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Created directory with path [datastore2] devstack-image-cache_base/792ecd02-edc1-4227-9a1b-93345040c770 {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1194.968476] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_9e500391-77c0-4b6a-97fa-ca074a1545e6/OSTACK_IMG_9e500391-77c0-4b6a-97fa-ca074a1545e6.vmdk to [datastore2] devstack-image-cache_base/792ecd02-edc1-4227-9a1b-93345040c770/792ecd02-edc1-4227-9a1b-93345040c770.vmdk. {{(pid=63028) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1194.968700] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-4fc60758-e09e-4188-8e3e-31973dc84164 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.977094] env[63028]: DEBUG oslo_vmware.api [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Waiting for the task: (returnval){ [ 1194.977094] env[63028]: value = "task-2736495" [ 1194.977094] env[63028]: _type = "Task" [ 1194.977094] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1194.985658] env[63028]: DEBUG oslo_vmware.api [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2736495, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1195.059317] env[63028]: DEBUG nova.compute.manager [None req-dc974c83-da05-4962-8e99-8cbd6651f433 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 4ec96b68-2fdb-4150-8d26-53fdf79c8e26] Start destroying the instance on the hypervisor. 
{{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1195.059606] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-dc974c83-da05-4962-8e99-8cbd6651f433 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 4ec96b68-2fdb-4150-8d26-53fdf79c8e26] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1195.060609] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c341d72-6718-4585-bd32-8787b1d90438 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.069176] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc974c83-da05-4962-8e99-8cbd6651f433 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 4ec96b68-2fdb-4150-8d26-53fdf79c8e26] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1195.069428] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-44aee830-da0f-47bc-b779-3fdd015ed6ca {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.076431] env[63028]: DEBUG oslo_vmware.api [None req-dc974c83-da05-4962-8e99-8cbd6651f433 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Waiting for the task: (returnval){ [ 1195.076431] env[63028]: value = "task-2736496" [ 1195.076431] env[63028]: _type = "Task" [ 1195.076431] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1195.086709] env[63028]: DEBUG oslo_vmware.api [None req-dc974c83-da05-4962-8e99-8cbd6651f433 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2736496, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1195.362287] env[63028]: DEBUG nova.network.neutron [-] [instance: 9e3ac23e-16ef-4626-817b-24683fd89b1d] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1195.433102] env[63028]: DEBUG oslo_concurrency.lockutils [None req-adf86c43-6e9c-4142-8088-5c5fd04a98e2 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Releasing lock "refresh_cache-670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1195.433455] env[63028]: DEBUG nova.compute.manager [None req-adf86c43-6e9c-4142-8088-5c5fd04a98e2 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4] Instance network_info: |[{"id": "a4f28208-8404-4dcc-a133-bd9f94ad027c", "address": "fa:16:3e:2d:98:bb", "network": {"id": "ec7daf90-9857-4c67-8588-1d8404499409", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-574413795-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e85128c5c889438bbb1df571b7756c6a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e1a5c1-4ae7-409b-8de7-d401684ef60d", "external-id": "nsx-vlan-transportzone-740", "segmentation_id": 740, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa4f28208-84", "ovs_interfaceid": "a4f28208-8404-4dcc-a133-bd9f94ad027c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1195.433895] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-adf86c43-6e9c-4142-8088-5c5fd04a98e2 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2d:98:bb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69e1a5c1-4ae7-409b-8de7-d401684ef60d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a4f28208-8404-4dcc-a133-bd9f94ad027c', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1195.441394] env[63028]: DEBUG oslo.service.loopingcall [None req-adf86c43-6e9c-4142-8088-5c5fd04a98e2 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1195.442486] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1195.442710] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7f23afc9-4bdb-4aec-bff9-71ed7db34c13 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.463319] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1195.463319] env[63028]: value = "task-2736497" [ 1195.463319] env[63028]: _type = "Task" [ 1195.463319] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1195.471262] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2736497, 'name': CreateVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1195.487225] env[63028]: DEBUG oslo_vmware.api [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2736495, 'name': MoveVirtualDisk_Task} progress is 18%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1195.502528] env[63028]: DEBUG nova.compute.manager [req-2f88066e-397a-4ee0-8688-60384d360c32 req-21c0b869-0ef5-4d2c-829a-bea5d4d68b08 service nova] [instance: 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4] Received event network-changed-a4f28208-8404-4dcc-a133-bd9f94ad027c {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1195.502732] env[63028]: DEBUG nova.compute.manager [req-2f88066e-397a-4ee0-8688-60384d360c32 req-21c0b869-0ef5-4d2c-829a-bea5d4d68b08 service nova] [instance: 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4] Refreshing instance network info cache due to event network-changed-a4f28208-8404-4dcc-a133-bd9f94ad027c. 
{{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1195.502939] env[63028]: DEBUG oslo_concurrency.lockutils [req-2f88066e-397a-4ee0-8688-60384d360c32 req-21c0b869-0ef5-4d2c-829a-bea5d4d68b08 service nova] Acquiring lock "refresh_cache-670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1195.503098] env[63028]: DEBUG oslo_concurrency.lockutils [req-2f88066e-397a-4ee0-8688-60384d360c32 req-21c0b869-0ef5-4d2c-829a-bea5d4d68b08 service nova] Acquired lock "refresh_cache-670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1195.503263] env[63028]: DEBUG nova.network.neutron [req-2f88066e-397a-4ee0-8688-60384d360c32 req-21c0b869-0ef5-4d2c-829a-bea5d4d68b08 service nova] [instance: 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4] Refreshing network info cache for port a4f28208-8404-4dcc-a133-bd9f94ad027c {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1195.587607] env[63028]: DEBUG oslo_vmware.api [None req-dc974c83-da05-4962-8e99-8cbd6651f433 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2736496, 'name': PowerOffVM_Task, 'duration_secs': 0.186751} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1195.587892] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc974c83-da05-4962-8e99-8cbd6651f433 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 4ec96b68-2fdb-4150-8d26-53fdf79c8e26] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1195.588077] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-dc974c83-da05-4962-8e99-8cbd6651f433 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 4ec96b68-2fdb-4150-8d26-53fdf79c8e26] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1195.588341] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-36aca70b-7749-4ddd-87c4-9b8adcf06f89 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.650636] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-dc974c83-da05-4962-8e99-8cbd6651f433 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 4ec96b68-2fdb-4150-8d26-53fdf79c8e26] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1195.650862] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-dc974c83-da05-4962-8e99-8cbd6651f433 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 4ec96b68-2fdb-4150-8d26-53fdf79c8e26] Deleting contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1195.651159] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-dc974c83-da05-4962-8e99-8cbd6651f433 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Deleting the datastore file [datastore1] 4ec96b68-2fdb-4150-8d26-53fdf79c8e26 {{(pid=63028) file_delete 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1195.651467] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-58792fa2-346e-4b0b-b5b8-b8ce0fb91811 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.658277] env[63028]: DEBUG oslo_vmware.api [None req-dc974c83-da05-4962-8e99-8cbd6651f433 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Waiting for the task: (returnval){ [ 1195.658277] env[63028]: value = "task-2736499" [ 1195.658277] env[63028]: _type = "Task" [ 1195.658277] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1195.667461] env[63028]: DEBUG oslo_vmware.api [None req-dc974c83-da05-4962-8e99-8cbd6651f433 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2736499, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1195.866291] env[63028]: INFO nova.compute.manager [-] [instance: 9e3ac23e-16ef-4626-817b-24683fd89b1d] Took 1.37 seconds to deallocate network for instance. [ 1195.975180] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2736497, 'name': CreateVM_Task, 'duration_secs': 0.355305} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1195.975498] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1195.976201] env[63028]: DEBUG oslo_concurrency.lockutils [None req-adf86c43-6e9c-4142-8088-5c5fd04a98e2 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1195.976377] env[63028]: DEBUG oslo_concurrency.lockutils [None req-adf86c43-6e9c-4142-8088-5c5fd04a98e2 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1195.976699] env[63028]: DEBUG oslo_concurrency.lockutils [None req-adf86c43-6e9c-4142-8088-5c5fd04a98e2 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1195.976982] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d2838045-0c76-4d56-8dbd-6d969458c6f4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.985053] env[63028]: DEBUG oslo_vmware.api [None req-adf86c43-6e9c-4142-8088-5c5fd04a98e2 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Waiting for the task: (returnval){ [ 1195.985053] env[63028]: value = 
"session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]525d80e2-5fd2-58ae-0a9f-ee304d70d582" [ 1195.985053] env[63028]: _type = "Task" [ 1195.985053] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1195.991723] env[63028]: DEBUG oslo_vmware.api [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2736495, 'name': MoveVirtualDisk_Task} progress is 38%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1195.996456] env[63028]: DEBUG oslo_vmware.api [None req-adf86c43-6e9c-4142-8088-5c5fd04a98e2 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]525d80e2-5fd2-58ae-0a9f-ee304d70d582, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.172634] env[63028]: DEBUG oslo_vmware.api [None req-dc974c83-da05-4962-8e99-8cbd6651f433 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2736499, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.238698} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1196.172913] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-dc974c83-da05-4962-8e99-8cbd6651f433 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1196.173326] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-dc974c83-da05-4962-8e99-8cbd6651f433 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 4ec96b68-2fdb-4150-8d26-53fdf79c8e26] Deleted contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1196.173553] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-dc974c83-da05-4962-8e99-8cbd6651f433 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 4ec96b68-2fdb-4150-8d26-53fdf79c8e26] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1196.173750] env[63028]: INFO nova.compute.manager [None req-dc974c83-da05-4962-8e99-8cbd6651f433 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: 4ec96b68-2fdb-4150-8d26-53fdf79c8e26] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1196.174034] env[63028]: DEBUG oslo.service.loopingcall [None req-dc974c83-da05-4962-8e99-8cbd6651f433 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1196.174470] env[63028]: DEBUG nova.compute.manager [-] [instance: 4ec96b68-2fdb-4150-8d26-53fdf79c8e26] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1196.174626] env[63028]: DEBUG nova.network.neutron [-] [instance: 4ec96b68-2fdb-4150-8d26-53fdf79c8e26] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1196.324031] env[63028]: DEBUG nova.network.neutron [req-2f88066e-397a-4ee0-8688-60384d360c32 req-21c0b869-0ef5-4d2c-829a-bea5d4d68b08 service nova] [instance: 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4] Updated VIF entry in instance network info cache for port a4f28208-8404-4dcc-a133-bd9f94ad027c. {{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1196.324679] env[63028]: DEBUG nova.network.neutron [req-2f88066e-397a-4ee0-8688-60384d360c32 req-21c0b869-0ef5-4d2c-829a-bea5d4d68b08 service nova] [instance: 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4] Updating instance_info_cache with network_info: [{"id": "a4f28208-8404-4dcc-a133-bd9f94ad027c", "address": "fa:16:3e:2d:98:bb", "network": {"id": "ec7daf90-9857-4c67-8588-1d8404499409", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-574413795-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e85128c5c889438bbb1df571b7756c6a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e1a5c1-4ae7-409b-8de7-d401684ef60d", "external-id": "nsx-vlan-transportzone-740", "segmentation_id": 740, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa4f28208-84", "ovs_interfaceid": "a4f28208-8404-4dcc-a133-bd9f94ad027c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1196.372973] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7c329cf8-3b31-420e-ae9b-3580381c8a5f tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1196.373263] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7c329cf8-3b31-420e-ae9b-3580381c8a5f tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1196.373487] env[63028]: DEBUG nova.objects.instance [None req-7c329cf8-3b31-420e-ae9b-3580381c8a5f tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] 
Lazy-loading 'resources' on Instance uuid 9e3ac23e-16ef-4626-817b-24683fd89b1d {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1196.489338] env[63028]: DEBUG oslo_vmware.api [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2736495, 'name': MoveVirtualDisk_Task} progress is 60%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.498410] env[63028]: DEBUG oslo_vmware.api [None req-adf86c43-6e9c-4142-8088-5c5fd04a98e2 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]525d80e2-5fd2-58ae-0a9f-ee304d70d582, 'name': SearchDatastore_Task, 'duration_secs': 0.014779} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1196.498762] env[63028]: DEBUG oslo_concurrency.lockutils [None req-adf86c43-6e9c-4142-8088-5c5fd04a98e2 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1196.499019] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-adf86c43-6e9c-4142-8088-5c5fd04a98e2 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1196.499275] env[63028]: DEBUG oslo_concurrency.lockutils [None req-adf86c43-6e9c-4142-8088-5c5fd04a98e2 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1196.499425] env[63028]: DEBUG oslo_concurrency.lockutils [None req-adf86c43-6e9c-4142-8088-5c5fd04a98e2 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1196.499607] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-adf86c43-6e9c-4142-8088-5c5fd04a98e2 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1196.499883] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ae3b44de-8d4d-4221-8483-a31f86b80d71 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.509385] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-adf86c43-6e9c-4142-8088-5c5fd04a98e2 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Created directory with path [datastore1] devstack-image-cache_base 
{{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1196.509617] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-adf86c43-6e9c-4142-8088-5c5fd04a98e2 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1196.510379] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d37d1824-4f5c-4179-95b7-f0475a50f5dd {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.516800] env[63028]: DEBUG oslo_vmware.api [None req-adf86c43-6e9c-4142-8088-5c5fd04a98e2 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Waiting for the task: (returnval){ [ 1196.516800] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52d55fa6-09a6-4229-786f-25b1f8beda1f" [ 1196.516800] env[63028]: _type = "Task" [ 1196.516800] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1196.525015] env[63028]: DEBUG oslo_vmware.api [None req-adf86c43-6e9c-4142-8088-5c5fd04a98e2 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52d55fa6-09a6-4229-786f-25b1f8beda1f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.827656] env[63028]: DEBUG oslo_concurrency.lockutils [req-2f88066e-397a-4ee0-8688-60384d360c32 req-21c0b869-0ef5-4d2c-829a-bea5d4d68b08 service nova] Releasing lock "refresh_cache-670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1196.827953] env[63028]: DEBUG nova.compute.manager [req-2f88066e-397a-4ee0-8688-60384d360c32 req-21c0b869-0ef5-4d2c-829a-bea5d4d68b08 service nova] [instance: 9e3ac23e-16ef-4626-817b-24683fd89b1d] Received event network-vif-deleted-26601205-e781-4ca7-852b-4a1f23fdbe75 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1196.986998] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b00173fc-4d21-4dde-a051-e99e63286310 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.996089] env[63028]: DEBUG oslo_vmware.api [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2736495, 'name': MoveVirtualDisk_Task} progress is 80%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.998990] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db634d60-0cf9-4d17-814c-9257605d3c2e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.033738] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60416712-d876-4c28-a58d-d3e901acd578 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.044609] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8d03fa9-63ca-4edc-940f-eb283ddd1774 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.048620] env[63028]: DEBUG oslo_vmware.api [None req-adf86c43-6e9c-4142-8088-5c5fd04a98e2 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52d55fa6-09a6-4229-786f-25b1f8beda1f, 'name': SearchDatastore_Task, 'duration_secs': 0.013148} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1197.049793] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4ac49384-ab7d-4d44-8361-0635c0e0e819 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.062825] env[63028]: DEBUG nova.compute.provider_tree [None req-7c329cf8-3b31-420e-ae9b-3580381c8a5f tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1197.068993] env[63028]: DEBUG oslo_vmware.api [None req-adf86c43-6e9c-4142-8088-5c5fd04a98e2 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Waiting for the task: (returnval){ [ 1197.068993] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52395c07-4fba-9a46-0abf-dc183407b875" [ 1197.068993] env[63028]: _type = "Task" [ 1197.068993] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1197.077405] env[63028]: DEBUG oslo_vmware.api [None req-adf86c43-6e9c-4142-8088-5c5fd04a98e2 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52395c07-4fba-9a46-0abf-dc183407b875, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1197.290151] env[63028]: DEBUG nova.network.neutron [-] [instance: 4ec96b68-2fdb-4150-8d26-53fdf79c8e26] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1197.490433] env[63028]: DEBUG oslo_vmware.api [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2736495, 'name': MoveVirtualDisk_Task} progress is 100%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1197.528718] env[63028]: DEBUG nova.compute.manager [req-241d5ff6-f7a4-43af-99db-d7509c45bb5f req-529b50e0-c667-4f43-ade7-01cc2e0e6464 service nova] [instance: 4ec96b68-2fdb-4150-8d26-53fdf79c8e26] Received event network-vif-deleted-78ea2d63-5ca1-4e37-808c-688a7c0fc30e {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1197.566751] env[63028]: DEBUG nova.scheduler.client.report [None req-7c329cf8-3b31-420e-ae9b-3580381c8a5f tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1197.579426] env[63028]: DEBUG oslo_vmware.api [None req-adf86c43-6e9c-4142-8088-5c5fd04a98e2 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52395c07-4fba-9a46-0abf-dc183407b875, 'name': SearchDatastore_Task, 'duration_secs': 0.015095} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1197.579680] env[63028]: DEBUG oslo_concurrency.lockutils [None req-adf86c43-6e9c-4142-8088-5c5fd04a98e2 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1197.579925] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-adf86c43-6e9c-4142-8088-5c5fd04a98e2 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4/670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1197.580191] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ecbb71bd-6edd-4e43-88b5-6ae57188ec35 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.587122] env[63028]: DEBUG oslo_vmware.api [None req-adf86c43-6e9c-4142-8088-5c5fd04a98e2 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Waiting for the task: (returnval){ [ 1197.587122] env[63028]: value = "task-2736500" [ 1197.587122] env[63028]: _type = "Task" [ 1197.587122] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1197.594741] env[63028]: DEBUG oslo_vmware.api [None req-adf86c43-6e9c-4142-8088-5c5fd04a98e2 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736500, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1197.792982] env[63028]: INFO nova.compute.manager [-] [instance: 4ec96b68-2fdb-4150-8d26-53fdf79c8e26] Took 1.62 seconds to deallocate network for instance. [ 1197.995062] env[63028]: DEBUG oslo_vmware.api [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2736495, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.645247} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1197.995062] env[63028]: INFO nova.virt.vmwareapi.ds_util [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_9e500391-77c0-4b6a-97fa-ca074a1545e6/OSTACK_IMG_9e500391-77c0-4b6a-97fa-ca074a1545e6.vmdk to [datastore2] devstack-image-cache_base/792ecd02-edc1-4227-9a1b-93345040c770/792ecd02-edc1-4227-9a1b-93345040c770.vmdk. 
[ 1197.995062] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Cleaning up location [datastore2] OSTACK_IMG_9e500391-77c0-4b6a-97fa-ca074a1545e6 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1197.995062] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_9e500391-77c0-4b6a-97fa-ca074a1545e6 {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1197.995062] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b53d48ba-72f6-42b5-a2f9-ef4a300f9f70 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.003563] env[63028]: DEBUG oslo_vmware.api [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Waiting for the task: (returnval){ [ 1198.003563] env[63028]: value = "task-2736501" [ 1198.003563] env[63028]: _type = "Task" [ 1198.003563] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1198.014429] env[63028]: DEBUG oslo_vmware.api [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2736501, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1198.074614] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7c329cf8-3b31-420e-ae9b-3580381c8a5f tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.701s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1198.097453] env[63028]: DEBUG oslo_vmware.api [None req-adf86c43-6e9c-4142-8088-5c5fd04a98e2 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736500, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1198.107133] env[63028]: INFO nova.scheduler.client.report [None req-7c329cf8-3b31-420e-ae9b-3580381c8a5f tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] Deleted allocations for instance 9e3ac23e-16ef-4626-817b-24683fd89b1d [ 1198.299453] env[63028]: DEBUG oslo_concurrency.lockutils [None req-dc974c83-da05-4962-8e99-8cbd6651f433 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1198.299764] env[63028]: DEBUG oslo_concurrency.lockutils [None req-dc974c83-da05-4962-8e99-8cbd6651f433 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1198.300032] env[63028]: DEBUG nova.objects.instance [None req-dc974c83-da05-4962-8e99-8cbd6651f433 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Lazy-loading 'resources' on Instance uuid 4ec96b68-2fdb-4150-8d26-53fdf79c8e26 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1198.515789] env[63028]: DEBUG oslo_vmware.api [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2736501, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.082137} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1198.516199] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1198.516520] env[63028]: DEBUG oslo_concurrency.lockutils [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Releasing lock "[datastore2] devstack-image-cache_base/792ecd02-edc1-4227-9a1b-93345040c770/792ecd02-edc1-4227-9a1b-93345040c770.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1198.516766] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/792ecd02-edc1-4227-9a1b-93345040c770/792ecd02-edc1-4227-9a1b-93345040c770.vmdk to [datastore2] e5767896-8203-4b18-826f-dcb2fe02268e/e5767896-8203-4b18-826f-dcb2fe02268e.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1198.517060] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-eb7260da-c417-4a07-96da-c399bb353034 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.523601] env[63028]: DEBUG oslo_vmware.api [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Waiting for the task: (returnval){ [ 1198.523601] env[63028]: value = "task-2736502" [ 1198.523601] env[63028]: _type = "Task" [ 1198.523601] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1198.531273] env[63028]: DEBUG oslo_vmware.api [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2736502, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1198.597853] env[63028]: DEBUG oslo_vmware.api [None req-adf86c43-6e9c-4142-8088-5c5fd04a98e2 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736500, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.604237} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1198.598133] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-adf86c43-6e9c-4142-8088-5c5fd04a98e2 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4/670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1198.598348] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-adf86c43-6e9c-4142-8088-5c5fd04a98e2 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1198.598663] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ab6cc60e-37e5-44cd-a59e-e15ab637345a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.609943] env[63028]: DEBUG oslo_vmware.api [None req-adf86c43-6e9c-4142-8088-5c5fd04a98e2 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Waiting for the task: (returnval){ [ 1198.609943] env[63028]: value = "task-2736503" [ 1198.609943] env[63028]: _type = "Task" [ 1198.609943] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1198.616184] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7c329cf8-3b31-420e-ae9b-3580381c8a5f tempest-InstanceActionsNegativeTestJSON-1324201885 tempest-InstanceActionsNegativeTestJSON-1324201885-project-member] Lock "9e3ac23e-16ef-4626-817b-24683fd89b1d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.762s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1198.621124] env[63028]: DEBUG oslo_vmware.api [None req-adf86c43-6e9c-4142-8088-5c5fd04a98e2 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736503, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1198.888041] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a6ce137-93ca-40bb-90f8-84e6c756f518 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.895793] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f7ba720-6fd0-4cdd-8f55-ae555feffec9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.928619] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfd6ed7f-a640-4913-a267-115d6b961074 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.937665] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e06813b7-6990-4b52-a0b9-7f4df7161ee6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.952408] env[63028]: DEBUG nova.compute.provider_tree [None req-dc974c83-da05-4962-8e99-8cbd6651f433 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1199.036454] env[63028]: DEBUG oslo_vmware.api [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2736502, 'name': CopyVirtualDisk_Task} progress is 12%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1199.120748] env[63028]: DEBUG oslo_vmware.api [None req-adf86c43-6e9c-4142-8088-5c5fd04a98e2 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736503, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.08836} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1199.120981] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-adf86c43-6e9c-4142-8088-5c5fd04a98e2 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1199.121904] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c797ad08-3d93-47e1-b2c7-85096aec8930 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.146024] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-adf86c43-6e9c-4142-8088-5c5fd04a98e2 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4] Reconfiguring VM instance instance-00000079 to attach disk [datastore1] 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4/670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1199.146464] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-48d3b8f4-983d-4b87-9c63-7c28d2df8134 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.167600] env[63028]: DEBUG oslo_vmware.api [None req-adf86c43-6e9c-4142-8088-5c5fd04a98e2 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Waiting for the task: (returnval){ [ 1199.167600] env[63028]: value = "task-2736504" [ 1199.167600] env[63028]: _type = "Task" [ 1199.167600] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1199.177110] env[63028]: DEBUG oslo_vmware.api [None req-adf86c43-6e9c-4142-8088-5c5fd04a98e2 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736504, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1199.456630] env[63028]: DEBUG nova.scheduler.client.report [None req-dc974c83-da05-4962-8e99-8cbd6651f433 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1199.534796] env[63028]: DEBUG oslo_vmware.api [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2736502, 'name': CopyVirtualDisk_Task} progress is 32%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1199.678550] env[63028]: DEBUG oslo_vmware.api [None req-adf86c43-6e9c-4142-8088-5c5fd04a98e2 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736504, 'name': ReconfigVM_Task, 'duration_secs': 0.354716} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1199.678739] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-adf86c43-6e9c-4142-8088-5c5fd04a98e2 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4] Reconfigured VM instance instance-00000079 to attach disk [datastore1] 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4/670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1199.679714] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5e2c8bec-b00c-4318-ba1e-adab9d37e532 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.688039] env[63028]: DEBUG oslo_vmware.api [None req-adf86c43-6e9c-4142-8088-5c5fd04a98e2 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Waiting for the task: (returnval){ [ 1199.688039] env[63028]: value = "task-2736505" [ 1199.688039] env[63028]: _type = "Task" [ 1199.688039] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1199.697044] env[63028]: DEBUG oslo_vmware.api [None req-adf86c43-6e9c-4142-8088-5c5fd04a98e2 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736505, 'name': Rename_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1199.962317] env[63028]: DEBUG oslo_concurrency.lockutils [None req-dc974c83-da05-4962-8e99-8cbd6651f433 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.662s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1199.990761] env[63028]: INFO nova.scheduler.client.report [None req-dc974c83-da05-4962-8e99-8cbd6651f433 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Deleted allocations for instance 4ec96b68-2fdb-4150-8d26-53fdf79c8e26 [ 1200.035813] env[63028]: DEBUG oslo_vmware.api [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2736502, 'name': CopyVirtualDisk_Task} progress is 54%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1200.200767] env[63028]: DEBUG oslo_vmware.api [None req-adf86c43-6e9c-4142-8088-5c5fd04a98e2 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736505, 'name': Rename_Task, 'duration_secs': 0.176895} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1200.201748] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-adf86c43-6e9c-4142-8088-5c5fd04a98e2 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1200.201748] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e60b66ac-43e7-4360-ad23-cfd40eac234f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.210499] env[63028]: DEBUG oslo_vmware.api [None req-adf86c43-6e9c-4142-8088-5c5fd04a98e2 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Waiting for the task: (returnval){ [ 1200.210499] env[63028]: value = "task-2736506" [ 1200.210499] env[63028]: _type = "Task" [ 1200.210499] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1200.219331] env[63028]: DEBUG oslo_vmware.api [None req-adf86c43-6e9c-4142-8088-5c5fd04a98e2 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736506, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1200.499566] env[63028]: DEBUG oslo_concurrency.lockutils [None req-dc974c83-da05-4962-8e99-8cbd6651f433 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Lock "4ec96b68-2fdb-4150-8d26-53fdf79c8e26" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.950s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1200.536245] env[63028]: DEBUG oslo_vmware.api [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2736502, 'name': CopyVirtualDisk_Task} progress is 74%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1200.725523] env[63028]: DEBUG oslo_vmware.api [None req-adf86c43-6e9c-4142-8088-5c5fd04a98e2 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736506, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1201.036650] env[63028]: DEBUG oslo_vmware.api [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2736502, 'name': CopyVirtualDisk_Task} progress is 97%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1201.224756] env[63028]: DEBUG oslo_vmware.api [None req-adf86c43-6e9c-4142-8088-5c5fd04a98e2 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736506, 'name': PowerOnVM_Task, 'duration_secs': 0.524565} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1201.225155] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-adf86c43-6e9c-4142-8088-5c5fd04a98e2 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1201.225278] env[63028]: INFO nova.compute.manager [None req-adf86c43-6e9c-4142-8088-5c5fd04a98e2 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4] Took 8.13 seconds to spawn the instance on the hypervisor. [ 1201.225459] env[63028]: DEBUG nova.compute.manager [None req-adf86c43-6e9c-4142-8088-5c5fd04a98e2 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1201.226263] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb7db730-fe1f-4956-b65b-96d0ca22a5e4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.538055] env[63028]: DEBUG oslo_vmware.api [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2736502, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.583442} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1201.538334] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/792ecd02-edc1-4227-9a1b-93345040c770/792ecd02-edc1-4227-9a1b-93345040c770.vmdk to [datastore2] e5767896-8203-4b18-826f-dcb2fe02268e/e5767896-8203-4b18-826f-dcb2fe02268e.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1201.539155] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cf36254-2445-41ba-9201-8c44eb90ef75 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.560777] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Reconfiguring VM instance instance-00000070 to attach disk [datastore2] e5767896-8203-4b18-826f-dcb2fe02268e/e5767896-8203-4b18-826f-dcb2fe02268e.vmdk or device None with type streamOptimized {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1201.561023] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-08e9d293-ebde-4630-82d0-146d54950b7a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.579866] env[63028]: DEBUG oslo_vmware.api [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 
tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Waiting for the task: (returnval){ [ 1201.579866] env[63028]: value = "task-2736508" [ 1201.579866] env[63028]: _type = "Task" [ 1201.579866] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1201.589189] env[63028]: DEBUG oslo_vmware.api [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2736508, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1201.743372] env[63028]: INFO nova.compute.manager [None req-adf86c43-6e9c-4142-8088-5c5fd04a98e2 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4] Took 15.01 seconds to build instance. [ 1202.091100] env[63028]: DEBUG oslo_vmware.api [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2736508, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1202.241139] env[63028]: DEBUG nova.compute.manager [req-b9679538-3c75-45fb-b195-7edac9ba3066 req-eca70387-ad89-4f8a-86ce-5c03eacf26bd service nova] [instance: 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4] Received event network-changed-a4f28208-8404-4dcc-a133-bd9f94ad027c {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1202.241407] env[63028]: DEBUG nova.compute.manager [req-b9679538-3c75-45fb-b195-7edac9ba3066 req-eca70387-ad89-4f8a-86ce-5c03eacf26bd service nova] [instance: 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4] Refreshing instance network info cache due to event network-changed-a4f28208-8404-4dcc-a133-bd9f94ad027c. 
{{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1202.241577] env[63028]: DEBUG oslo_concurrency.lockutils [req-b9679538-3c75-45fb-b195-7edac9ba3066 req-eca70387-ad89-4f8a-86ce-5c03eacf26bd service nova] Acquiring lock "refresh_cache-670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1202.241722] env[63028]: DEBUG oslo_concurrency.lockutils [req-b9679538-3c75-45fb-b195-7edac9ba3066 req-eca70387-ad89-4f8a-86ce-5c03eacf26bd service nova] Acquired lock "refresh_cache-670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1202.241894] env[63028]: DEBUG nova.network.neutron [req-b9679538-3c75-45fb-b195-7edac9ba3066 req-eca70387-ad89-4f8a-86ce-5c03eacf26bd service nova] [instance: 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4] Refreshing network info cache for port a4f28208-8404-4dcc-a133-bd9f94ad027c {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1202.244828] env[63028]: DEBUG oslo_concurrency.lockutils [None req-adf86c43-6e9c-4142-8088-5c5fd04a98e2 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Lock "670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.517s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1202.591367] env[63028]: DEBUG oslo_vmware.api [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2736508, 'name': ReconfigVM_Task, 'duration_secs': 0.922994} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1202.591683] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Reconfigured VM instance instance-00000070 to attach disk [datastore2] e5767896-8203-4b18-826f-dcb2fe02268e/e5767896-8203-4b18-826f-dcb2fe02268e.vmdk or device None with type streamOptimized {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1202.592407] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a01146ba-9069-474f-a1f8-6c14da38b564 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.598517] env[63028]: DEBUG oslo_vmware.api [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Waiting for the task: (returnval){ [ 1202.598517] env[63028]: value = "task-2736509" [ 1202.598517] env[63028]: _type = "Task" [ 1202.598517] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1202.606723] env[63028]: DEBUG oslo_vmware.api [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2736509, 'name': Rename_Task} progress is 5%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1203.011760] env[63028]: DEBUG nova.network.neutron [req-b9679538-3c75-45fb-b195-7edac9ba3066 req-eca70387-ad89-4f8a-86ce-5c03eacf26bd service nova] [instance: 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4] Updated VIF entry in instance network info cache for port a4f28208-8404-4dcc-a133-bd9f94ad027c. {{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1203.012169] env[63028]: DEBUG nova.network.neutron [req-b9679538-3c75-45fb-b195-7edac9ba3066 req-eca70387-ad89-4f8a-86ce-5c03eacf26bd service nova] [instance: 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4] Updating instance_info_cache with network_info: [{"id": "a4f28208-8404-4dcc-a133-bd9f94ad027c", "address": "fa:16:3e:2d:98:bb", "network": {"id": "ec7daf90-9857-4c67-8588-1d8404499409", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-574413795-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.163", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e85128c5c889438bbb1df571b7756c6a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e1a5c1-4ae7-409b-8de7-d401684ef60d", "external-id": "nsx-vlan-transportzone-740", "segmentation_id": 740, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa4f28208-84", "ovs_interfaceid": "a4f28208-8404-4dcc-a133-bd9f94ad027c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1203.110142] env[63028]: DEBUG oslo_vmware.api [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2736509, 'name': Rename_Task, 'duration_secs': 0.211591} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1203.110513] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1203.110794] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bde71610-b78c-4b3e-a87d-15be235669f8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.117027] env[63028]: DEBUG oslo_vmware.api [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Waiting for the task: (returnval){ [ 1203.117027] env[63028]: value = "task-2736510" [ 1203.117027] env[63028]: _type = "Task" [ 1203.117027] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1203.124931] env[63028]: DEBUG oslo_vmware.api [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2736510, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1203.379575] env[63028]: DEBUG oslo_vmware.rw_handles [None req-ae0a6144-7800-4f89-9008-8ca5f7f7cec1 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52e5ef95-691f-f337-91c4-9ce68f8e6830/disk-0.vmdk. {{(pid=63028) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1203.380506] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f567cefe-1cd8-4ca4-a0b8-3337728be737 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.386724] env[63028]: DEBUG oslo_vmware.rw_handles [None req-ae0a6144-7800-4f89-9008-8ca5f7f7cec1 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52e5ef95-691f-f337-91c4-9ce68f8e6830/disk-0.vmdk is in state: ready. {{(pid=63028) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1203.386891] env[63028]: ERROR oslo_vmware.rw_handles [None req-ae0a6144-7800-4f89-9008-8ca5f7f7cec1 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52e5ef95-691f-f337-91c4-9ce68f8e6830/disk-0.vmdk due to incomplete transfer. [ 1203.387142] env[63028]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-c0f0c5ea-2f69-414c-bbae-1418fafeb674 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.394333] env[63028]: DEBUG oslo_vmware.rw_handles [None req-ae0a6144-7800-4f89-9008-8ca5f7f7cec1 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52e5ef95-691f-f337-91c4-9ce68f8e6830/disk-0.vmdk. 
{{(pid=63028) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1203.394541] env[63028]: DEBUG nova.virt.vmwareapi.images [None req-ae0a6144-7800-4f89-9008-8ca5f7f7cec1 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Uploaded image 52137ff1-e088-4c15-85e2-e0e3091166a2 to the Glance image server {{(pid=63028) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1203.397173] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae0a6144-7800-4f89-9008-8ca5f7f7cec1 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Destroying the VM {{(pid=63028) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1203.397433] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-80ee69a1-bbc4-4270-858a-b5bd7d63c743 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.402553] env[63028]: DEBUG oslo_vmware.api [None req-ae0a6144-7800-4f89-9008-8ca5f7f7cec1 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Waiting for the task: (returnval){ [ 1203.402553] env[63028]: value = "task-2736511" [ 1203.402553] env[63028]: _type = "Task" [ 1203.402553] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1203.412196] env[63028]: DEBUG oslo_vmware.api [None req-ae0a6144-7800-4f89-9008-8ca5f7f7cec1 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2736511, 'name': Destroy_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1203.516257] env[63028]: DEBUG oslo_concurrency.lockutils [req-b9679538-3c75-45fb-b195-7edac9ba3066 req-eca70387-ad89-4f8a-86ce-5c03eacf26bd service nova] Releasing lock "refresh_cache-670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1203.629709] env[63028]: DEBUG oslo_vmware.api [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2736510, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1203.912059] env[63028]: DEBUG oslo_vmware.api [None req-ae0a6144-7800-4f89-9008-8ca5f7f7cec1 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2736511, 'name': Destroy_Task} progress is 33%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1204.126245] env[63028]: DEBUG oslo_vmware.api [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2736510, 'name': PowerOnVM_Task, 'duration_secs': 0.522561} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1204.126503] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1204.236277] env[63028]: DEBUG nova.compute.manager [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1204.236770] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8047a0e0-16db-4bcb-846e-f22f55500cab {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.318174] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9d4dd6b2-de09-423c-b976-3bf945cebc41 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Acquiring lock "d6137c80-0c09-4655-b264-472753b4fa9c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1204.318501] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9d4dd6b2-de09-423c-b976-3bf945cebc41 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Lock "d6137c80-0c09-4655-b264-472753b4fa9c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1204.318865] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9d4dd6b2-de09-423c-b976-3bf945cebc41 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Acquiring lock "d6137c80-0c09-4655-b264-472753b4fa9c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1204.319152] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9d4dd6b2-de09-423c-b976-3bf945cebc41 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Lock "d6137c80-0c09-4655-b264-472753b4fa9c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1204.319383] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9d4dd6b2-de09-423c-b976-3bf945cebc41 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Lock "d6137c80-0c09-4655-b264-472753b4fa9c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1204.321725] env[63028]: INFO nova.compute.manager [None req-9d4dd6b2-de09-423c-b976-3bf945cebc41 tempest-ServerActionsTestOtherA-431602134 
tempest-ServerActionsTestOtherA-431602134-project-member] [instance: d6137c80-0c09-4655-b264-472753b4fa9c] Terminating instance [ 1204.412755] env[63028]: DEBUG oslo_vmware.api [None req-ae0a6144-7800-4f89-9008-8ca5f7f7cec1 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2736511, 'name': Destroy_Task, 'duration_secs': 0.665167} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1204.413129] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-ae0a6144-7800-4f89-9008-8ca5f7f7cec1 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Destroyed the VM [ 1204.413214] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-ae0a6144-7800-4f89-9008-8ca5f7f7cec1 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Deleting Snapshot of the VM instance {{(pid=63028) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1204.413463] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-2c539789-a8f0-44ca-b5e2-d8e64a9fb826 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.419380] env[63028]: DEBUG oslo_vmware.api [None req-ae0a6144-7800-4f89-9008-8ca5f7f7cec1 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Waiting for the task: (returnval){ [ 1204.419380] env[63028]: value = "task-2736512" [ 1204.419380] env[63028]: _type = "Task" [ 1204.419380] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1204.427039] env[63028]: DEBUG oslo_vmware.api [None req-ae0a6144-7800-4f89-9008-8ca5f7f7cec1 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2736512, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1204.753384] env[63028]: DEBUG oslo_concurrency.lockutils [None req-caaff8e9-4cf1-4dca-abe5-92207b0d8781 tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Lock "e5767896-8203-4b18-826f-dcb2fe02268e" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 24.227s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1204.825467] env[63028]: DEBUG nova.compute.manager [None req-9d4dd6b2-de09-423c-b976-3bf945cebc41 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: d6137c80-0c09-4655-b264-472753b4fa9c] Start destroying the instance on the hypervisor. 
{{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1204.825759] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-9d4dd6b2-de09-423c-b976-3bf945cebc41 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: d6137c80-0c09-4655-b264-472753b4fa9c] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1204.826787] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74f0ce85-2017-4b2b-bdb7-e87bfb3e8d32 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.835128] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d4dd6b2-de09-423c-b976-3bf945cebc41 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: d6137c80-0c09-4655-b264-472753b4fa9c] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1204.835371] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cb24eb4e-dc2b-42f9-b262-5ef102c7076d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.840824] env[63028]: DEBUG oslo_vmware.api [None req-9d4dd6b2-de09-423c-b976-3bf945cebc41 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Waiting for the task: (returnval){ [ 1204.840824] env[63028]: value = "task-2736513" [ 1204.840824] env[63028]: _type = "Task" [ 1204.840824] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1204.848242] env[63028]: DEBUG oslo_vmware.api [None req-9d4dd6b2-de09-423c-b976-3bf945cebc41 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2736513, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1204.928811] env[63028]: DEBUG oslo_vmware.api [None req-ae0a6144-7800-4f89-9008-8ca5f7f7cec1 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2736512, 'name': RemoveSnapshot_Task, 'duration_secs': 0.417551} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1204.929122] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-ae0a6144-7800-4f89-9008-8ca5f7f7cec1 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Deleted Snapshot of the VM instance {{(pid=63028) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1204.929421] env[63028]: DEBUG nova.compute.manager [None req-ae0a6144-7800-4f89-9008-8ca5f7f7cec1 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1204.930217] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b524605e-38c1-4670-81de-5a369f910110 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.350781] env[63028]: DEBUG oslo_vmware.api [None req-9d4dd6b2-de09-423c-b976-3bf945cebc41 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2736513, 'name': PowerOffVM_Task, 'duration_secs': 0.169089} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1205.351072] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d4dd6b2-de09-423c-b976-3bf945cebc41 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: d6137c80-0c09-4655-b264-472753b4fa9c] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1205.351221] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-9d4dd6b2-de09-423c-b976-3bf945cebc41 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: d6137c80-0c09-4655-b264-472753b4fa9c] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1205.351459] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-15198be6-9336-420e-8cb1-beec24784664 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.410151] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-9d4dd6b2-de09-423c-b976-3bf945cebc41 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: d6137c80-0c09-4655-b264-472753b4fa9c] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1205.410405] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-9d4dd6b2-de09-423c-b976-3bf945cebc41 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: d6137c80-0c09-4655-b264-472753b4fa9c] Deleting contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1205.410559] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-9d4dd6b2-de09-423c-b976-3bf945cebc41 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Deleting the datastore file [datastore1] d6137c80-0c09-4655-b264-472753b4fa9c {{(pid=63028) file_delete 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1205.410825] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-396b3418-60d3-4abf-98f6-dcbf07699ffd {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.418386] env[63028]: DEBUG oslo_vmware.api [None req-9d4dd6b2-de09-423c-b976-3bf945cebc41 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Waiting for the task: (returnval){ [ 1205.418386] env[63028]: value = "task-2736515" [ 1205.418386] env[63028]: _type = "Task" [ 1205.418386] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1205.426345] env[63028]: DEBUG oslo_vmware.api [None req-9d4dd6b2-de09-423c-b976-3bf945cebc41 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2736515, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1205.442258] env[63028]: INFO nova.compute.manager [None req-ae0a6144-7800-4f89-9008-8ca5f7f7cec1 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Shelve offloading [ 1205.928082] env[63028]: DEBUG oslo_vmware.api [None req-9d4dd6b2-de09-423c-b976-3bf945cebc41 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Task: {'id': task-2736515, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.122704} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1205.928368] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-9d4dd6b2-de09-423c-b976-3bf945cebc41 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1205.928516] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-9d4dd6b2-de09-423c-b976-3bf945cebc41 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: d6137c80-0c09-4655-b264-472753b4fa9c] Deleted contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1205.928691] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-9d4dd6b2-de09-423c-b976-3bf945cebc41 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: d6137c80-0c09-4655-b264-472753b4fa9c] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1205.928870] env[63028]: INFO nova.compute.manager [None req-9d4dd6b2-de09-423c-b976-3bf945cebc41 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] [instance: d6137c80-0c09-4655-b264-472753b4fa9c] Took 1.10 seconds to destroy the instance on the hypervisor. [ 1205.929122] env[63028]: DEBUG oslo.service.loopingcall [None req-9d4dd6b2-de09-423c-b976-3bf945cebc41 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1205.929312] env[63028]: DEBUG nova.compute.manager [-] [instance: d6137c80-0c09-4655-b264-472753b4fa9c] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1205.929400] env[63028]: DEBUG nova.network.neutron [-] [instance: d6137c80-0c09-4655-b264-472753b4fa9c] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1205.945654] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae0a6144-7800-4f89-9008-8ca5f7f7cec1 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1205.945907] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-eacb7b10-1031-48aa-ad9c-53420130b202 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.952232] env[63028]: DEBUG oslo_vmware.api [None req-ae0a6144-7800-4f89-9008-8ca5f7f7cec1 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Waiting for the task: (returnval){ [ 1205.952232] env[63028]: value = "task-2736516" [ 1205.952232] env[63028]: _type = "Task" [ 1205.952232] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1205.959737] env[63028]: DEBUG oslo_vmware.api [None req-ae0a6144-7800-4f89-9008-8ca5f7f7cec1 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2736516, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1206.193797] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c206bc69-4254-4e2a-a04f-f41e4210dfa7 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Acquiring lock "e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1206.194096] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c206bc69-4254-4e2a-a04f-f41e4210dfa7 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Lock "e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1206.223237] env[63028]: DEBUG nova.compute.manager [req-4ccb9b8b-c204-499a-9808-d15728cf05db req-ec3e94b9-dfe0-41d8-8c49-097017b16055 service nova] [instance: d6137c80-0c09-4655-b264-472753b4fa9c] Received event network-vif-deleted-7d007428-6d28-49a8-aa26-6b6ec99613c2 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1206.223613] env[63028]: INFO nova.compute.manager [req-4ccb9b8b-c204-499a-9808-d15728cf05db req-ec3e94b9-dfe0-41d8-8c49-097017b16055 service nova] [instance: d6137c80-0c09-4655-b264-472753b4fa9c] Neutron deleted interface 7d007428-6d28-49a8-aa26-6b6ec99613c2; detaching it from the instance and deleting it from the info cache [ 1206.223758] env[63028]: DEBUG nova.network.neutron [req-4ccb9b8b-c204-499a-9808-d15728cf05db req-ec3e94b9-dfe0-41d8-8c49-097017b16055 service nova] [instance: d6137c80-0c09-4655-b264-472753b4fa9c] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1206.464040] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae0a6144-7800-4f89-9008-8ca5f7f7cec1 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: b438b12e-874a-4883-b606-c28258e5a01a] VM already powered off {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1206.464463] env[63028]: DEBUG nova.compute.manager [None req-ae0a6144-7800-4f89-9008-8ca5f7f7cec1 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1206.465228] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea64a49a-8854-41e4-aa3d-eaa18e475728 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.471635] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ae0a6144-7800-4f89-9008-8ca5f7f7cec1 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Acquiring lock "refresh_cache-b438b12e-874a-4883-b606-c28258e5a01a" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1206.471855] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ae0a6144-7800-4f89-9008-8ca5f7f7cec1 
tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Acquired lock "refresh_cache-b438b12e-874a-4883-b606-c28258e5a01a" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1206.472058] env[63028]: DEBUG nova.network.neutron [None req-ae0a6144-7800-4f89-9008-8ca5f7f7cec1 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1206.697990] env[63028]: DEBUG nova.compute.utils [None req-c206bc69-4254-4e2a-a04f-f41e4210dfa7 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1206.709098] env[63028]: DEBUG nova.network.neutron [-] [instance: d6137c80-0c09-4655-b264-472753b4fa9c] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1206.726182] env[63028]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-de814c61-f636-4803-8a59-ff70d4adfdd0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.735262] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff51f790-8be1-4f25-a829-1c9ac86a55b6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.763714] env[63028]: DEBUG nova.compute.manager [req-4ccb9b8b-c204-499a-9808-d15728cf05db req-ec3e94b9-dfe0-41d8-8c49-097017b16055 service nova] [instance: d6137c80-0c09-4655-b264-472753b4fa9c] Detach interface failed, port_id=7d007428-6d28-49a8-aa26-6b6ec99613c2, reason: Instance d6137c80-0c09-4655-b264-472753b4fa9c could not be found. 
{{(pid=63028) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1207.188790] env[63028]: DEBUG nova.network.neutron [None req-ae0a6144-7800-4f89-9008-8ca5f7f7cec1 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Updating instance_info_cache with network_info: [{"id": "51c6d9df-5b87-40a0-8c2b-5586869a3c0f", "address": "fa:16:3e:32:0e:40", "network": {"id": "49670a42-3caa-4492-8b32-f16f3fe77f4b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1158317332-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.229", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b4dcaef840f940bda057d0371cdc5adb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4df917f7-847a-4c0e-b0e3-69a52e4a1554", "external-id": "cl2-zone-457", "segmentation_id": 457, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap51c6d9df-5b", "ovs_interfaceid": "51c6d9df-5b87-40a0-8c2b-5586869a3c0f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1207.200486] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c206bc69-4254-4e2a-a04f-f41e4210dfa7 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Lock "e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1207.212009] env[63028]: INFO nova.compute.manager [-] [instance: d6137c80-0c09-4655-b264-472753b4fa9c] Took 1.28 seconds to deallocate network for instance. 
[ 1207.691703] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ae0a6144-7800-4f89-9008-8ca5f7f7cec1 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Releasing lock "refresh_cache-b438b12e-874a-4883-b606-c28258e5a01a" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1207.718322] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9d4dd6b2-de09-423c-b976-3bf945cebc41 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1207.718720] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9d4dd6b2-de09-423c-b976-3bf945cebc41 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1207.718961] env[63028]: DEBUG nova.objects.instance [None req-9d4dd6b2-de09-423c-b976-3bf945cebc41 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Lazy-loading 'resources' on Instance uuid d6137c80-0c09-4655-b264-472753b4fa9c {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1207.992919] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-ae0a6144-7800-4f89-9008-8ca5f7f7cec1 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1207.994185] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2702bad4-206f-4ed2-b058-8404ee8d3cb3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.004134] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-ae0a6144-7800-4f89-9008-8ca5f7f7cec1 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1208.004450] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-007ef762-38b3-45ed-8d6c-8f7c48c17b06 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.063126] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-ae0a6144-7800-4f89-9008-8ca5f7f7cec1 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1208.063420] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-ae0a6144-7800-4f89-9008-8ca5f7f7cec1 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Deleting contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1208.063659] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-ae0a6144-7800-4f89-9008-8ca5f7f7cec1 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Deleting the datastore file [datastore2] b438b12e-874a-4883-b606-c28258e5a01a {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1208.063989] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-08d12abe-09f8-47ea-9b92-e3bf9103399e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.070457] env[63028]: DEBUG oslo_vmware.api [None req-ae0a6144-7800-4f89-9008-8ca5f7f7cec1 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Waiting for the task: (returnval){ [ 1208.070457] env[63028]: value = "task-2736518" [ 1208.070457] env[63028]: _type = "Task" [ 1208.070457] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1208.079382] env[63028]: DEBUG oslo_vmware.api [None req-ae0a6144-7800-4f89-9008-8ca5f7f7cec1 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2736518, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1208.251605] env[63028]: DEBUG nova.compute.manager [req-d7e70224-0913-4c34-bbc9-95497d1e4829 req-4e870a41-3396-473f-a611-0d69edc2061a service nova] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Received event network-vif-unplugged-51c6d9df-5b87-40a0-8c2b-5586869a3c0f {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1208.251730] env[63028]: DEBUG oslo_concurrency.lockutils [req-d7e70224-0913-4c34-bbc9-95497d1e4829 req-4e870a41-3396-473f-a611-0d69edc2061a service nova] Acquiring lock "b438b12e-874a-4883-b606-c28258e5a01a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1208.251933] env[63028]: DEBUG oslo_concurrency.lockutils [req-d7e70224-0913-4c34-bbc9-95497d1e4829 req-4e870a41-3396-473f-a611-0d69edc2061a service nova] Lock "b438b12e-874a-4883-b606-c28258e5a01a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1208.252338] env[63028]: DEBUG oslo_concurrency.lockutils [req-d7e70224-0913-4c34-bbc9-95497d1e4829 req-4e870a41-3396-473f-a611-0d69edc2061a service nova] Lock "b438b12e-874a-4883-b606-c28258e5a01a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1208.252338] env[63028]: DEBUG nova.compute.manager [req-d7e70224-0913-4c34-bbc9-95497d1e4829 req-4e870a41-3396-473f-a611-0d69edc2061a service nova] [instance: b438b12e-874a-4883-b606-c28258e5a01a] No waiting events found dispatching network-vif-unplugged-51c6d9df-5b87-40a0-8c2b-5586869a3c0f {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1208.252444] env[63028]: WARNING nova.compute.manager 
[req-d7e70224-0913-4c34-bbc9-95497d1e4829 req-4e870a41-3396-473f-a611-0d69edc2061a service nova] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Received unexpected event network-vif-unplugged-51c6d9df-5b87-40a0-8c2b-5586869a3c0f for instance with vm_state shelved and task_state shelving_offloading. [ 1208.252591] env[63028]: DEBUG nova.compute.manager [req-d7e70224-0913-4c34-bbc9-95497d1e4829 req-4e870a41-3396-473f-a611-0d69edc2061a service nova] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Received event network-changed-51c6d9df-5b87-40a0-8c2b-5586869a3c0f {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1208.252745] env[63028]: DEBUG nova.compute.manager [req-d7e70224-0913-4c34-bbc9-95497d1e4829 req-4e870a41-3396-473f-a611-0d69edc2061a service nova] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Refreshing instance network info cache due to event network-changed-51c6d9df-5b87-40a0-8c2b-5586869a3c0f. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1208.252930] env[63028]: DEBUG oslo_concurrency.lockutils [req-d7e70224-0913-4c34-bbc9-95497d1e4829 req-4e870a41-3396-473f-a611-0d69edc2061a service nova] Acquiring lock "refresh_cache-b438b12e-874a-4883-b606-c28258e5a01a" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1208.253082] env[63028]: DEBUG oslo_concurrency.lockutils [req-d7e70224-0913-4c34-bbc9-95497d1e4829 req-4e870a41-3396-473f-a611-0d69edc2061a service nova] Acquired lock "refresh_cache-b438b12e-874a-4883-b606-c28258e5a01a" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1208.253241] env[63028]: DEBUG nova.network.neutron [req-d7e70224-0913-4c34-bbc9-95497d1e4829 req-4e870a41-3396-473f-a611-0d69edc2061a service nova] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Refreshing network info cache for port 51c6d9df-5b87-40a0-8c2b-5586869a3c0f {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1208.269331] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c206bc69-4254-4e2a-a04f-f41e4210dfa7 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Acquiring lock "e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1208.269562] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c206bc69-4254-4e2a-a04f-f41e4210dfa7 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Lock "e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1208.269779] env[63028]: INFO nova.compute.manager [None req-c206bc69-4254-4e2a-a04f-f41e4210dfa7 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc] Attaching volume 4e815e23-4cda-4cf7-83ec-ee59e9be7f10 to /dev/sdb [ 1208.302371] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cad8f9f4-1004-43b6-ab56-fbe149a4f4ee {{(pid=63028) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.306320] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37cdd747-6472-4068-bafb-0369c81cce02 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.314790] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f2fb1c9-c45e-42cb-91c0-a9ce6416db2d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.318248] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd7b0053-ec35-4cc0-baa7-d420068dba59 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.351564] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf6b4430-70fa-4486-8f48-ceff4a7d1a9a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.354089] env[63028]: DEBUG nova.virt.block_device [None req-c206bc69-4254-4e2a-a04f-f41e4210dfa7 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc] Updating existing volume attachment record: 3cf46e8f-b763-41dd-9d22-aca6b270f09c {{(pid=63028) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1208.360761] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5457c18b-3df7-4d54-bb68-b4aa4e4106bd {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.374193] env[63028]: DEBUG nova.compute.provider_tree [None req-9d4dd6b2-de09-423c-b976-3bf945cebc41 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1208.580936] env[63028]: DEBUG oslo_vmware.api [None req-ae0a6144-7800-4f89-9008-8ca5f7f7cec1 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2736518, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.12694} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1208.581188] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-ae0a6144-7800-4f89-9008-8ca5f7f7cec1 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1208.581382] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-ae0a6144-7800-4f89-9008-8ca5f7f7cec1 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Deleted contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1208.581562] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-ae0a6144-7800-4f89-9008-8ca5f7f7cec1 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1208.602980] env[63028]: INFO nova.scheduler.client.report [None req-ae0a6144-7800-4f89-9008-8ca5f7f7cec1 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Deleted allocations for instance b438b12e-874a-4883-b606-c28258e5a01a [ 1208.877774] env[63028]: DEBUG nova.scheduler.client.report [None req-9d4dd6b2-de09-423c-b976-3bf945cebc41 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1208.976613] env[63028]: DEBUG nova.network.neutron [req-d7e70224-0913-4c34-bbc9-95497d1e4829 req-4e870a41-3396-473f-a611-0d69edc2061a service nova] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Updated VIF entry in instance network info cache for port 51c6d9df-5b87-40a0-8c2b-5586869a3c0f. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1208.977098] env[63028]: DEBUG nova.network.neutron [req-d7e70224-0913-4c34-bbc9-95497d1e4829 req-4e870a41-3396-473f-a611-0d69edc2061a service nova] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Updating instance_info_cache with network_info: [{"id": "51c6d9df-5b87-40a0-8c2b-5586869a3c0f", "address": "fa:16:3e:32:0e:40", "network": {"id": "49670a42-3caa-4492-8b32-f16f3fe77f4b", "bridge": null, "label": "tempest-ServerActionsTestOtherB-1158317332-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.229", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b4dcaef840f940bda057d0371cdc5adb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap51c6d9df-5b", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1209.107603] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ae0a6144-7800-4f89-9008-8ca5f7f7cec1 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1209.383019] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9d4dd6b2-de09-423c-b976-3bf945cebc41 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.664s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1209.386380] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ae0a6144-7800-4f89-9008-8ca5f7f7cec1 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.279s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1209.386750] env[63028]: DEBUG nova.objects.instance [None req-ae0a6144-7800-4f89-9008-8ca5f7f7cec1 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Lazy-loading 'resources' on Instance uuid b438b12e-874a-4883-b606-c28258e5a01a {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1209.406853] env[63028]: INFO nova.scheduler.client.report [None req-9d4dd6b2-de09-423c-b976-3bf945cebc41 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Deleted allocations for instance d6137c80-0c09-4655-b264-472753b4fa9c [ 1209.479952] env[63028]: DEBUG oslo_concurrency.lockutils [req-d7e70224-0913-4c34-bbc9-95497d1e4829 req-4e870a41-3396-473f-a611-0d69edc2061a service nova] Releasing lock 
"refresh_cache-b438b12e-874a-4883-b606-c28258e5a01a" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1209.891174] env[63028]: DEBUG nova.objects.instance [None req-ae0a6144-7800-4f89-9008-8ca5f7f7cec1 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Lazy-loading 'numa_topology' on Instance uuid b438b12e-874a-4883-b606-c28258e5a01a {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1209.914037] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9d4dd6b2-de09-423c-b976-3bf945cebc41 tempest-ServerActionsTestOtherA-431602134 tempest-ServerActionsTestOtherA-431602134-project-member] Lock "d6137c80-0c09-4655-b264-472753b4fa9c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.595s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1210.393991] env[63028]: DEBUG nova.objects.base [None req-ae0a6144-7800-4f89-9008-8ca5f7f7cec1 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Object Instance lazy-loaded attributes: resources,numa_topology {{(pid=63028) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1210.452985] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf977c97-f504-4ab7-bb89-de3073b7a13e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.460139] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9a76a8a-3154-4071-9133-16159b94e448 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.490622] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b73db5db-1f1e-4258-a93a-6adb714f10f0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.498126] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0130488b-0130-417f-8e54-c2e0a59c4eab {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.512612] env[63028]: DEBUG nova.compute.provider_tree [None req-ae0a6144-7800-4f89-9008-8ca5f7f7cec1 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1210.626289] env[63028]: DEBUG oslo_concurrency.lockutils [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Acquiring lock "b438b12e-874a-4883-b606-c28258e5a01a" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1211.016428] env[63028]: DEBUG nova.scheduler.client.report [None req-ae0a6144-7800-4f89-9008-8ca5f7f7cec1 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': 
{'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1211.521971] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ae0a6144-7800-4f89-9008-8ca5f7f7cec1 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.136s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1212.033930] env[63028]: DEBUG oslo_concurrency.lockutils [None req-ae0a6144-7800-4f89-9008-8ca5f7f7cec1 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Lock "b438b12e-874a-4883-b606-c28258e5a01a" "released" by "nova.compute.manager.ComputeManager.shelve_instance.<locals>.do_shelve_instance" :: held 26.202s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1212.034958] env[63028]: DEBUG oslo_concurrency.lockutils [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Lock "b438b12e-874a-4883-b606-c28258e5a01a" acquired by "nova.compute.manager.ComputeManager.unshelve_instance.<locals>.do_unshelve_instance" :: waited 1.409s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1212.035250] env[63028]: INFO nova.compute.manager [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Unshelving [ 1212.898212] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-c206bc69-4254-4e2a-a04f-f41e4210dfa7 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc] Volume attach.
Driver type: vmdk {{(pid=63028) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1212.898451] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-c206bc69-4254-4e2a-a04f-f41e4210dfa7 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-550899', 'volume_id': '4e815e23-4cda-4cf7-83ec-ee59e9be7f10', 'name': 'volume-4e815e23-4cda-4cf7-83ec-ee59e9be7f10', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc', 'attached_at': '', 'detached_at': '', 'volume_id': '4e815e23-4cda-4cf7-83ec-ee59e9be7f10', 'serial': '4e815e23-4cda-4cf7-83ec-ee59e9be7f10'} {{(pid=63028) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1212.899372] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-492cf318-2063-4c9a-bcb1-ca3184617c50 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.916084] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f80bd0d-45be-4135-9b50-41f1ded3fbc3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.941309] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-c206bc69-4254-4e2a-a04f-f41e4210dfa7 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc] Reconfiguring VM instance instance-00000076 to attach disk [datastore1] volume-4e815e23-4cda-4cf7-83ec-ee59e9be7f10/volume-4e815e23-4cda-4cf7-83ec-ee59e9be7f10.vmdk or device None with type thin {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1212.942041] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0d595131-9fcf-47f9-bfa1-ffc71b2ee92b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.959941] env[63028]: DEBUG oslo_vmware.api [None req-c206bc69-4254-4e2a-a04f-f41e4210dfa7 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Waiting for the task: (returnval){ [ 1212.959941] env[63028]: value = "task-2736521" [ 1212.959941] env[63028]: _type = "Task" [ 1212.959941] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1212.967897] env[63028]: DEBUG oslo_vmware.api [None req-c206bc69-4254-4e2a-a04f-f41e4210dfa7 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': task-2736521, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1213.061661] env[63028]: DEBUG oslo_concurrency.lockutils [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1213.062033] env[63028]: DEBUG oslo_concurrency.lockutils [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1213.062255] env[63028]: DEBUG nova.objects.instance [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Lazy-loading 'pci_requests' on Instance uuid b438b12e-874a-4883-b606-c28258e5a01a {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1213.468924] env[63028]: DEBUG oslo_vmware.api [None req-c206bc69-4254-4e2a-a04f-f41e4210dfa7 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': task-2736521, 'name': ReconfigVM_Task, 'duration_secs': 0.348113} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1213.469225] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-c206bc69-4254-4e2a-a04f-f41e4210dfa7 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc] Reconfigured VM instance instance-00000076 to attach disk [datastore1] volume-4e815e23-4cda-4cf7-83ec-ee59e9be7f10/volume-4e815e23-4cda-4cf7-83ec-ee59e9be7f10.vmdk or device None with type thin {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1213.473876] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-12db7323-cf14-4a5b-8a69-ad344c061a0f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.488067] env[63028]: DEBUG oslo_vmware.api [None req-c206bc69-4254-4e2a-a04f-f41e4210dfa7 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Waiting for the task: (returnval){ [ 1213.488067] env[63028]: value = "task-2736522" [ 1213.488067] env[63028]: _type = "Task" [ 1213.488067] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1213.495464] env[63028]: DEBUG oslo_vmware.api [None req-c206bc69-4254-4e2a-a04f-f41e4210dfa7 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': task-2736522, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1213.566578] env[63028]: DEBUG nova.objects.instance [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Lazy-loading 'numa_topology' on Instance uuid b438b12e-874a-4883-b606-c28258e5a01a {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1213.998082] env[63028]: DEBUG oslo_vmware.api [None req-c206bc69-4254-4e2a-a04f-f41e4210dfa7 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': task-2736522, 'name': ReconfigVM_Task, 'duration_secs': 0.12729} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1213.998394] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-c206bc69-4254-4e2a-a04f-f41e4210dfa7 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-550899', 'volume_id': '4e815e23-4cda-4cf7-83ec-ee59e9be7f10', 'name': 'volume-4e815e23-4cda-4cf7-83ec-ee59e9be7f10', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc', 'attached_at': '', 'detached_at': '', 'volume_id': '4e815e23-4cda-4cf7-83ec-ee59e9be7f10', 'serial': '4e815e23-4cda-4cf7-83ec-ee59e9be7f10'} {{(pid=63028) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1214.069062] env[63028]: INFO nova.compute.claims [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1215.033380] env[63028]: DEBUG nova.objects.instance [None req-c206bc69-4254-4e2a-a04f-f41e4210dfa7 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Lazy-loading 'flavor' on Instance uuid e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1215.138694] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c672e77d-cbb3-4e0b-bace-d845e65a5153 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.145774] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6ec7b0c-428e-4da2-ae18-e3c72f0f8454 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.175300] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be4edf6d-0bed-4015-b50c-f6ae4e732fa0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.182654] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82a9d572-1463-4695-99bb-b9d20341b5d2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.195817] env[63028]: DEBUG 
nova.compute.provider_tree [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1215.538606] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c206bc69-4254-4e2a-a04f-f41e4210dfa7 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Lock "e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.269s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1215.700359] env[63028]: DEBUG nova.scheduler.client.report [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1215.721200] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7551628f-5a11-45e4-97ba-f18a299f6e74 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Acquiring lock "e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1215.721450] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7551628f-5a11-45e4-97ba-f18a299f6e74 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Lock "e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1216.205457] env[63028]: DEBUG oslo_concurrency.lockutils [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.143s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1216.224653] env[63028]: INFO nova.compute.manager [None req-7551628f-5a11-45e4-97ba-f18a299f6e74 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc] Detaching volume 4e815e23-4cda-4cf7-83ec-ee59e9be7f10 [ 1216.235214] env[63028]: INFO nova.network.neutron [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Updating port 51c6d9df-5b87-40a0-8c2b-5586869a3c0f with attributes {'binding:host_id': 'cpu-1', 
'device_owner': 'compute:nova'} [ 1216.260117] env[63028]: INFO nova.virt.block_device [None req-7551628f-5a11-45e4-97ba-f18a299f6e74 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc] Attempting to driver detach volume 4e815e23-4cda-4cf7-83ec-ee59e9be7f10 from mountpoint /dev/sdb [ 1216.260370] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-7551628f-5a11-45e4-97ba-f18a299f6e74 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc] Volume detach. Driver type: vmdk {{(pid=63028) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1216.260560] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-7551628f-5a11-45e4-97ba-f18a299f6e74 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-550899', 'volume_id': '4e815e23-4cda-4cf7-83ec-ee59e9be7f10', 'name': 'volume-4e815e23-4cda-4cf7-83ec-ee59e9be7f10', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc', 'attached_at': '', 'detached_at': '', 'volume_id': '4e815e23-4cda-4cf7-83ec-ee59e9be7f10', 'serial': '4e815e23-4cda-4cf7-83ec-ee59e9be7f10'} {{(pid=63028) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1216.261798] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-899404a9-19da-4648-873b-df5d79257bf7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.283815] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ca024ee-a558-4883-8b3e-f8f66c47383b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.291037] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88223707-a05d-43c3-9131-4ef1c8ebf3d8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.310657] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3398c62f-ffe0-43e6-9b14-4f6f893c1274 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.325130] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-7551628f-5a11-45e4-97ba-f18a299f6e74 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] The volume has not been displaced from its original location: [datastore1] volume-4e815e23-4cda-4cf7-83ec-ee59e9be7f10/volume-4e815e23-4cda-4cf7-83ec-ee59e9be7f10.vmdk. No consolidation needed. 
{{(pid=63028) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1216.330273] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-7551628f-5a11-45e4-97ba-f18a299f6e74 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc] Reconfiguring VM instance instance-00000076 to detach disk 2001 {{(pid=63028) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1216.330564] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c430d119-89db-4345-94a2-11d68cf58ad0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.348458] env[63028]: DEBUG oslo_vmware.api [None req-7551628f-5a11-45e4-97ba-f18a299f6e74 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Waiting for the task: (returnval){ [ 1216.348458] env[63028]: value = "task-2736523" [ 1216.348458] env[63028]: _type = "Task" [ 1216.348458] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1216.355953] env[63028]: DEBUG oslo_vmware.api [None req-7551628f-5a11-45e4-97ba-f18a299f6e74 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': task-2736523, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1216.858510] env[63028]: DEBUG oslo_vmware.api [None req-7551628f-5a11-45e4-97ba-f18a299f6e74 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': task-2736523, 'name': ReconfigVM_Task, 'duration_secs': 0.217342} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1216.859853] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-7551628f-5a11-45e4-97ba-f18a299f6e74 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc] Reconfigured VM instance instance-00000076 to detach disk 2001 {{(pid=63028) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1216.863528] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d229de52-45f6-4bf1-becc-01cb8153f90f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.879047] env[63028]: DEBUG oslo_vmware.api [None req-7551628f-5a11-45e4-97ba-f18a299f6e74 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Waiting for the task: (returnval){ [ 1216.879047] env[63028]: value = "task-2736524" [ 1216.879047] env[63028]: _type = "Task" [ 1216.879047] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1216.886141] env[63028]: DEBUG oslo_vmware.api [None req-7551628f-5a11-45e4-97ba-f18a299f6e74 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': task-2736524, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1217.389175] env[63028]: DEBUG oslo_vmware.api [None req-7551628f-5a11-45e4-97ba-f18a299f6e74 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': task-2736524, 'name': ReconfigVM_Task, 'duration_secs': 0.133956} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1217.389498] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-7551628f-5a11-45e4-97ba-f18a299f6e74 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-550899', 'volume_id': '4e815e23-4cda-4cf7-83ec-ee59e9be7f10', 'name': 'volume-4e815e23-4cda-4cf7-83ec-ee59e9be7f10', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc', 'attached_at': '', 'detached_at': '', 'volume_id': '4e815e23-4cda-4cf7-83ec-ee59e9be7f10', 'serial': '4e815e23-4cda-4cf7-83ec-ee59e9be7f10'} {{(pid=63028) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1217.596753] env[63028]: DEBUG nova.compute.manager [req-99cd57f4-1149-4016-bb5e-7cb43009cb5e req-204170d8-2117-4306-8565-9319b70b80cb service nova] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Received event network-vif-plugged-51c6d9df-5b87-40a0-8c2b-5586869a3c0f {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1217.596981] env[63028]: DEBUG oslo_concurrency.lockutils [req-99cd57f4-1149-4016-bb5e-7cb43009cb5e req-204170d8-2117-4306-8565-9319b70b80cb service nova] Acquiring lock "b438b12e-874a-4883-b606-c28258e5a01a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1217.597216] env[63028]: DEBUG oslo_concurrency.lockutils [req-99cd57f4-1149-4016-bb5e-7cb43009cb5e req-204170d8-2117-4306-8565-9319b70b80cb service nova] Lock "b438b12e-874a-4883-b606-c28258e5a01a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1217.597393] env[63028]: DEBUG oslo_concurrency.lockutils [req-99cd57f4-1149-4016-bb5e-7cb43009cb5e req-204170d8-2117-4306-8565-9319b70b80cb service nova] Lock "b438b12e-874a-4883-b606-c28258e5a01a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1217.597670] env[63028]: DEBUG nova.compute.manager [req-99cd57f4-1149-4016-bb5e-7cb43009cb5e req-204170d8-2117-4306-8565-9319b70b80cb service nova] [instance: b438b12e-874a-4883-b606-c28258e5a01a] No waiting events found dispatching network-vif-plugged-51c6d9df-5b87-40a0-8c2b-5586869a3c0f {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1217.597803] env[63028]: WARNING nova.compute.manager [req-99cd57f4-1149-4016-bb5e-7cb43009cb5e req-204170d8-2117-4306-8565-9319b70b80cb service nova] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Received 
unexpected event network-vif-plugged-51c6d9df-5b87-40a0-8c2b-5586869a3c0f for instance with vm_state shelved_offloaded and task_state spawning. [ 1217.676983] env[63028]: DEBUG oslo_concurrency.lockutils [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Acquiring lock "refresh_cache-b438b12e-874a-4883-b606-c28258e5a01a" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1217.676983] env[63028]: DEBUG oslo_concurrency.lockutils [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Acquired lock "refresh_cache-b438b12e-874a-4883-b606-c28258e5a01a" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1217.677144] env[63028]: DEBUG nova.network.neutron [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1217.929923] env[63028]: DEBUG nova.objects.instance [None req-7551628f-5a11-45e4-97ba-f18a299f6e74 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Lazy-loading 'flavor' on Instance uuid e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1218.362632] env[63028]: DEBUG nova.network.neutron [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Updating instance_info_cache with network_info: [{"id": "51c6d9df-5b87-40a0-8c2b-5586869a3c0f", "address": "fa:16:3e:32:0e:40", "network": {"id": "49670a42-3caa-4492-8b32-f16f3fe77f4b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1158317332-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.229", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b4dcaef840f940bda057d0371cdc5adb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4df917f7-847a-4c0e-b0e3-69a52e4a1554", "external-id": "cl2-zone-457", "segmentation_id": 457, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap51c6d9df-5b", "ovs_interfaceid": "51c6d9df-5b87-40a0-8c2b-5586869a3c0f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1218.866064] env[63028]: DEBUG oslo_concurrency.lockutils [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Releasing lock "refresh_cache-b438b12e-874a-4883-b606-c28258e5a01a" 
{{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1218.893730] env[63028]: DEBUG nova.virt.hardware [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='eb6599b41c21c685253605e64082f6e5',container_format='bare',created_at=2025-02-20T18:04:48Z,direct_url=,disk_format='vmdk',id=52137ff1-e088-4c15-85e2-e0e3091166a2,min_disk=1,min_ram=0,name='tempest-ServerActionsTestOtherB-server-1971473207-shelved',owner='b4dcaef840f940bda057d0371cdc5adb',properties=ImageMetaProps,protected=,size=31669248,status='active',tags=,updated_at=2025-02-20T18:05:06Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1218.894106] env[63028]: DEBUG nova.virt.hardware [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1218.894354] env[63028]: DEBUG nova.virt.hardware [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1218.894646] env[63028]: DEBUG nova.virt.hardware [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1218.894877] env[63028]: DEBUG nova.virt.hardware [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1218.895119] env[63028]: DEBUG nova.virt.hardware [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1218.895414] env[63028]: DEBUG nova.virt.hardware [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1218.895647] env[63028]: DEBUG nova.virt.hardware [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 
1218.895895] env[63028]: DEBUG nova.virt.hardware [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1218.896176] env[63028]: DEBUG nova.virt.hardware [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1218.896426] env[63028]: DEBUG nova.virt.hardware [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1218.897349] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d8a6185-fbf6-4da6-abf5-0b98a83acc7c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.905414] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2403c88d-fdff-475a-b3ec-af3508662e4b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.919244] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:32:0e:40', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4df917f7-847a-4c0e-b0e3-69a52e4a1554', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '51c6d9df-5b87-40a0-8c2b-5586869a3c0f', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1218.926626] env[63028]: DEBUG oslo.service.loopingcall [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1218.926863] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1218.927145] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cc714052-4592-4dff-84cd-9e65974c3849 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.941288] env[63028]: DEBUG oslo_concurrency.lockutils [None req-7551628f-5a11-45e4-97ba-f18a299f6e74 tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Lock "e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.220s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1218.948492] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1218.948492] env[63028]: value = "task-2736525" [ 1218.948492] env[63028]: _type = "Task" [ 1218.948492] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1218.960528] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2736525, 'name': CreateVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1219.461945] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2736525, 'name': CreateVM_Task, 'duration_secs': 0.275824} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1219.462208] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1219.462871] env[63028]: DEBUG oslo_concurrency.lockutils [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/52137ff1-e088-4c15-85e2-e0e3091166a2" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1219.463863] env[63028]: DEBUG oslo_concurrency.lockutils [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Acquired lock "[datastore1] devstack-image-cache_base/52137ff1-e088-4c15-85e2-e0e3091166a2" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1219.463863] env[63028]: DEBUG oslo_concurrency.lockutils [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/52137ff1-e088-4c15-85e2-e0e3091166a2" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1219.463863] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8faa8e03-3656-495b-b5fb-10389eeab00b {{(pid=63028) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.469109] env[63028]: DEBUG oslo_vmware.api [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Waiting for the task: (returnval){ [ 1219.469109] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52e8f283-cc77-ad8c-7978-000c7ad5c09f" [ 1219.469109] env[63028]: _type = "Task" [ 1219.469109] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1219.477548] env[63028]: DEBUG oslo_vmware.api [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52e8f283-cc77-ad8c-7978-000c7ad5c09f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1219.621890] env[63028]: DEBUG nova.compute.manager [req-87eac59e-97e6-42be-a89a-f1fe4fa95da9 req-630eb5b6-9adf-4c0f-a9ea-ba4d8d0c7ada service nova] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Received event network-changed-51c6d9df-5b87-40a0-8c2b-5586869a3c0f {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1219.622113] env[63028]: DEBUG nova.compute.manager [req-87eac59e-97e6-42be-a89a-f1fe4fa95da9 req-630eb5b6-9adf-4c0f-a9ea-ba4d8d0c7ada service nova] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Refreshing instance network info cache due to event network-changed-51c6d9df-5b87-40a0-8c2b-5586869a3c0f. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1219.622335] env[63028]: DEBUG oslo_concurrency.lockutils [req-87eac59e-97e6-42be-a89a-f1fe4fa95da9 req-630eb5b6-9adf-4c0f-a9ea-ba4d8d0c7ada service nova] Acquiring lock "refresh_cache-b438b12e-874a-4883-b606-c28258e5a01a" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1219.622482] env[63028]: DEBUG oslo_concurrency.lockutils [req-87eac59e-97e6-42be-a89a-f1fe4fa95da9 req-630eb5b6-9adf-4c0f-a9ea-ba4d8d0c7ada service nova] Acquired lock "refresh_cache-b438b12e-874a-4883-b606-c28258e5a01a" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1219.622643] env[63028]: DEBUG nova.network.neutron [req-87eac59e-97e6-42be-a89a-f1fe4fa95da9 req-630eb5b6-9adf-4c0f-a9ea-ba4d8d0c7ada service nova] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Refreshing network info cache for port 51c6d9df-5b87-40a0-8c2b-5586869a3c0f {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1219.963410] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8a0ec3ed-fe6b-47ed-ac63-59654596b16e tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Acquiring lock "e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1219.963773] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8a0ec3ed-fe6b-47ed-ac63-59654596b16e tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Lock "e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc" acquired by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1219.963823] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8a0ec3ed-fe6b-47ed-ac63-59654596b16e tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Acquiring lock "e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1219.964025] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8a0ec3ed-fe6b-47ed-ac63-59654596b16e tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Lock "e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1219.964211] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8a0ec3ed-fe6b-47ed-ac63-59654596b16e tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Lock "e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1219.966483] env[63028]: INFO nova.compute.manager [None req-8a0ec3ed-fe6b-47ed-ac63-59654596b16e tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc] Terminating instance [ 1219.981235] env[63028]: DEBUG oslo_concurrency.lockutils [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Releasing lock "[datastore1] devstack-image-cache_base/52137ff1-e088-4c15-85e2-e0e3091166a2" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1219.981455] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Processing image 52137ff1-e088-4c15-85e2-e0e3091166a2 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1219.981678] env[63028]: DEBUG oslo_concurrency.lockutils [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/52137ff1-e088-4c15-85e2-e0e3091166a2/52137ff1-e088-4c15-85e2-e0e3091166a2.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1219.981822] env[63028]: DEBUG oslo_concurrency.lockutils [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Acquired lock "[datastore1] devstack-image-cache_base/52137ff1-e088-4c15-85e2-e0e3091166a2/52137ff1-e088-4c15-85e2-e0e3091166a2.vmdk" {{(pid=63028) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1219.981997] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1219.982245] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0cf7dadd-121b-4ad8-8134-e0fb2feeb0da {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.991131] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1219.991305] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1219.991998] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ecc28149-2548-4ba4-a905-0697c7f5d7f5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.997501] env[63028]: DEBUG oslo_vmware.api [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Waiting for the task: (returnval){ [ 1219.997501] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52879d35-60b8-1d2b-1935-bac3be462849" [ 1219.997501] env[63028]: _type = "Task" [ 1219.997501] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1220.011356] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Preparing fetch location {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1220.011609] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Fetch image to [datastore1] OSTACK_IMG_644a59be-8220-4e0f-8026-600b8886c337/OSTACK_IMG_644a59be-8220-4e0f-8026-600b8886c337.vmdk {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1220.011791] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Downloading stream optimized image 52137ff1-e088-4c15-85e2-e0e3091166a2 to [datastore1] OSTACK_IMG_644a59be-8220-4e0f-8026-600b8886c337/OSTACK_IMG_644a59be-8220-4e0f-8026-600b8886c337.vmdk on the data store datastore1 as vApp {{(pid=63028) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1220.011961] env[63028]: DEBUG nova.virt.vmwareapi.images [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Downloading image file data 52137ff1-e088-4c15-85e2-e0e3091166a2 to the ESX as VM named 'OSTACK_IMG_644a59be-8220-4e0f-8026-600b8886c337' {{(pid=63028) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1220.085959] env[63028]: DEBUG oslo_vmware.rw_handles [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1220.085959] env[63028]: value = "resgroup-9" [ 1220.085959] env[63028]: _type = "ResourcePool" [ 1220.085959] env[63028]: }. {{(pid=63028) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1220.086246] env[63028]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-32100c18-3a8b-4025-80a5-4d21682e37ac {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.107122] env[63028]: DEBUG oslo_vmware.rw_handles [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Lease: (returnval){ [ 1220.107122] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52b07741-7167-a33d-95dd-d51a553d2627" [ 1220.107122] env[63028]: _type = "HttpNfcLease" [ 1220.107122] env[63028]: } obtained for vApp import into resource pool (val){ [ 1220.107122] env[63028]: value = "resgroup-9" [ 1220.107122] env[63028]: _type = "ResourcePool" [ 1220.107122] env[63028]: }. 
{{(pid=63028) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1220.107568] env[63028]: DEBUG oslo_vmware.api [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Waiting for the lease: (returnval){ [ 1220.107568] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52b07741-7167-a33d-95dd-d51a553d2627" [ 1220.107568] env[63028]: _type = "HttpNfcLease" [ 1220.107568] env[63028]: } to be ready. {{(pid=63028) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1220.113392] env[63028]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1220.113392] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52b07741-7167-a33d-95dd-d51a553d2627" [ 1220.113392] env[63028]: _type = "HttpNfcLease" [ 1220.113392] env[63028]: } is initializing. {{(pid=63028) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1220.377894] env[63028]: DEBUG nova.network.neutron [req-87eac59e-97e6-42be-a89a-f1fe4fa95da9 req-630eb5b6-9adf-4c0f-a9ea-ba4d8d0c7ada service nova] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Updated VIF entry in instance network info cache for port 51c6d9df-5b87-40a0-8c2b-5586869a3c0f. {{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1220.378289] env[63028]: DEBUG nova.network.neutron [req-87eac59e-97e6-42be-a89a-f1fe4fa95da9 req-630eb5b6-9adf-4c0f-a9ea-ba4d8d0c7ada service nova] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Updating instance_info_cache with network_info: [{"id": "51c6d9df-5b87-40a0-8c2b-5586869a3c0f", "address": "fa:16:3e:32:0e:40", "network": {"id": "49670a42-3caa-4492-8b32-f16f3fe77f4b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1158317332-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.229", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b4dcaef840f940bda057d0371cdc5adb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4df917f7-847a-4c0e-b0e3-69a52e4a1554", "external-id": "cl2-zone-457", "segmentation_id": 457, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap51c6d9df-5b", "ovs_interfaceid": "51c6d9df-5b87-40a0-8c2b-5586869a3c0f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1220.470429] env[63028]: DEBUG nova.compute.manager [None req-8a0ec3ed-fe6b-47ed-ac63-59654596b16e tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc] Start destroying the instance on the hypervisor. 
{{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1220.470690] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-8a0ec3ed-fe6b-47ed-ac63-59654596b16e tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1220.471633] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0433aa11-81f2-4424-a763-5e154a519a51 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.479511] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a0ec3ed-fe6b-47ed-ac63-59654596b16e tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1220.479717] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-51e36cc8-19cd-403d-a8d2-f55028062fae {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.486714] env[63028]: DEBUG oslo_vmware.api [None req-8a0ec3ed-fe6b-47ed-ac63-59654596b16e tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Waiting for the task: (returnval){ [ 1220.486714] env[63028]: value = "task-2736527" [ 1220.486714] env[63028]: _type = "Task" [ 1220.486714] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1220.493710] env[63028]: DEBUG oslo_vmware.api [None req-8a0ec3ed-fe6b-47ed-ac63-59654596b16e tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': task-2736527, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1220.615048] env[63028]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1220.615048] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52b07741-7167-a33d-95dd-d51a553d2627" [ 1220.615048] env[63028]: _type = "HttpNfcLease" [ 1220.615048] env[63028]: } is ready. {{(pid=63028) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1220.615355] env[63028]: DEBUG oslo_vmware.rw_handles [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1220.615355] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52b07741-7167-a33d-95dd-d51a553d2627" [ 1220.615355] env[63028]: _type = "HttpNfcLease" [ 1220.615355] env[63028]: }. 
{{(pid=63028) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1220.616084] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e153b7f3-9341-45d5-8b0c-bf5b82645030 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.623279] env[63028]: DEBUG oslo_vmware.rw_handles [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/523dfc53-2529-8eee-eba9-010efc090951/disk-0.vmdk from lease info. {{(pid=63028) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1220.623438] env[63028]: DEBUG oslo_vmware.rw_handles [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Creating HTTP connection to write to file with size = 31669248 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/523dfc53-2529-8eee-eba9-010efc090951/disk-0.vmdk. {{(pid=63028) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1220.690562] env[63028]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-92b40b97-cb79-4946-964d-c115b8f608f0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.880949] env[63028]: DEBUG oslo_concurrency.lockutils [req-87eac59e-97e6-42be-a89a-f1fe4fa95da9 req-630eb5b6-9adf-4c0f-a9ea-ba4d8d0c7ada service nova] Releasing lock "refresh_cache-b438b12e-874a-4883-b606-c28258e5a01a" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1220.999342] env[63028]: DEBUG oslo_vmware.api [None req-8a0ec3ed-fe6b-47ed-ac63-59654596b16e tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': task-2736527, 'name': PowerOffVM_Task, 'duration_secs': 0.186035} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1221.002255] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a0ec3ed-fe6b-47ed-ac63-59654596b16e tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1221.002386] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-8a0ec3ed-fe6b-47ed-ac63-59654596b16e tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1221.002671] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f8e58345-3f83-483e-8d11-dccfdacd2829 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.067839] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-8a0ec3ed-fe6b-47ed-ac63-59654596b16e tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1221.068111] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-8a0ec3ed-fe6b-47ed-ac63-59654596b16e tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc] Deleting contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1221.068305] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-8a0ec3ed-fe6b-47ed-ac63-59654596b16e tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Deleting the datastore file [datastore1] e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1221.068639] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-306a1127-3295-438e-93ad-39e92a3b0c29 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.076754] env[63028]: DEBUG oslo_vmware.api [None req-8a0ec3ed-fe6b-47ed-ac63-59654596b16e tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Waiting for the task: (returnval){ [ 1221.076754] env[63028]: value = "task-2736529" [ 1221.076754] env[63028]: _type = "Task" [ 1221.076754] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1221.086812] env[63028]: DEBUG oslo_vmware.api [None req-8a0ec3ed-fe6b-47ed-ac63-59654596b16e tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': task-2736529, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1221.588183] env[63028]: DEBUG oslo_vmware.api [None req-8a0ec3ed-fe6b-47ed-ac63-59654596b16e tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Task: {'id': task-2736529, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.223623} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1221.588425] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-8a0ec3ed-fe6b-47ed-ac63-59654596b16e tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1221.588612] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-8a0ec3ed-fe6b-47ed-ac63-59654596b16e tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc] Deleted contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1221.588799] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-8a0ec3ed-fe6b-47ed-ac63-59654596b16e tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1221.588981] env[63028]: INFO nova.compute.manager [None req-8a0ec3ed-fe6b-47ed-ac63-59654596b16e tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] [instance: e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1221.589246] env[63028]: DEBUG oslo.service.loopingcall [None req-8a0ec3ed-fe6b-47ed-ac63-59654596b16e tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1221.589432] env[63028]: DEBUG nova.compute.manager [-] [instance: e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1221.589527] env[63028]: DEBUG nova.network.neutron [-] [instance: e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1221.775321] env[63028]: DEBUG oslo_vmware.rw_handles [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Completed reading data from the image iterator. {{(pid=63028) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1221.775617] env[63028]: DEBUG oslo_vmware.rw_handles [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/523dfc53-2529-8eee-eba9-010efc090951/disk-0.vmdk. 
{{(pid=63028) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1221.776717] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3a058e9-fd60-4efb-a1e4-382391ac94e1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.785898] env[63028]: DEBUG oslo_vmware.rw_handles [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/523dfc53-2529-8eee-eba9-010efc090951/disk-0.vmdk is in state: ready. {{(pid=63028) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1221.786126] env[63028]: DEBUG oslo_vmware.rw_handles [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Releasing lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/523dfc53-2529-8eee-eba9-010efc090951/disk-0.vmdk. {{(pid=63028) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1221.786436] env[63028]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-638f1025-d09d-4587-8fa8-f14a9f9bbfbc {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.097469] env[63028]: DEBUG nova.compute.manager [req-6214706b-029c-4614-96c5-88df204b0df0 req-b40cd02a-322e-46f9-8e44-2049f00e53d5 service nova] [instance: e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc] Received event network-vif-deleted-4e372880-e224-473d-8bdf-03af99b8a0ac {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1222.097706] env[63028]: INFO nova.compute.manager [req-6214706b-029c-4614-96c5-88df204b0df0 req-b40cd02a-322e-46f9-8e44-2049f00e53d5 service nova] [instance: e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc] Neutron deleted interface 4e372880-e224-473d-8bdf-03af99b8a0ac; detaching it from the instance and deleting it from the info cache [ 1222.097706] env[63028]: DEBUG nova.network.neutron [req-6214706b-029c-4614-96c5-88df204b0df0 req-b40cd02a-322e-46f9-8e44-2049f00e53d5 service nova] [instance: e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1222.207755] env[63028]: DEBUG oslo_vmware.rw_handles [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Closed VMDK write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/523dfc53-2529-8eee-eba9-010efc090951/disk-0.vmdk. 
{{(pid=63028) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1222.207931] env[63028]: INFO nova.virt.vmwareapi.images [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Downloaded image file data 52137ff1-e088-4c15-85e2-e0e3091166a2 [ 1222.208835] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dab03eea-6ba0-4a49-930b-b7ff4eee2387 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.224070] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-14a58da1-64c5-42bf-8ae0-0d1254ee3c62 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.246797] env[63028]: INFO nova.virt.vmwareapi.images [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: b438b12e-874a-4883-b606-c28258e5a01a] The imported VM was unregistered [ 1222.249089] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Caching image {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1222.249311] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Creating directory with path [datastore1] devstack-image-cache_base/52137ff1-e088-4c15-85e2-e0e3091166a2 {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1222.249566] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f7e8c863-ea18-42fd-89e8-c0d89b68d030 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.259569] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Created directory with path [datastore1] devstack-image-cache_base/52137ff1-e088-4c15-85e2-e0e3091166a2 {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1222.259569] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_644a59be-8220-4e0f-8026-600b8886c337/OSTACK_IMG_644a59be-8220-4e0f-8026-600b8886c337.vmdk to [datastore1] devstack-image-cache_base/52137ff1-e088-4c15-85e2-e0e3091166a2/52137ff1-e088-4c15-85e2-e0e3091166a2.vmdk. 
{{(pid=63028) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1222.259569] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-2f7c02b6-2a4b-47cb-89d1-45d3ce3febc9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.265340] env[63028]: DEBUG oslo_vmware.api [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Waiting for the task: (returnval){ [ 1222.265340] env[63028]: value = "task-2736531" [ 1222.265340] env[63028]: _type = "Task" [ 1222.265340] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1222.272895] env[63028]: DEBUG oslo_vmware.api [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2736531, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1222.576242] env[63028]: DEBUG nova.network.neutron [-] [instance: e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1222.600321] env[63028]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-556180d0-b06f-427e-a3fe-f844dd2bad62 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.612328] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45d17112-565f-416a-b6b7-cbed9655934c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.643729] env[63028]: DEBUG nova.compute.manager [req-6214706b-029c-4614-96c5-88df204b0df0 req-b40cd02a-322e-46f9-8e44-2049f00e53d5 service nova] [instance: e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc] Detach interface failed, port_id=4e372880-e224-473d-8bdf-03af99b8a0ac, reason: Instance e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc could not be found. {{(pid=63028) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1222.777826] env[63028]: DEBUG oslo_vmware.api [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2736531, 'name': MoveVirtualDisk_Task} progress is 24%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1223.079838] env[63028]: INFO nova.compute.manager [-] [instance: e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc] Took 1.49 seconds to deallocate network for instance. [ 1223.277922] env[63028]: DEBUG oslo_vmware.api [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2736531, 'name': MoveVirtualDisk_Task} progress is 46%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1223.587190] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8a0ec3ed-fe6b-47ed-ac63-59654596b16e tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1223.587472] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8a0ec3ed-fe6b-47ed-ac63-59654596b16e tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1223.587709] env[63028]: DEBUG nova.objects.instance [None req-8a0ec3ed-fe6b-47ed-ac63-59654596b16e tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Lazy-loading 'resources' on Instance uuid e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1223.778066] env[63028]: DEBUG oslo_vmware.api [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2736531, 'name': MoveVirtualDisk_Task} progress is 71%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1224.163395] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-933f9411-9ac5-4c76-8890-597338be969f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.174624] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15dff07a-d446-4b4b-9663-02775d9bd546 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.207335] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45569c18-ad67-45e5-b523-3f229a91e29e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.217378] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dd3c832-497e-4099-9c3b-551fd6ff0092 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.233631] env[63028]: DEBUG nova.compute.provider_tree [None req-8a0ec3ed-fe6b-47ed-ac63-59654596b16e tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1224.279746] env[63028]: DEBUG oslo_vmware.api [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2736531, 'name': MoveVirtualDisk_Task} progress is 94%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1224.736692] env[63028]: DEBUG nova.scheduler.client.report [None req-8a0ec3ed-fe6b-47ed-ac63-59654596b16e tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1224.778860] env[63028]: DEBUG oslo_vmware.api [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2736531, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.217947} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1224.779062] env[63028]: INFO nova.virt.vmwareapi.ds_util [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_644a59be-8220-4e0f-8026-600b8886c337/OSTACK_IMG_644a59be-8220-4e0f-8026-600b8886c337.vmdk to [datastore1] devstack-image-cache_base/52137ff1-e088-4c15-85e2-e0e3091166a2/52137ff1-e088-4c15-85e2-e0e3091166a2.vmdk. [ 1224.779244] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Cleaning up location [datastore1] OSTACK_IMG_644a59be-8220-4e0f-8026-600b8886c337 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1224.779406] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_644a59be-8220-4e0f-8026-600b8886c337 {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1224.779653] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-75c5e24e-dab7-47fc-a2f1-ad50f5fae9d6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.786085] env[63028]: DEBUG oslo_vmware.api [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Waiting for the task: (returnval){ [ 1224.786085] env[63028]: value = "task-2736532" [ 1224.786085] env[63028]: _type = "Task" [ 1224.786085] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1224.793125] env[63028]: DEBUG oslo_vmware.api [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2736532, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1225.241336] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8a0ec3ed-fe6b-47ed-ac63-59654596b16e tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.654s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1225.258820] env[63028]: INFO nova.scheduler.client.report [None req-8a0ec3ed-fe6b-47ed-ac63-59654596b16e tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Deleted allocations for instance e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc [ 1225.296280] env[63028]: DEBUG oslo_vmware.api [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2736532, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.04516} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1225.296585] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1225.296722] env[63028]: DEBUG oslo_concurrency.lockutils [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Releasing lock "[datastore1] devstack-image-cache_base/52137ff1-e088-4c15-85e2-e0e3091166a2/52137ff1-e088-4c15-85e2-e0e3091166a2.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1225.296960] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/52137ff1-e088-4c15-85e2-e0e3091166a2/52137ff1-e088-4c15-85e2-e0e3091166a2.vmdk to [datastore1] b438b12e-874a-4883-b606-c28258e5a01a/b438b12e-874a-4883-b606-c28258e5a01a.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1225.297249] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5bec474a-07ca-4924-b6aa-662aadca31a4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.303815] env[63028]: DEBUG oslo_vmware.api [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Waiting for the task: (returnval){ [ 1225.303815] env[63028]: value = "task-2736533" [ 1225.303815] env[63028]: _type = "Task" [ 1225.303815] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1225.313177] env[63028]: DEBUG oslo_vmware.api [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2736533, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1225.766427] env[63028]: DEBUG oslo_concurrency.lockutils [None req-8a0ec3ed-fe6b-47ed-ac63-59654596b16e tempest-AttachVolumeNegativeTest-1243436832 tempest-AttachVolumeNegativeTest-1243436832-project-member] Lock "e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.803s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1225.816430] env[63028]: DEBUG oslo_vmware.api [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2736533, 'name': CopyVirtualDisk_Task} progress is 18%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1226.316415] env[63028]: DEBUG oslo_vmware.api [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2736533, 'name': CopyVirtualDisk_Task} progress is 40%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1226.816918] env[63028]: DEBUG oslo_vmware.api [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2736533, 'name': CopyVirtualDisk_Task} progress is 63%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1227.318078] env[63028]: DEBUG oslo_vmware.api [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2736533, 'name': CopyVirtualDisk_Task} progress is 88%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1227.817568] env[63028]: DEBUG oslo_vmware.api [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2736533, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.245688} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1227.817835] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/52137ff1-e088-4c15-85e2-e0e3091166a2/52137ff1-e088-4c15-85e2-e0e3091166a2.vmdk to [datastore1] b438b12e-874a-4883-b606-c28258e5a01a/b438b12e-874a-4883-b606-c28258e5a01a.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1227.818613] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39031cc7-2f41-4bd7-a762-a26dcf78131f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.840109] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Reconfiguring VM instance instance-00000074 to attach disk [datastore1] b438b12e-874a-4883-b606-c28258e5a01a/b438b12e-874a-4883-b606-c28258e5a01a.vmdk or device None with type streamOptimized {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1227.840344] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3653a359-039f-4e70-8ab8-1b20fbad078e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.859815] env[63028]: DEBUG oslo_vmware.api [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Waiting for the task: (returnval){ [ 1227.859815] env[63028]: value = "task-2736535" [ 1227.859815] env[63028]: _type = "Task" [ 1227.859815] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1227.867423] env[63028]: DEBUG oslo_vmware.api [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2736535, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1228.369694] env[63028]: DEBUG oslo_vmware.api [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2736535, 'name': ReconfigVM_Task, 'duration_secs': 0.290451} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1228.371259] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Reconfigured VM instance instance-00000074 to attach disk [datastore1] b438b12e-874a-4883-b606-c28258e5a01a/b438b12e-874a-4883-b606-c28258e5a01a.vmdk or device None with type streamOptimized {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1228.371259] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-df3464b6-d522-4207-82de-70269d525e8f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.376880] env[63028]: DEBUG oslo_vmware.api [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Waiting for the task: (returnval){ [ 1228.376880] env[63028]: value = "task-2736536" [ 1228.376880] env[63028]: _type = "Task" [ 1228.376880] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1228.383957] env[63028]: DEBUG oslo_vmware.api [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2736536, 'name': Rename_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1228.886243] env[63028]: DEBUG oslo_vmware.api [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2736536, 'name': Rename_Task, 'duration_secs': 0.135519} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1228.886545] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1228.886796] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4c1eb740-0d75-4636-af38-6253aa2682ac {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.893850] env[63028]: DEBUG oslo_vmware.api [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Waiting for the task: (returnval){ [ 1228.893850] env[63028]: value = "task-2736537" [ 1228.893850] env[63028]: _type = "Task" [ 1228.893850] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1228.901229] env[63028]: DEBUG oslo_vmware.api [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2736537, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1229.403877] env[63028]: DEBUG oslo_vmware.api [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2736537, 'name': PowerOnVM_Task, 'duration_secs': 0.440839} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1229.404345] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1229.503274] env[63028]: DEBUG nova.compute.manager [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1229.504202] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24415d9c-ebe5-48f9-9e3f-9658e22e2f5e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.021694] env[63028]: DEBUG oslo_concurrency.lockutils [None req-cea7c903-d775-48eb-94dd-681b56bf20d2 tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Lock "b438b12e-874a-4883-b606-c28258e5a01a" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 17.987s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1230.379830] env[63028]: DEBUG oslo_concurrency.lockutils [None req-85567cb0-be14-4c1f-9f7c-3386b03f283a tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Acquiring lock "b438b12e-874a-4883-b606-c28258e5a01a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1230.380117] env[63028]: DEBUG oslo_concurrency.lockutils [None req-85567cb0-be14-4c1f-9f7c-3386b03f283a tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Lock "b438b12e-874a-4883-b606-c28258e5a01a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1230.380339] env[63028]: DEBUG oslo_concurrency.lockutils [None req-85567cb0-be14-4c1f-9f7c-3386b03f283a tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Acquiring lock "b438b12e-874a-4883-b606-c28258e5a01a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1230.380528] env[63028]: DEBUG oslo_concurrency.lockutils [None req-85567cb0-be14-4c1f-9f7c-3386b03f283a tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Lock 
"b438b12e-874a-4883-b606-c28258e5a01a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1230.380702] env[63028]: DEBUG oslo_concurrency.lockutils [None req-85567cb0-be14-4c1f-9f7c-3386b03f283a tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Lock "b438b12e-874a-4883-b606-c28258e5a01a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1230.382737] env[63028]: INFO nova.compute.manager [None req-85567cb0-be14-4c1f-9f7c-3386b03f283a tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Terminating instance [ 1230.886425] env[63028]: DEBUG nova.compute.manager [None req-85567cb0-be14-4c1f-9f7c-3386b03f283a tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Start destroying the instance on the hypervisor. {{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1230.886794] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-85567cb0-be14-4c1f-9f7c-3386b03f283a tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1230.887606] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afb8a582-2dd6-4e8b-8226-a2677cb7b73d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.895718] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-85567cb0-be14-4c1f-9f7c-3386b03f283a tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1230.895892] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-62c78082-2bb9-4e12-8309-1700215166a6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.901790] env[63028]: DEBUG oslo_vmware.api [None req-85567cb0-be14-4c1f-9f7c-3386b03f283a tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Waiting for the task: (returnval){ [ 1230.901790] env[63028]: value = "task-2736539" [ 1230.901790] env[63028]: _type = "Task" [ 1230.901790] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1230.910050] env[63028]: DEBUG oslo_vmware.api [None req-85567cb0-be14-4c1f-9f7c-3386b03f283a tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2736539, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1231.411736] env[63028]: DEBUG oslo_vmware.api [None req-85567cb0-be14-4c1f-9f7c-3386b03f283a tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2736539, 'name': PowerOffVM_Task, 'duration_secs': 0.20187} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1231.411993] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-85567cb0-be14-4c1f-9f7c-3386b03f283a tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1231.412183] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-85567cb0-be14-4c1f-9f7c-3386b03f283a tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1231.412431] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d4e7d54f-ea0b-4d36-ab9b-917ffd4bd0f0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.473482] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-85567cb0-be14-4c1f-9f7c-3386b03f283a tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1231.473697] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-85567cb0-be14-4c1f-9f7c-3386b03f283a tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Deleting contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1231.473879] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-85567cb0-be14-4c1f-9f7c-3386b03f283a tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Deleting the datastore file [datastore1] b438b12e-874a-4883-b606-c28258e5a01a {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1231.474164] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9dbd1093-15ed-4fd1-89c0-d15923996812 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.480523] env[63028]: DEBUG oslo_vmware.api [None req-85567cb0-be14-4c1f-9f7c-3386b03f283a tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Waiting for the task: (returnval){ [ 1231.480523] env[63028]: value = "task-2736541" [ 1231.480523] env[63028]: _type = "Task" [ 1231.480523] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1231.487708] env[63028]: DEBUG oslo_vmware.api [None req-85567cb0-be14-4c1f-9f7c-3386b03f283a tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2736541, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1231.991098] env[63028]: DEBUG oslo_vmware.api [None req-85567cb0-be14-4c1f-9f7c-3386b03f283a tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Task: {'id': task-2736541, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.146934} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1231.991098] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-85567cb0-be14-4c1f-9f7c-3386b03f283a tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1231.991098] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-85567cb0-be14-4c1f-9f7c-3386b03f283a tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Deleted contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1231.991098] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-85567cb0-be14-4c1f-9f7c-3386b03f283a tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1231.991098] env[63028]: INFO nova.compute.manager [None req-85567cb0-be14-4c1f-9f7c-3386b03f283a tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Took 1.10 seconds to destroy the instance on the hypervisor. [ 1231.991797] env[63028]: DEBUG oslo.service.loopingcall [None req-85567cb0-be14-4c1f-9f7c-3386b03f283a tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1231.991797] env[63028]: DEBUG nova.compute.manager [-] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1231.991797] env[63028]: DEBUG nova.network.neutron [-] [instance: b438b12e-874a-4883-b606-c28258e5a01a] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1232.415204] env[63028]: DEBUG nova.compute.manager [req-123f1e4f-af87-45bf-a390-2c386eb491b1 req-cee7bbc9-0916-4e55-9d26-4aa7eaa8f5d7 service nova] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Received event network-vif-deleted-51c6d9df-5b87-40a0-8c2b-5586869a3c0f {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1232.415438] env[63028]: INFO nova.compute.manager [req-123f1e4f-af87-45bf-a390-2c386eb491b1 req-cee7bbc9-0916-4e55-9d26-4aa7eaa8f5d7 service nova] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Neutron deleted interface 51c6d9df-5b87-40a0-8c2b-5586869a3c0f; detaching it from the instance and deleting it from the info cache [ 1232.415997] env[63028]: DEBUG nova.network.neutron [req-123f1e4f-af87-45bf-a390-2c386eb491b1 req-cee7bbc9-0916-4e55-9d26-4aa7eaa8f5d7 service nova] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1232.903139] env[63028]: DEBUG nova.network.neutron [-] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1232.918132] env[63028]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3aaa0305-1988-4e9a-81e4-0b030e654173 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.927640] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f17afb64-ea99-4a15-956a-7ddfeec510db {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.955985] env[63028]: DEBUG nova.compute.manager [req-123f1e4f-af87-45bf-a390-2c386eb491b1 req-cee7bbc9-0916-4e55-9d26-4aa7eaa8f5d7 service nova] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Detach interface failed, port_id=51c6d9df-5b87-40a0-8c2b-5586869a3c0f, reason: Instance b438b12e-874a-4883-b606-c28258e5a01a could not be found. {{(pid=63028) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1233.405754] env[63028]: INFO nova.compute.manager [-] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Took 1.41 seconds to deallocate network for instance. 
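The destroy sequence traced above (PowerOffVM_Task, UnregisterVM, then DeleteDatastoreFile_Task, each awaited via wait_for_task before Nova deallocates the network) can be reproduced directly against vCenter with the public oslo.vmware API. The following is a minimal illustrative sketch, not Nova's own implementation: the session credentials and the managed object reference values are placeholders, and only the datastore path is taken from the log entries above.

    # Sketch of the teardown steps shown in the log, using oslo.vmware directly.
    # All connection details and moref values below are placeholders.
    from oslo_vmware import api, vim_util

    session = api.VMwareAPISession('vc.example.test', 'user', 'secret',
                                   api_retry_count=10, task_poll_interval=0.5)

    # Managed object references; the values are illustrative only.
    vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')
    dc_ref = vim_util.get_moref('datacenter-2', 'Datacenter')

    # 1. Power off the VM and block until the task completes
    #    (PowerOffVM_Task / wait_for_task in the log).
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task)

    # 2. Unregister the VM from the inventory; a plain call, not a vCenter task.
    session.invoke_api(session.vim, 'UnregisterVM', vm_ref)

    # 3. Delete the instance directory from the datastore
    #    (DeleteDatastoreFile_Task in the log).
    file_manager = session.vim.service_content.fileManager
    task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                              file_manager,
                              name='[datastore1] b438b12e-874a-4883-b606-c28258e5a01a',
                              datacenter=dc_ref)
    session.wait_for_task(task)

After these hypervisor-side steps return, the compute manager proceeds exactly as the subsequent log entries show: network deallocation, resource-tracker usage update under the "compute_resources" lock, and removal of the instance's placement allocations.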
[ 1233.912088] env[63028]: DEBUG oslo_concurrency.lockutils [None req-85567cb0-be14-4c1f-9f7c-3386b03f283a tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1233.912374] env[63028]: DEBUG oslo_concurrency.lockutils [None req-85567cb0-be14-4c1f-9f7c-3386b03f283a tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1233.912599] env[63028]: DEBUG nova.objects.instance [None req-85567cb0-be14-4c1f-9f7c-3386b03f283a tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Lazy-loading 'resources' on Instance uuid b438b12e-874a-4883-b606-c28258e5a01a {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1234.467386] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-426fa97a-ba96-4a26-8fc0-172d21ff4e9a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.474972] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47686b5c-fb69-4537-928f-bb690d54cba1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.504647] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b3d4dc4-2b9c-4d14-a4be-a0557eb3872b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.511643] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26f3d8a6-92b1-4917-b01c-31e950afe85b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.524537] env[63028]: DEBUG nova.compute.provider_tree [None req-85567cb0-be14-4c1f-9f7c-3386b03f283a tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1235.028053] env[63028]: DEBUG nova.scheduler.client.report [None req-85567cb0-be14-4c1f-9f7c-3386b03f283a tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1235.532839] env[63028]: DEBUG oslo_concurrency.lockutils [None req-85567cb0-be14-4c1f-9f7c-3386b03f283a 
tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.620s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1235.553658] env[63028]: INFO nova.scheduler.client.report [None req-85567cb0-be14-4c1f-9f7c-3386b03f283a tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Deleted allocations for instance b438b12e-874a-4883-b606-c28258e5a01a [ 1236.063362] env[63028]: DEBUG oslo_concurrency.lockutils [None req-85567cb0-be14-4c1f-9f7c-3386b03f283a tempest-ServerActionsTestOtherB-816222261 tempest-ServerActionsTestOtherB-816222261-project-member] Lock "b438b12e-874a-4883-b606-c28258e5a01a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.683s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1237.777369] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1237.777728] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1238.283272] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1238.283589] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1238.283658] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1238.283774] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1238.284019] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1238.284183] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1238.284659] env[63028]: DEBUG nova.compute.manager [None 
req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63028) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}} [ 1238.284659] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1238.788012] env[63028]: DEBUG oslo_concurrency.lockutils [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1238.788305] env[63028]: DEBUG oslo_concurrency.lockutils [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1238.788413] env[63028]: DEBUG oslo_concurrency.lockutils [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1238.788566] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63028) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1238.789471] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71a4cfa5-0a64-4abd-86f5-79df5d222bd1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.797693] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7da22138-3ffc-4da1-beb5-d8749d593ffb {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.811284] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e7a4724-f4e5-4ba2-8472-9a4ad142da32 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.817650] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f2024e8-f3b7-41a5-9b9f-332998ea7d7a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.846110] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179783MB free_disk=111GB free_vcpus=48 pci_devices=None {{(pid=63028) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1238.846280] env[63028]: DEBUG oslo_concurrency.lockutils [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" 
{{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1238.846451] env[63028]: DEBUG oslo_concurrency.lockutils [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1239.852968] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4] Skipping migration as instance is neither resizing nor live-migrating. {{(pid=63028) _update_usage_from_migrations /opt/stack/nova/nova/compute/resource_tracker.py:1563}} [ 1239.867410] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance e5767896-8203-4b18-826f-dcb2fe02268e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1239.867567] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Migration 15573750-a330-4590-ae47-c7d9a85c10f2 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1239.867692] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1239.867869] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Total usable vcpus: 48, total allocated vcpus: 2 {{(pid=63028) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1239.868016] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=896MB phys_disk=200GB used_disk=2GB total_vcpus=48 used_vcpus=2 pci_stats=[] {{(pid=63028) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1239.915544] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20204f8f-5459-4d76-957d-5246b0f19b67 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.923334] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3db8935f-df01-4dba-a173-ec66ae303206 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.952326] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dd0bff1-9aee-4437-8994-7d2098db64f2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.959447] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-770f42ee-3b29-4c82-818a-678a01e2ed6b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.972537] env[63028]: DEBUG nova.compute.provider_tree [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1240.250634] env[63028]: DEBUG nova.compute.manager [None req-e5e5d13a-407e-4111-9325-5f1f3915272a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4] Stashing vm_state: active {{(pid=63028) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1240.475504] env[63028]: DEBUG nova.scheduler.client.report [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1240.773271] env[63028]: DEBUG oslo_concurrency.lockutils [None req-e5e5d13a-407e-4111-9325-5f1f3915272a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=63028) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1240.980510] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63028) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1240.980927] env[63028]: DEBUG oslo_concurrency.lockutils [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.134s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1240.981071] env[63028]: DEBUG oslo_concurrency.lockutils [None req-e5e5d13a-407e-4111-9325-5f1f3915272a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.208s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1241.150590] env[63028]: DEBUG oslo_concurrency.lockutils [None req-55830c41-1575-490a-9f30-46a656dfa40a tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Acquiring lock "e5767896-8203-4b18-826f-dcb2fe02268e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1241.150885] env[63028]: DEBUG oslo_concurrency.lockutils [None req-55830c41-1575-490a-9f30-46a656dfa40a tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Lock "e5767896-8203-4b18-826f-dcb2fe02268e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1241.151176] env[63028]: DEBUG oslo_concurrency.lockutils [None req-55830c41-1575-490a-9f30-46a656dfa40a tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Acquiring lock "e5767896-8203-4b18-826f-dcb2fe02268e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1241.151419] env[63028]: DEBUG oslo_concurrency.lockutils [None req-55830c41-1575-490a-9f30-46a656dfa40a tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Lock "e5767896-8203-4b18-826f-dcb2fe02268e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1241.151600] env[63028]: DEBUG oslo_concurrency.lockutils [None req-55830c41-1575-490a-9f30-46a656dfa40a tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Lock "e5767896-8203-4b18-826f-dcb2fe02268e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1241.153597] env[63028]: INFO 
nova.compute.manager [None req-55830c41-1575-490a-9f30-46a656dfa40a tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Terminating instance [ 1241.485954] env[63028]: INFO nova.compute.claims [None req-e5e5d13a-407e-4111-9325-5f1f3915272a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1241.657842] env[63028]: DEBUG nova.compute.manager [None req-55830c41-1575-490a-9f30-46a656dfa40a tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Start destroying the instance on the hypervisor. {{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1241.658103] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-55830c41-1575-490a-9f30-46a656dfa40a tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1241.658946] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50342b2b-59d6-4532-a866-175277df457c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.666841] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-55830c41-1575-490a-9f30-46a656dfa40a tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1241.667091] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-220897c1-f3f1-4b34-8213-9d815d7105ac {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.673342] env[63028]: DEBUG oslo_vmware.api [None req-55830c41-1575-490a-9f30-46a656dfa40a tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Waiting for the task: (returnval){ [ 1241.673342] env[63028]: value = "task-2736544" [ 1241.673342] env[63028]: _type = "Task" [ 1241.673342] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1241.682575] env[63028]: DEBUG oslo_vmware.api [None req-55830c41-1575-490a-9f30-46a656dfa40a tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2736544, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1241.992674] env[63028]: INFO nova.compute.resource_tracker [None req-e5e5d13a-407e-4111-9325-5f1f3915272a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4] Updating resource usage from migration 15573750-a330-4590-ae47-c7d9a85c10f2 [ 1242.049614] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4a4cbc2-5a4f-4e6a-9da1-5cfc9d701567 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.056984] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb80bb5c-1732-43fb-8996-bb0b005426d9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.087284] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8134a2a0-c866-4696-8371-4c9787595d29 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.094195] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fef4107-664b-4cbb-8688-6fde4b3051ce {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.106812] env[63028]: DEBUG nova.compute.provider_tree [None req-e5e5d13a-407e-4111-9325-5f1f3915272a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1242.183197] env[63028]: DEBUG oslo_vmware.api [None req-55830c41-1575-490a-9f30-46a656dfa40a tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2736544, 'name': PowerOffVM_Task, 'duration_secs': 0.233226} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1242.183410] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-55830c41-1575-490a-9f30-46a656dfa40a tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1242.183579] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-55830c41-1575-490a-9f30-46a656dfa40a tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1242.183824] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-eac255ce-a551-4e5c-8c6c-de55dfe5cf55 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.240509] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-55830c41-1575-490a-9f30-46a656dfa40a tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1242.240750] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-55830c41-1575-490a-9f30-46a656dfa40a tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Deleting contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1242.240909] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-55830c41-1575-490a-9f30-46a656dfa40a tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Deleting the datastore file [datastore2] e5767896-8203-4b18-826f-dcb2fe02268e {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1242.241203] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0d3468d2-bb41-422a-b711-54b443cc865c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.248266] env[63028]: DEBUG oslo_vmware.api [None req-55830c41-1575-490a-9f30-46a656dfa40a tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Waiting for the task: (returnval){ [ 1242.248266] env[63028]: value = "task-2736546" [ 1242.248266] env[63028]: _type = "Task" [ 1242.248266] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1242.256248] env[63028]: DEBUG oslo_vmware.api [None req-55830c41-1575-490a-9f30-46a656dfa40a tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2736546, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1242.609772] env[63028]: DEBUG nova.scheduler.client.report [None req-e5e5d13a-407e-4111-9325-5f1f3915272a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1242.758355] env[63028]: DEBUG oslo_vmware.api [None req-55830c41-1575-490a-9f30-46a656dfa40a tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Task: {'id': task-2736546, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.139208} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1242.758603] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-55830c41-1575-490a-9f30-46a656dfa40a tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1242.758795] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-55830c41-1575-490a-9f30-46a656dfa40a tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Deleted contents of the VM from datastore datastore2 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1242.758973] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-55830c41-1575-490a-9f30-46a656dfa40a tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1242.759164] env[63028]: INFO nova.compute.manager [None req-55830c41-1575-490a-9f30-46a656dfa40a tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Took 1.10 seconds to destroy the instance on the hypervisor. [ 1242.759405] env[63028]: DEBUG oslo.service.loopingcall [None req-55830c41-1575-490a-9f30-46a656dfa40a tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1242.759586] env[63028]: DEBUG nova.compute.manager [-] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1242.759669] env[63028]: DEBUG nova.network.neutron [-] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1243.114718] env[63028]: DEBUG oslo_concurrency.lockutils [None req-e5e5d13a-407e-4111-9325-5f1f3915272a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.134s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1243.115088] env[63028]: INFO nova.compute.manager [None req-e5e5d13a-407e-4111-9325-5f1f3915272a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4] Migrating [ 1243.188247] env[63028]: DEBUG nova.compute.manager [req-9bc72bff-c8db-4f58-86de-d63eed65d32e req-de4212d1-a130-4b24-b699-6728cbea1276 service nova] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Received event network-vif-deleted-f16f5758-9834-448c-8002-199fff053deb {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1243.188465] env[63028]: INFO nova.compute.manager [req-9bc72bff-c8db-4f58-86de-d63eed65d32e req-de4212d1-a130-4b24-b699-6728cbea1276 service nova] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Neutron deleted interface f16f5758-9834-448c-8002-199fff053deb; detaching it from the instance and deleting it from the info cache [ 1243.188638] env[63028]: DEBUG nova.network.neutron [req-9bc72bff-c8db-4f58-86de-d63eed65d32e req-de4212d1-a130-4b24-b699-6728cbea1276 service nova] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1243.629681] env[63028]: DEBUG oslo_concurrency.lockutils [None req-e5e5d13a-407e-4111-9325-5f1f3915272a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Acquiring lock "refresh_cache-670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1243.629866] env[63028]: DEBUG oslo_concurrency.lockutils [None req-e5e5d13a-407e-4111-9325-5f1f3915272a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Acquired lock "refresh_cache-670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1243.630085] env[63028]: DEBUG nova.network.neutron [None req-e5e5d13a-407e-4111-9325-5f1f3915272a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1243.670716] env[63028]: DEBUG nova.network.neutron [-] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Updating instance_info_cache with network_info: [] {{(pid=63028) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1243.691908] env[63028]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c75af032-b017-4137-905a-2d4f64693938 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.703735] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-151e6b24-0885-4a68-8b05-f9129344fe17 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.728133] env[63028]: DEBUG nova.compute.manager [req-9bc72bff-c8db-4f58-86de-d63eed65d32e req-de4212d1-a130-4b24-b699-6728cbea1276 service nova] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Detach interface failed, port_id=f16f5758-9834-448c-8002-199fff053deb, reason: Instance e5767896-8203-4b18-826f-dcb2fe02268e could not be found. {{(pid=63028) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1244.174365] env[63028]: INFO nova.compute.manager [-] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Took 1.41 seconds to deallocate network for instance. [ 1244.342559] env[63028]: DEBUG nova.network.neutron [None req-e5e5d13a-407e-4111-9325-5f1f3915272a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4] Updating instance_info_cache with network_info: [{"id": "a4f28208-8404-4dcc-a133-bd9f94ad027c", "address": "fa:16:3e:2d:98:bb", "network": {"id": "ec7daf90-9857-4c67-8588-1d8404499409", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-574413795-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.163", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e85128c5c889438bbb1df571b7756c6a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e1a5c1-4ae7-409b-8de7-d401684ef60d", "external-id": "nsx-vlan-transportzone-740", "segmentation_id": 740, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa4f28208-84", "ovs_interfaceid": "a4f28208-8404-4dcc-a133-bd9f94ad027c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1244.680758] env[63028]: DEBUG oslo_concurrency.lockutils [None req-55830c41-1575-490a-9f30-46a656dfa40a tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1244.681068] env[63028]: DEBUG oslo_concurrency.lockutils [None req-55830c41-1575-490a-9f30-46a656dfa40a tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Lock "compute_resources" acquired by 
"nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1244.681317] env[63028]: DEBUG nova.objects.instance [None req-55830c41-1575-490a-9f30-46a656dfa40a tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Lazy-loading 'resources' on Instance uuid e5767896-8203-4b18-826f-dcb2fe02268e {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1244.845570] env[63028]: DEBUG oslo_concurrency.lockutils [None req-e5e5d13a-407e-4111-9325-5f1f3915272a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Releasing lock "refresh_cache-670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1245.237502] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5db4363-0e91-48fa-b81c-dfe92af56765 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.245080] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-658714e3-3971-4d73-bf0b-368c95b7f31c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.273710] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ba7f858-06b3-4e1d-a3ba-6d34fcb94c22 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.280382] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba59521b-a6fa-4bd6-9a52-07460af2e8e1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.294211] env[63028]: DEBUG nova.compute.provider_tree [None req-55830c41-1575-490a-9f30-46a656dfa40a tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1245.797663] env[63028]: DEBUG nova.scheduler.client.report [None req-55830c41-1575-490a-9f30-46a656dfa40a tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1246.303033] env[63028]: DEBUG oslo_concurrency.lockutils [None req-55830c41-1575-490a-9f30-46a656dfa40a tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.621s {{(pid=63028) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1246.319542] env[63028]: INFO nova.scheduler.client.report [None req-55830c41-1575-490a-9f30-46a656dfa40a tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Deleted allocations for instance e5767896-8203-4b18-826f-dcb2fe02268e [ 1246.358975] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6df9d661-7147-4cb9-ad99-9d02d35554ad {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.377808] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-e5e5d13a-407e-4111-9325-5f1f3915272a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4] Updating instance '670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4' progress to 0 {{(pid=63028) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1246.826975] env[63028]: DEBUG oslo_concurrency.lockutils [None req-55830c41-1575-490a-9f30-46a656dfa40a tempest-AttachVolumeShelveTestJSON-1261217514 tempest-AttachVolumeShelveTestJSON-1261217514-project-member] Lock "e5767896-8203-4b18-826f-dcb2fe02268e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.676s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1246.883129] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5e5d13a-407e-4111-9325-5f1f3915272a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1246.883442] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c983e719-f09e-4e1d-86e4-6a20ea6b9ae7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.890583] env[63028]: DEBUG oslo_vmware.api [None req-e5e5d13a-407e-4111-9325-5f1f3915272a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Waiting for the task: (returnval){ [ 1246.890583] env[63028]: value = "task-2736547" [ 1246.890583] env[63028]: _type = "Task" [ 1246.890583] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1246.897859] env[63028]: DEBUG oslo_vmware.api [None req-e5e5d13a-407e-4111-9325-5f1f3915272a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736547, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1247.401088] env[63028]: DEBUG oslo_vmware.api [None req-e5e5d13a-407e-4111-9325-5f1f3915272a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736547, 'name': PowerOffVM_Task, 'duration_secs': 0.213781} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1247.401445] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5e5d13a-407e-4111-9325-5f1f3915272a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1247.401540] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-e5e5d13a-407e-4111-9325-5f1f3915272a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4] Updating instance '670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4' progress to 17 {{(pid=63028) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1247.908334] env[63028]: DEBUG nova.virt.hardware [None req-e5e5d13a-407e-4111-9325-5f1f3915272a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1247.908603] env[63028]: DEBUG nova.virt.hardware [None req-e5e5d13a-407e-4111-9325-5f1f3915272a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1247.908764] env[63028]: DEBUG nova.virt.hardware [None req-e5e5d13a-407e-4111-9325-5f1f3915272a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1247.908945] env[63028]: DEBUG nova.virt.hardware [None req-e5e5d13a-407e-4111-9325-5f1f3915272a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1247.909199] env[63028]: DEBUG nova.virt.hardware [None req-e5e5d13a-407e-4111-9325-5f1f3915272a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1247.909485] env[63028]: DEBUG nova.virt.hardware [None req-e5e5d13a-407e-4111-9325-5f1f3915272a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1247.909779] env[63028]: DEBUG nova.virt.hardware [None req-e5e5d13a-407e-4111-9325-5f1f3915272a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Topology 
preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1247.909962] env[63028]: DEBUG nova.virt.hardware [None req-e5e5d13a-407e-4111-9325-5f1f3915272a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1247.910152] env[63028]: DEBUG nova.virt.hardware [None req-e5e5d13a-407e-4111-9325-5f1f3915272a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1247.910372] env[63028]: DEBUG nova.virt.hardware [None req-e5e5d13a-407e-4111-9325-5f1f3915272a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1247.910673] env[63028]: DEBUG nova.virt.hardware [None req-e5e5d13a-407e-4111-9325-5f1f3915272a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1247.915647] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b4a8876b-6bff-42f7-a2da-996c150a02e3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.930539] env[63028]: DEBUG oslo_vmware.api [None req-e5e5d13a-407e-4111-9325-5f1f3915272a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Waiting for the task: (returnval){ [ 1247.930539] env[63028]: value = "task-2736548" [ 1247.930539] env[63028]: _type = "Task" [ 1247.930539] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1247.939257] env[63028]: DEBUG oslo_vmware.api [None req-e5e5d13a-407e-4111-9325-5f1f3915272a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736548, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1248.442018] env[63028]: DEBUG oslo_vmware.api [None req-e5e5d13a-407e-4111-9325-5f1f3915272a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736548, 'name': ReconfigVM_Task, 'duration_secs': 0.179949} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1248.442421] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-e5e5d13a-407e-4111-9325-5f1f3915272a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4] Updating instance '670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4' progress to 33 {{(pid=63028) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1248.948737] env[63028]: DEBUG nova.virt.hardware [None req-e5e5d13a-407e-4111-9325-5f1f3915272a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1248.949026] env[63028]: DEBUG nova.virt.hardware [None req-e5e5d13a-407e-4111-9325-5f1f3915272a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1248.949193] env[63028]: DEBUG nova.virt.hardware [None req-e5e5d13a-407e-4111-9325-5f1f3915272a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1248.949375] env[63028]: DEBUG nova.virt.hardware [None req-e5e5d13a-407e-4111-9325-5f1f3915272a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1248.949520] env[63028]: DEBUG nova.virt.hardware [None req-e5e5d13a-407e-4111-9325-5f1f3915272a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1248.949663] env[63028]: DEBUG nova.virt.hardware [None req-e5e5d13a-407e-4111-9325-5f1f3915272a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1248.949864] env[63028]: DEBUG nova.virt.hardware [None req-e5e5d13a-407e-4111-9325-5f1f3915272a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1248.950031] env[63028]: DEBUG nova.virt.hardware [None req-e5e5d13a-407e-4111-9325-5f1f3915272a tempest-ServerActionsTestJSON-83366727 
tempest-ServerActionsTestJSON-83366727-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1248.950204] env[63028]: DEBUG nova.virt.hardware [None req-e5e5d13a-407e-4111-9325-5f1f3915272a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1248.950367] env[63028]: DEBUG nova.virt.hardware [None req-e5e5d13a-407e-4111-9325-5f1f3915272a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1248.950537] env[63028]: DEBUG nova.virt.hardware [None req-e5e5d13a-407e-4111-9325-5f1f3915272a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1248.955833] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-e5e5d13a-407e-4111-9325-5f1f3915272a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4] Reconfiguring VM instance instance-00000079 to detach disk 2000 {{(pid=63028) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1248.956137] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-865a4a4a-7f33-4b4e-91dc-1bc1468352f2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.974043] env[63028]: DEBUG oslo_vmware.api [None req-e5e5d13a-407e-4111-9325-5f1f3915272a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Waiting for the task: (returnval){ [ 1248.974043] env[63028]: value = "task-2736550" [ 1248.974043] env[63028]: _type = "Task" [ 1248.974043] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1248.981652] env[63028]: DEBUG oslo_vmware.api [None req-e5e5d13a-407e-4111-9325-5f1f3915272a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736550, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1249.483743] env[63028]: DEBUG oslo_vmware.api [None req-e5e5d13a-407e-4111-9325-5f1f3915272a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736550, 'name': ReconfigVM_Task, 'duration_secs': 0.169194} completed successfully. 
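The hardware.py records above walk from the flavor and image limits (all unset, so the driver falls back to a 65536-way maximum) down to the single 1:1:1 candidate for a 1-vCPU guest. A simplified sketch of that enumeration, not Nova's actual implementation but the same product-equals-vCPUs rule it logs:

```python
from dataclasses import dataclass
from itertools import product


@dataclass(frozen=True)
class Topology:
    sockets: int
    cores: int
    threads: int


def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Yield every sockets/cores/threads split whose product equals the vCPU count."""
    for s, c, t in product(range(1, min(vcpus, max_sockets) + 1),
                           range(1, min(vcpus, max_cores) + 1),
                           range(1, min(vcpus, max_threads) + 1)):
        if s * c * t == vcpus:
            yield Topology(sockets=s, cores=c, threads=t)


# For the 1-vCPU m1.nano flavor in this trace there is exactly one candidate:
print(list(possible_topologies(1)))   # [Topology(sockets=1, cores=1, threads=1)]
```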
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1249.484056] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-e5e5d13a-407e-4111-9325-5f1f3915272a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4] Reconfigured VM instance instance-00000079 to detach disk 2000 {{(pid=63028) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1249.484862] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-699db660-61b0-4214-8079-3999fcf03d89 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.506369] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-e5e5d13a-407e-4111-9325-5f1f3915272a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4] Reconfiguring VM instance instance-00000079 to attach disk [datastore1] 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4/670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4.vmdk or device None with type thin {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1249.506606] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-805af5fa-f234-427d-b24b-4fc7988e4351 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.524652] env[63028]: DEBUG oslo_vmware.api [None req-e5e5d13a-407e-4111-9325-5f1f3915272a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Waiting for the task: (returnval){ [ 1249.524652] env[63028]: value = "task-2736551" [ 1249.524652] env[63028]: _type = "Task" [ 1249.524652] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1249.531886] env[63028]: DEBUG oslo_vmware.api [None req-e5e5d13a-407e-4111-9325-5f1f3915272a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736551, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1250.034782] env[63028]: DEBUG oslo_vmware.api [None req-e5e5d13a-407e-4111-9325-5f1f3915272a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736551, 'name': ReconfigVM_Task, 'duration_secs': 0.249405} completed successfully. 
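Both reconfigure tasks above ("detach disk 2000", then re-attach the instance's .vmdk thin-provisioned) reduce to a ReconfigVM_Task whose spec carries a deviceChange list. Nova drives this through oslo.vmware's suds client; the sketch below uses pyVmomi-style objects purely for illustration and assumes `vm` and `disk` handles obtained elsewhere:

```python
from pyVmomi import vim


def detach_disk(vm, disk):
    """Illustrative only: the 'remove' deviceChange corresponding to the
    "detach disk 2000" reconfigure above. Re-attaching the .vmdk is the
    'add' counterpart with a thin-provisioned FlatVer2 backing."""
    change = vim.vm.device.VirtualDeviceSpec(
        operation=vim.vm.device.VirtualDeviceSpec.Operation.remove,
        device=disk)
    spec = vim.vm.ConfigSpec(deviceChange=[change])
    # Returns a Task object; the caller polls it exactly as in the log.
    return vm.ReconfigVM_Task(spec=spec)
```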
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1250.035119] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-e5e5d13a-407e-4111-9325-5f1f3915272a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4] Reconfigured VM instance instance-00000079 to attach disk [datastore1] 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4/670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4.vmdk or device None with type thin {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1250.035387] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-e5e5d13a-407e-4111-9325-5f1f3915272a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4] Updating instance '670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4' progress to 50 {{(pid=63028) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1250.542229] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d599b85-8a2d-4b82-a4a7-64ea4f500fe6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.560572] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d89f30e-c832-426f-9c9b-ced6dc6541ff {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.577182] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-e5e5d13a-407e-4111-9325-5f1f3915272a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4] Updating instance '670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4' progress to 67 {{(pid=63028) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1251.117913] env[63028]: DEBUG nova.network.neutron [None req-e5e5d13a-407e-4111-9325-5f1f3915272a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4] Port a4f28208-8404-4dcc-a133-bd9f94ad027c binding to destination host cpu-1 is already ACTIVE {{(pid=63028) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1252.138828] env[63028]: DEBUG oslo_concurrency.lockutils [None req-e5e5d13a-407e-4111-9325-5f1f3915272a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Acquiring lock "670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1252.139211] env[63028]: DEBUG oslo_concurrency.lockutils [None req-e5e5d13a-407e-4111-9325-5f1f3915272a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Lock "670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1252.139255] env[63028]: DEBUG oslo_concurrency.lockutils [None req-e5e5d13a-407e-4111-9325-5f1f3915272a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Lock 
"670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1253.170820] env[63028]: DEBUG oslo_concurrency.lockutils [None req-e5e5d13a-407e-4111-9325-5f1f3915272a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Acquiring lock "refresh_cache-670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1253.171086] env[63028]: DEBUG oslo_concurrency.lockutils [None req-e5e5d13a-407e-4111-9325-5f1f3915272a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Acquired lock "refresh_cache-670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1253.171222] env[63028]: DEBUG nova.network.neutron [None req-e5e5d13a-407e-4111-9325-5f1f3915272a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1253.869608] env[63028]: DEBUG nova.network.neutron [None req-e5e5d13a-407e-4111-9325-5f1f3915272a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4] Updating instance_info_cache with network_info: [{"id": "a4f28208-8404-4dcc-a133-bd9f94ad027c", "address": "fa:16:3e:2d:98:bb", "network": {"id": "ec7daf90-9857-4c67-8588-1d8404499409", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-574413795-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.163", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e85128c5c889438bbb1df571b7756c6a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e1a5c1-4ae7-409b-8de7-d401684ef60d", "external-id": "nsx-vlan-transportzone-740", "segmentation_id": 740, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa4f28208-84", "ovs_interfaceid": "a4f28208-8404-4dcc-a133-bd9f94ad027c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1254.372630] env[63028]: DEBUG oslo_concurrency.lockutils [None req-e5e5d13a-407e-4111-9325-5f1f3915272a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Releasing lock "refresh_cache-670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1254.897599] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-dac9050b-7fdf-437f-ba4a-552cda8fe816 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.916603] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-143478f7-2f51-467e-afbf-3de18955438a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.923558] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-e5e5d13a-407e-4111-9325-5f1f3915272a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4] Updating instance '670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4' progress to 83 {{(pid=63028) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1255.429857] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5e5d13a-407e-4111-9325-5f1f3915272a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1255.430466] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ba376443-0937-4fae-a8ed-1f7c4e3ba1f4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.437452] env[63028]: DEBUG oslo_vmware.api [None req-e5e5d13a-407e-4111-9325-5f1f3915272a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Waiting for the task: (returnval){ [ 1255.437452] env[63028]: value = "task-2736552" [ 1255.437452] env[63028]: _type = "Task" [ 1255.437452] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1255.446321] env[63028]: DEBUG oslo_vmware.api [None req-e5e5d13a-407e-4111-9325-5f1f3915272a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736552, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1255.947397] env[63028]: DEBUG oslo_vmware.api [None req-e5e5d13a-407e-4111-9325-5f1f3915272a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736552, 'name': PowerOnVM_Task, 'duration_secs': 0.374801} completed successfully. 
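The Acquiring/Acquired/Released triples in this trace (the per-instance "-events" lock, "refresh_cache-<uuid>", "compute_resources") are oslo.concurrency named locks taken around cache and resource-tracker updates. A minimal sketch of the same pattern with lockutils, assuming only the package itself; the helper names are hypothetical:

```python
from oslo_concurrency import lockutils


def refresh_instance_cache(instance_uuid, rebuild_cache):
    """Serialize cache refreshes per instance, as the
    "refresh_cache-<uuid>" acquire/release records above do."""
    with lockutils.lock(f"refresh_cache-{instance_uuid}"):
        return rebuild_cache(instance_uuid)


@lockutils.synchronized("compute_resources")
def update_resource_tracker(update):
    """Decorator form: one holder of the "compute_resources" lock at a time."""
    return update()
```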
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1255.947651] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5e5d13a-407e-4111-9325-5f1f3915272a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1255.947843] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-e5e5d13a-407e-4111-9325-5f1f3915272a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4] Updating instance '670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4' progress to 100 {{(pid=63028) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1258.603193] env[63028]: DEBUG nova.network.neutron [None req-98840cd3-40e6-4248-ac29-a7cf5b97966a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4] Port a4f28208-8404-4dcc-a133-bd9f94ad027c binding to destination host cpu-1 is already ACTIVE {{(pid=63028) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1258.603504] env[63028]: DEBUG oslo_concurrency.lockutils [None req-98840cd3-40e6-4248-ac29-a7cf5b97966a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Acquiring lock "refresh_cache-670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1258.603757] env[63028]: DEBUG oslo_concurrency.lockutils [None req-98840cd3-40e6-4248-ac29-a7cf5b97966a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Acquired lock "refresh_cache-670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1258.603850] env[63028]: DEBUG nova.network.neutron [None req-98840cd3-40e6-4248-ac29-a7cf5b97966a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1259.341210] env[63028]: DEBUG nova.network.neutron [None req-98840cd3-40e6-4248-ac29-a7cf5b97966a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4] Updating instance_info_cache with network_info: [{"id": "a4f28208-8404-4dcc-a133-bd9f94ad027c", "address": "fa:16:3e:2d:98:bb", "network": {"id": "ec7daf90-9857-4c67-8588-1d8404499409", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-574413795-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.163", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e85128c5c889438bbb1df571b7756c6a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "69e1a5c1-4ae7-409b-8de7-d401684ef60d", "external-id": "nsx-vlan-transportzone-740", "segmentation_id": 740, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa4f28208-84", "ovs_interfaceid": "a4f28208-8404-4dcc-a133-bd9f94ad027c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1259.843915] env[63028]: DEBUG oslo_concurrency.lockutils [None req-98840cd3-40e6-4248-ac29-a7cf5b97966a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Releasing lock "refresh_cache-670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1260.347124] env[63028]: DEBUG nova.compute.manager [None req-98840cd3-40e6-4248-ac29-a7cf5b97966a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=63028) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:901}} [ 1260.347384] env[63028]: DEBUG oslo_concurrency.lockutils [None req-98840cd3-40e6-4248-ac29-a7cf5b97966a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1260.347624] env[63028]: DEBUG oslo_concurrency.lockutils [None req-98840cd3-40e6-4248-ac29-a7cf5b97966a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1260.850512] env[63028]: DEBUG nova.objects.instance [None req-98840cd3-40e6-4248-ac29-a7cf5b97966a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Lazy-loading 'migration_context' on Instance uuid 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1261.398263] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88403be0-b7c3-43e1-a777-12e73981e7cb {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.404816] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fd277df-fff2-4bda-a72b-d7aa568a360e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.433668] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ebd3aa5-187a-41cd-af48-3e2a9659a1b0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.440736] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4c67065-461c-48cb-9971-296f5c340515 {{(pid=63028) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.453287] env[63028]: DEBUG nova.compute.provider_tree [None req-98840cd3-40e6-4248-ac29-a7cf5b97966a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1261.956880] env[63028]: DEBUG nova.scheduler.client.report [None req-98840cd3-40e6-4248-ac29-a7cf5b97966a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1262.968580] env[63028]: DEBUG oslo_concurrency.lockutils [None req-98840cd3-40e6-4248-ac29-a7cf5b97966a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 2.621s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1264.505283] env[63028]: INFO nova.compute.manager [None req-98840cd3-40e6-4248-ac29-a7cf5b97966a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4] Swapping old allocation on dict_keys(['399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2']) held by migration 15573750-a330-4590-ae47-c7d9a85c10f2 for instance [ 1264.525445] env[63028]: DEBUG nova.scheduler.client.report [None req-98840cd3-40e6-4248-ac29-a7cf5b97966a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Overwriting current allocation {'allocations': {'399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 174}}, 'project_id': 'e85128c5c889438bbb1df571b7756c6a', 'user_id': '25218cd4756d409c9fee41c970fb2d32', 'consumer_generation': 1} on consumer 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4 {{(pid=63028) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2036}} [ 1264.602867] env[63028]: DEBUG oslo_concurrency.lockutils [None req-98840cd3-40e6-4248-ac29-a7cf5b97966a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Acquiring lock "refresh_cache-670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1264.603119] env[63028]: DEBUG oslo_concurrency.lockutils [None req-98840cd3-40e6-4248-ac29-a7cf5b97966a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Acquired lock "refresh_cache-670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1264.603360] env[63028]: DEBUG nova.network.neutron [None req-98840cd3-40e6-4248-ac29-a7cf5b97966a tempest-ServerActionsTestJSON-83366727 
tempest-ServerActionsTestJSON-83366727-project-member] [instance: 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1265.315265] env[63028]: DEBUG nova.network.neutron [None req-98840cd3-40e6-4248-ac29-a7cf5b97966a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4] Updating instance_info_cache with network_info: [{"id": "a4f28208-8404-4dcc-a133-bd9f94ad027c", "address": "fa:16:3e:2d:98:bb", "network": {"id": "ec7daf90-9857-4c67-8588-1d8404499409", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-574413795-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.163", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e85128c5c889438bbb1df571b7756c6a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e1a5c1-4ae7-409b-8de7-d401684ef60d", "external-id": "nsx-vlan-transportzone-740", "segmentation_id": 740, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa4f28208-84", "ovs_interfaceid": "a4f28208-8404-4dcc-a133-bd9f94ad027c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1265.817748] env[63028]: DEBUG oslo_concurrency.lockutils [None req-98840cd3-40e6-4248-ac29-a7cf5b97966a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Releasing lock "refresh_cache-670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1265.818252] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-98840cd3-40e6-4248-ac29-a7cf5b97966a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1265.818556] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e0eb0472-1790-4816-84bb-21f947ed942f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.825845] env[63028]: DEBUG oslo_vmware.api [None req-98840cd3-40e6-4248-ac29-a7cf5b97966a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Waiting for the task: (returnval){ [ 1265.825845] env[63028]: value = "task-2736553" [ 1265.825845] env[63028]: _type = "Task" [ 1265.825845] env[63028]: } to complete. 
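The instance_info_cache entries dumped above (and again later in the trace) are plain JSON-serializable VIF records. A small helper, assuming exactly that shape, that pulls the fixed and floating addresses out of one:

```python
def addresses_from_network_info(network_info):
    """Return (fixed_ip, [floating_ips]) pairs from a Nova network_info list,
    i.e. the structure logged by update_instance_cache_with_nw_info above."""
    pairs = []
    for vif in network_info:
        for subnet in vif["network"]["subnets"]:
            for ip in subnet["ips"]:
                floating = [f["address"] for f in ip.get("floating_ips", [])]
                pairs.append((ip["address"], floating))
    return pairs


# For the cache entry above this yields [('192.168.128.11', ['10.180.180.163'])].
```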
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1265.835105] env[63028]: DEBUG oslo_vmware.api [None req-98840cd3-40e6-4248-ac29-a7cf5b97966a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736553, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1266.336047] env[63028]: DEBUG oslo_vmware.api [None req-98840cd3-40e6-4248-ac29-a7cf5b97966a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736553, 'name': PowerOffVM_Task, 'duration_secs': 0.182577} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1266.336047] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-98840cd3-40e6-4248-ac29-a7cf5b97966a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1266.336424] env[63028]: DEBUG nova.virt.hardware [None req-98840cd3-40e6-4248-ac29-a7cf5b97966a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1266.336640] env[63028]: DEBUG nova.virt.hardware [None req-98840cd3-40e6-4248-ac29-a7cf5b97966a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1266.336910] env[63028]: DEBUG nova.virt.hardware [None req-98840cd3-40e6-4248-ac29-a7cf5b97966a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1266.337211] env[63028]: DEBUG nova.virt.hardware [None req-98840cd3-40e6-4248-ac29-a7cf5b97966a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1266.337458] env[63028]: DEBUG nova.virt.hardware [None req-98840cd3-40e6-4248-ac29-a7cf5b97966a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1266.337708] env[63028]: DEBUG nova.virt.hardware [None req-98840cd3-40e6-4248-ac29-a7cf5b97966a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Chose sockets=0, cores=0, threads=0; limits 
were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1266.338061] env[63028]: DEBUG nova.virt.hardware [None req-98840cd3-40e6-4248-ac29-a7cf5b97966a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1266.338269] env[63028]: DEBUG nova.virt.hardware [None req-98840cd3-40e6-4248-ac29-a7cf5b97966a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1266.338452] env[63028]: DEBUG nova.virt.hardware [None req-98840cd3-40e6-4248-ac29-a7cf5b97966a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1266.338620] env[63028]: DEBUG nova.virt.hardware [None req-98840cd3-40e6-4248-ac29-a7cf5b97966a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1266.338800] env[63028]: DEBUG nova.virt.hardware [None req-98840cd3-40e6-4248-ac29-a7cf5b97966a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1266.343782] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-916a52d8-db7c-49cb-a533-66ad80cb6435 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.359969] env[63028]: DEBUG oslo_vmware.api [None req-98840cd3-40e6-4248-ac29-a7cf5b97966a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Waiting for the task: (returnval){ [ 1266.359969] env[63028]: value = "task-2736554" [ 1266.359969] env[63028]: _type = "Task" [ 1266.359969] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1266.367675] env[63028]: DEBUG oslo_vmware.api [None req-98840cd3-40e6-4248-ac29-a7cf5b97966a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736554, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1266.870717] env[63028]: DEBUG oslo_vmware.api [None req-98840cd3-40e6-4248-ac29-a7cf5b97966a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736554, 'name': ReconfigVM_Task, 'duration_secs': 0.139129} completed successfully. 
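A few records back the report client logged the provider inventory it compared against placement (VCPU total 48 at allocation_ratio 4.0, MEMORY_MB 196590 with 512 reserved, DISK_GB 400). The effective capacity placement schedules against follows the usual (total - reserved) * allocation_ratio rule; a quick check of those numbers:

```python
def effective_capacity(inventory):
    """(total - reserved) * allocation_ratio per resource class, applied to
    the inventory record logged by the report client above."""
    return {rc: (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
            for rc, inv in inventory.items()}


inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
}
print(effective_capacity(inventory))
# {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}
```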
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1266.871231] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfdc35ec-20cc-45a9-9508-05041dedb634 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.889619] env[63028]: DEBUG nova.virt.hardware [None req-98840cd3-40e6-4248-ac29-a7cf5b97966a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1266.889933] env[63028]: DEBUG nova.virt.hardware [None req-98840cd3-40e6-4248-ac29-a7cf5b97966a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1266.890118] env[63028]: DEBUG nova.virt.hardware [None req-98840cd3-40e6-4248-ac29-a7cf5b97966a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1266.890315] env[63028]: DEBUG nova.virt.hardware [None req-98840cd3-40e6-4248-ac29-a7cf5b97966a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1266.890460] env[63028]: DEBUG nova.virt.hardware [None req-98840cd3-40e6-4248-ac29-a7cf5b97966a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1266.890607] env[63028]: DEBUG nova.virt.hardware [None req-98840cd3-40e6-4248-ac29-a7cf5b97966a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1266.890810] env[63028]: DEBUG nova.virt.hardware [None req-98840cd3-40e6-4248-ac29-a7cf5b97966a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1266.890970] env[63028]: DEBUG nova.virt.hardware [None req-98840cd3-40e6-4248-ac29-a7cf5b97966a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 1266.891149] env[63028]: DEBUG nova.virt.hardware [None req-98840cd3-40e6-4248-ac29-a7cf5b97966a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1266.891314] env[63028]: DEBUG nova.virt.hardware [None req-98840cd3-40e6-4248-ac29-a7cf5b97966a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1266.891487] env[63028]: DEBUG nova.virt.hardware [None req-98840cd3-40e6-4248-ac29-a7cf5b97966a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1266.892261] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-da53d7b5-36e9-4337-a234-575e07389424 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.898078] env[63028]: DEBUG oslo_vmware.api [None req-98840cd3-40e6-4248-ac29-a7cf5b97966a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Waiting for the task: (returnval){ [ 1266.898078] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52815ef7-134b-4cec-0a9a-0ffc53cf8b0e" [ 1266.898078] env[63028]: _type = "Task" [ 1266.898078] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1266.906917] env[63028]: DEBUG oslo_vmware.api [None req-98840cd3-40e6-4248-ac29-a7cf5b97966a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52815ef7-134b-4cec-0a9a-0ffc53cf8b0e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1267.407905] env[63028]: DEBUG oslo_vmware.api [None req-98840cd3-40e6-4248-ac29-a7cf5b97966a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52815ef7-134b-4cec-0a9a-0ffc53cf8b0e, 'name': SearchDatastore_Task, 'duration_secs': 0.009867} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1267.413005] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-98840cd3-40e6-4248-ac29-a7cf5b97966a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4] Reconfiguring VM instance instance-00000079 to detach disk 2000 {{(pid=63028) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1267.413277] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fd0f8c8e-d953-46f6-b795-afc22b380ea5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.430927] env[63028]: DEBUG oslo_vmware.api [None req-98840cd3-40e6-4248-ac29-a7cf5b97966a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Waiting for the task: (returnval){ [ 1267.430927] env[63028]: value = "task-2736555" [ 1267.430927] env[63028]: _type = "Task" [ 1267.430927] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1267.440874] env[63028]: DEBUG oslo_vmware.api [None req-98840cd3-40e6-4248-ac29-a7cf5b97966a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736555, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1267.946605] env[63028]: DEBUG oslo_vmware.api [None req-98840cd3-40e6-4248-ac29-a7cf5b97966a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736555, 'name': ReconfigVM_Task, 'duration_secs': 0.17833} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1267.947068] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-98840cd3-40e6-4248-ac29-a7cf5b97966a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4] Reconfigured VM instance instance-00000079 to detach disk 2000 {{(pid=63028) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1267.947716] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2640fd95-37ba-4662-bfce-d27069b043d2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.969343] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-98840cd3-40e6-4248-ac29-a7cf5b97966a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4] Reconfiguring VM instance instance-00000079 to attach disk [datastore1] 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4/670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4.vmdk or device None with type thin {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1267.969592] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7b262984-2859-4b57-9c54-dceb2ad35cb2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.987907] env[63028]: DEBUG oslo_vmware.api [None req-98840cd3-40e6-4248-ac29-a7cf5b97966a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Waiting for the task: (returnval){ [ 1267.987907] env[63028]: value = "task-2736556" [ 1267.987907] env[63028]: _type = "Task" [ 1267.987907] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1267.998088] env[63028]: DEBUG oslo_vmware.api [None req-98840cd3-40e6-4248-ac29-a7cf5b97966a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736556, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1268.497544] env[63028]: DEBUG oslo_vmware.api [None req-98840cd3-40e6-4248-ac29-a7cf5b97966a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736556, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1268.998468] env[63028]: DEBUG oslo_vmware.api [None req-98840cd3-40e6-4248-ac29-a7cf5b97966a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736556, 'name': ReconfigVM_Task, 'duration_secs': 0.833047} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1268.998870] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-98840cd3-40e6-4248-ac29-a7cf5b97966a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4] Reconfigured VM instance instance-00000079 to attach disk [datastore1] 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4/670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4.vmdk or device None with type thin {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1268.999552] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-281088b4-a55a-4e6f-90bf-026e59269ee6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.016944] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-747f0d0a-bb06-465a-8fb1-f72213787502 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.034115] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b23ad220-1abf-4304-bdaf-867bef244ef0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.051631] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1471e73-1207-405e-bc66-049a50867334 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.058442] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-98840cd3-40e6-4248-ac29-a7cf5b97966a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1269.058600] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7bf17fba-df0a-4c70-9552-d759f28f2acd {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.064416] env[63028]: DEBUG oslo_vmware.api [None req-98840cd3-40e6-4248-ac29-a7cf5b97966a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Waiting for the task: (returnval){ [ 1269.064416] env[63028]: value = "task-2736557" [ 1269.064416] env[63028]: _type = "Task" [ 1269.064416] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1269.071660] env[63028]: DEBUG oslo_vmware.api [None req-98840cd3-40e6-4248-ac29-a7cf5b97966a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736557, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1269.577212] env[63028]: DEBUG oslo_vmware.api [None req-98840cd3-40e6-4248-ac29-a7cf5b97966a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736557, 'name': PowerOnVM_Task, 'duration_secs': 0.406708} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1269.577600] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-98840cd3-40e6-4248-ac29-a7cf5b97966a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1270.589404] env[63028]: INFO nova.compute.manager [None req-98840cd3-40e6-4248-ac29-a7cf5b97966a tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4] Updating instance to original state: 'active' [ 1272.119597] env[63028]: DEBUG oslo_concurrency.lockutils [None req-b6637f19-720a-4b45-853b-7b033b7fce96 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Acquiring lock "670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1272.119986] env[63028]: DEBUG oslo_concurrency.lockutils [None req-b6637f19-720a-4b45-853b-7b033b7fce96 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Lock "670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1272.120082] env[63028]: DEBUG oslo_concurrency.lockutils [None req-b6637f19-720a-4b45-853b-7b033b7fce96 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Acquiring lock "670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1272.120275] env[63028]: DEBUG oslo_concurrency.lockutils [None req-b6637f19-720a-4b45-853b-7b033b7fce96 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Lock "670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1272.120452] env[63028]: DEBUG oslo_concurrency.lockutils [None req-b6637f19-720a-4b45-853b-7b033b7fce96 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Lock "670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1272.122621] env[63028]: INFO nova.compute.manager [None req-b6637f19-720a-4b45-853b-7b033b7fce96 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4] Terminating instance [ 1272.625993] env[63028]: DEBUG nova.compute.manager [None req-b6637f19-720a-4b45-853b-7b033b7fce96 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4] Start destroying 
the instance on the hypervisor. {{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1272.626287] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-b6637f19-720a-4b45-853b-7b033b7fce96 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1272.627226] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-004aea32-5b4f-4217-be7e-e184ddc743e5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.635124] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6637f19-720a-4b45-853b-7b033b7fce96 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1272.635372] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4d1601a3-876f-4927-b808-1f688fb62049 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.640850] env[63028]: DEBUG oslo_vmware.api [None req-b6637f19-720a-4b45-853b-7b033b7fce96 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Waiting for the task: (returnval){ [ 1272.640850] env[63028]: value = "task-2736558" [ 1272.640850] env[63028]: _type = "Task" [ 1272.640850] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1272.648105] env[63028]: DEBUG oslo_vmware.api [None req-b6637f19-720a-4b45-853b-7b033b7fce96 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736558, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1273.150773] env[63028]: DEBUG oslo_vmware.api [None req-b6637f19-720a-4b45-853b-7b033b7fce96 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736558, 'name': PowerOffVM_Task, 'duration_secs': 0.186476} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1273.151146] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6637f19-720a-4b45-853b-7b033b7fce96 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1273.151205] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-b6637f19-720a-4b45-853b-7b033b7fce96 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1273.151452] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d385584f-21b8-4ad5-a126-898139782131 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.210965] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-b6637f19-720a-4b45-853b-7b033b7fce96 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1273.211207] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-b6637f19-720a-4b45-853b-7b033b7fce96 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4] Deleting contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1273.211367] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-b6637f19-720a-4b45-853b-7b033b7fce96 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Deleting the datastore file [datastore1] 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4 {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1273.211622] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f9bee359-9d87-4b71-ae1f-876d342f3208 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.217511] env[63028]: DEBUG oslo_vmware.api [None req-b6637f19-720a-4b45-853b-7b033b7fce96 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Waiting for the task: (returnval){ [ 1273.217511] env[63028]: value = "task-2736560" [ 1273.217511] env[63028]: _type = "Task" [ 1273.217511] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1273.225237] env[63028]: DEBUG oslo_vmware.api [None req-b6637f19-720a-4b45-853b-7b033b7fce96 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736560, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1273.727561] env[63028]: DEBUG oslo_vmware.api [None req-b6637f19-720a-4b45-853b-7b033b7fce96 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736560, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.136036} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1273.727795] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-b6637f19-720a-4b45-853b-7b033b7fce96 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1273.727971] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-b6637f19-720a-4b45-853b-7b033b7fce96 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4] Deleted contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1273.728212] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-b6637f19-720a-4b45-853b-7b033b7fce96 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1273.728427] env[63028]: INFO nova.compute.manager [None req-b6637f19-720a-4b45-853b-7b033b7fce96 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4] Took 1.10 seconds to destroy the instance on the hypervisor. [ 1273.728661] env[63028]: DEBUG oslo.service.loopingcall [None req-b6637f19-720a-4b45-853b-7b033b7fce96 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1273.728852] env[63028]: DEBUG nova.compute.manager [-] [instance: 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1273.728948] env[63028]: DEBUG nova.network.neutron [-] [instance: 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1274.373632] env[63028]: DEBUG nova.compute.manager [req-6c1fa0dd-7318-43ea-aabc-53061b353415 req-a6d2f70b-be7b-4759-9056-0877c4c547a5 service nova] [instance: 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4] Received event network-vif-deleted-a4f28208-8404-4dcc-a133-bd9f94ad027c {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1274.373935] env[63028]: INFO nova.compute.manager [req-6c1fa0dd-7318-43ea-aabc-53061b353415 req-a6d2f70b-be7b-4759-9056-0877c4c547a5 service nova] [instance: 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4] Neutron deleted interface a4f28208-8404-4dcc-a133-bd9f94ad027c; detaching it from the instance and deleting it from the info cache [ 1274.374202] env[63028]: DEBUG nova.network.neutron [req-6c1fa0dd-7318-43ea-aabc-53061b353415 req-a6d2f70b-be7b-4759-9056-0877c4c547a5 service nova] [instance: 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1274.850744] env[63028]: DEBUG nova.network.neutron [-] [instance: 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1274.876861] env[63028]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5d4b907e-9cef-4b7a-83a2-6ab2defe4f38 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.886114] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b98b9bb9-a417-4995-b6e4-fc0c9422621c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.909477] env[63028]: DEBUG nova.compute.manager [req-6c1fa0dd-7318-43ea-aabc-53061b353415 req-a6d2f70b-be7b-4759-9056-0877c4c547a5 service nova] [instance: 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4] Detach interface failed, port_id=a4f28208-8404-4dcc-a133-bd9f94ad027c, reason: Instance 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4 could not be found. {{(pid=63028) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1275.354016] env[63028]: INFO nova.compute.manager [-] [instance: 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4] Took 1.62 seconds to deallocate network for instance. 
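
The records above show Nova reacting to a Neutron network-vif-deleted event: the deleted port is detached from the instance and removed from the cached network_info, which is then logged as []. As a rough illustration only (not Nova's actual implementation), the short Python sketch below filters a network_info-style list by port ID; the helper name prune_deleted_vif and the sample entry are invented for the example.

# Illustrative sketch: mimics the effect of handling a network-vif-deleted
# event on a cached network_info list. Helper name and sample data are
# hypothetical; this is not Nova code.

def prune_deleted_vif(network_info, deleted_port_id):
    """Return network_info without the VIF whose id matches deleted_port_id."""
    return [vif for vif in network_info if vif.get("id") != deleted_port_id]


if __name__ == "__main__":
    # Shape loosely follows the network_info entries seen elsewhere in this log;
    # the MAC address is a made-up placeholder.
    cached = [
        {"id": "a4f28208-8404-4dcc-a133-bd9f94ad027c",
         "address": "fa:16:3e:00:00:01",
         "type": "ovs"},
    ]
    # Neutron reported the port as deleted, so the cache ends up empty,
    # matching the "Updating instance_info_cache with network_info: []" record.
    print(prune_deleted_vif(cached, "a4f28208-8404-4dcc-a133-bd9f94ad027c"))  # -> []
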
[ 1275.860970] env[63028]: DEBUG oslo_concurrency.lockutils [None req-b6637f19-720a-4b45-853b-7b033b7fce96 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1275.861374] env[63028]: DEBUG oslo_concurrency.lockutils [None req-b6637f19-720a-4b45-853b-7b033b7fce96 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1275.861454] env[63028]: DEBUG oslo_concurrency.lockutils [None req-b6637f19-720a-4b45-853b-7b033b7fce96 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1275.887830] env[63028]: INFO nova.scheduler.client.report [None req-b6637f19-720a-4b45-853b-7b033b7fce96 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Deleted allocations for instance 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4 [ 1276.395659] env[63028]: DEBUG oslo_concurrency.lockutils [None req-b6637f19-720a-4b45-853b-7b033b7fce96 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Lock "670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 4.275s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1276.940778] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4ad9a670-1b56-4273-b9a6-09c7f466272b tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Acquiring lock "37a202b5-78f6-45a0-b753-7f9747214f3a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1276.941105] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4ad9a670-1b56-4273-b9a6-09c7f466272b tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Lock "37a202b5-78f6-45a0-b753-7f9747214f3a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1277.443160] env[63028]: DEBUG nova.compute.manager [None req-4ad9a670-1b56-4273-b9a6-09c7f466272b tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 37a202b5-78f6-45a0-b753-7f9747214f3a] Starting instance... 
{{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1277.963778] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4ad9a670-1b56-4273-b9a6-09c7f466272b tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1277.964092] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4ad9a670-1b56-4273-b9a6-09c7f466272b tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1277.965550] env[63028]: INFO nova.compute.claims [None req-4ad9a670-1b56-4273-b9a6-09c7f466272b tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 37a202b5-78f6-45a0-b753-7f9747214f3a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1279.004822] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5686a84-05fd-4587-9c83-fbb5497b8e53 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.012315] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66a414e8-5132-4333-87bd-906dd08120cd {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.041628] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e43a053-c215-4494-89ab-5772014cbdb4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.048318] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-695b51ca-57a8-49de-a0db-254da22ca5ef {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.060974] env[63028]: DEBUG nova.compute.provider_tree [None req-4ad9a670-1b56-4273-b9a6-09c7f466272b tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1279.564369] env[63028]: DEBUG nova.scheduler.client.report [None req-4ad9a670-1b56-4273-b9a6-09c7f466272b tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1280.069269] env[63028]: DEBUG oslo_concurrency.lockutils [None 
req-4ad9a670-1b56-4273-b9a6-09c7f466272b tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.105s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1280.069795] env[63028]: DEBUG nova.compute.manager [None req-4ad9a670-1b56-4273-b9a6-09c7f466272b tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 37a202b5-78f6-45a0-b753-7f9747214f3a] Start building networks asynchronously for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1280.575645] env[63028]: DEBUG nova.compute.utils [None req-4ad9a670-1b56-4273-b9a6-09c7f466272b tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1280.577045] env[63028]: DEBUG nova.compute.manager [None req-4ad9a670-1b56-4273-b9a6-09c7f466272b tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 37a202b5-78f6-45a0-b753-7f9747214f3a] Allocating IP information in the background. {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1280.577201] env[63028]: DEBUG nova.network.neutron [None req-4ad9a670-1b56-4273-b9a6-09c7f466272b tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 37a202b5-78f6-45a0-b753-7f9747214f3a] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1280.623843] env[63028]: DEBUG nova.policy [None req-4ad9a670-1b56-4273-b9a6-09c7f466272b tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '25218cd4756d409c9fee41c970fb2d32', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e85128c5c889438bbb1df571b7756c6a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 1280.894102] env[63028]: DEBUG nova.network.neutron [None req-4ad9a670-1b56-4273-b9a6-09c7f466272b tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 37a202b5-78f6-45a0-b753-7f9747214f3a] Successfully created port: 3de00577-cb32-4425-bfe5-8c40b4fd4e52 {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1281.080817] env[63028]: DEBUG nova.compute.manager [None req-4ad9a670-1b56-4273-b9a6-09c7f466272b tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 37a202b5-78f6-45a0-b753-7f9747214f3a] Start building block device mappings for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1282.090870] env[63028]: DEBUG nova.compute.manager [None req-4ad9a670-1b56-4273-b9a6-09c7f466272b tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 37a202b5-78f6-45a0-b753-7f9747214f3a] Start spawning the instance on the hypervisor. 
{{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1282.120599] env[63028]: DEBUG nova.virt.hardware [None req-4ad9a670-1b56-4273-b9a6-09c7f466272b tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1282.120863] env[63028]: DEBUG nova.virt.hardware [None req-4ad9a670-1b56-4273-b9a6-09c7f466272b tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1282.121030] env[63028]: DEBUG nova.virt.hardware [None req-4ad9a670-1b56-4273-b9a6-09c7f466272b tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1282.121211] env[63028]: DEBUG nova.virt.hardware [None req-4ad9a670-1b56-4273-b9a6-09c7f466272b tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1282.121358] env[63028]: DEBUG nova.virt.hardware [None req-4ad9a670-1b56-4273-b9a6-09c7f466272b tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1282.121508] env[63028]: DEBUG nova.virt.hardware [None req-4ad9a670-1b56-4273-b9a6-09c7f466272b tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1282.122069] env[63028]: DEBUG nova.virt.hardware [None req-4ad9a670-1b56-4273-b9a6-09c7f466272b tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1282.122069] env[63028]: DEBUG nova.virt.hardware [None req-4ad9a670-1b56-4273-b9a6-09c7f466272b tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1282.122069] env[63028]: DEBUG nova.virt.hardware [None 
req-4ad9a670-1b56-4273-b9a6-09c7f466272b tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1282.122242] env[63028]: DEBUG nova.virt.hardware [None req-4ad9a670-1b56-4273-b9a6-09c7f466272b tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1282.122364] env[63028]: DEBUG nova.virt.hardware [None req-4ad9a670-1b56-4273-b9a6-09c7f466272b tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1282.123246] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fad69b4-8387-44e1-8f36-a02538962540 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.131281] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf07015c-ce71-4e4e-b8b4-a5cab132062d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.248398] env[63028]: DEBUG nova.compute.manager [req-446befdd-70e4-4f9f-b865-e02da13f0330 req-652317ee-a93a-4abc-9837-93b7ece9f5cc service nova] [instance: 37a202b5-78f6-45a0-b753-7f9747214f3a] Received event network-vif-plugged-3de00577-cb32-4425-bfe5-8c40b4fd4e52 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1282.248626] env[63028]: DEBUG oslo_concurrency.lockutils [req-446befdd-70e4-4f9f-b865-e02da13f0330 req-652317ee-a93a-4abc-9837-93b7ece9f5cc service nova] Acquiring lock "37a202b5-78f6-45a0-b753-7f9747214f3a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1282.248866] env[63028]: DEBUG oslo_concurrency.lockutils [req-446befdd-70e4-4f9f-b865-e02da13f0330 req-652317ee-a93a-4abc-9837-93b7ece9f5cc service nova] Lock "37a202b5-78f6-45a0-b753-7f9747214f3a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1282.249045] env[63028]: DEBUG oslo_concurrency.lockutils [req-446befdd-70e4-4f9f-b865-e02da13f0330 req-652317ee-a93a-4abc-9837-93b7ece9f5cc service nova] Lock "37a202b5-78f6-45a0-b753-7f9747214f3a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1282.249218] env[63028]: DEBUG nova.compute.manager [req-446befdd-70e4-4f9f-b865-e02da13f0330 req-652317ee-a93a-4abc-9837-93b7ece9f5cc service nova] [instance: 37a202b5-78f6-45a0-b753-7f9747214f3a] No waiting events found dispatching network-vif-plugged-3de00577-cb32-4425-bfe5-8c40b4fd4e52 {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1282.249428] env[63028]: WARNING nova.compute.manager [req-446befdd-70e4-4f9f-b865-e02da13f0330 
req-652317ee-a93a-4abc-9837-93b7ece9f5cc service nova] [instance: 37a202b5-78f6-45a0-b753-7f9747214f3a] Received unexpected event network-vif-plugged-3de00577-cb32-4425-bfe5-8c40b4fd4e52 for instance with vm_state building and task_state spawning. [ 1282.324438] env[63028]: DEBUG nova.network.neutron [None req-4ad9a670-1b56-4273-b9a6-09c7f466272b tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 37a202b5-78f6-45a0-b753-7f9747214f3a] Successfully updated port: 3de00577-cb32-4425-bfe5-8c40b4fd4e52 {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1282.827391] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4ad9a670-1b56-4273-b9a6-09c7f466272b tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Acquiring lock "refresh_cache-37a202b5-78f6-45a0-b753-7f9747214f3a" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1282.827491] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4ad9a670-1b56-4273-b9a6-09c7f466272b tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Acquired lock "refresh_cache-37a202b5-78f6-45a0-b753-7f9747214f3a" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1282.827626] env[63028]: DEBUG nova.network.neutron [None req-4ad9a670-1b56-4273-b9a6-09c7f466272b tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 37a202b5-78f6-45a0-b753-7f9747214f3a] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1283.358501] env[63028]: DEBUG nova.network.neutron [None req-4ad9a670-1b56-4273-b9a6-09c7f466272b tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 37a202b5-78f6-45a0-b753-7f9747214f3a] Instance cache missing network info. 
{{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1283.481230] env[63028]: DEBUG nova.network.neutron [None req-4ad9a670-1b56-4273-b9a6-09c7f466272b tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 37a202b5-78f6-45a0-b753-7f9747214f3a] Updating instance_info_cache with network_info: [{"id": "3de00577-cb32-4425-bfe5-8c40b4fd4e52", "address": "fa:16:3e:0e:43:68", "network": {"id": "ec7daf90-9857-4c67-8588-1d8404499409", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-574413795-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e85128c5c889438bbb1df571b7756c6a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e1a5c1-4ae7-409b-8de7-d401684ef60d", "external-id": "nsx-vlan-transportzone-740", "segmentation_id": 740, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3de00577-cb", "ovs_interfaceid": "3de00577-cb32-4425-bfe5-8c40b4fd4e52", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1283.984634] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4ad9a670-1b56-4273-b9a6-09c7f466272b tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Releasing lock "refresh_cache-37a202b5-78f6-45a0-b753-7f9747214f3a" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1283.984969] env[63028]: DEBUG nova.compute.manager [None req-4ad9a670-1b56-4273-b9a6-09c7f466272b tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 37a202b5-78f6-45a0-b753-7f9747214f3a] Instance network_info: |[{"id": "3de00577-cb32-4425-bfe5-8c40b4fd4e52", "address": "fa:16:3e:0e:43:68", "network": {"id": "ec7daf90-9857-4c67-8588-1d8404499409", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-574413795-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e85128c5c889438bbb1df571b7756c6a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e1a5c1-4ae7-409b-8de7-d401684ef60d", "external-id": "nsx-vlan-transportzone-740", "segmentation_id": 740, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3de00577-cb", "ovs_interfaceid": "3de00577-cb32-4425-bfe5-8c40b4fd4e52", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2003}} [ 1283.985464] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-4ad9a670-1b56-4273-b9a6-09c7f466272b tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 37a202b5-78f6-45a0-b753-7f9747214f3a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0e:43:68', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69e1a5c1-4ae7-409b-8de7-d401684ef60d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3de00577-cb32-4425-bfe5-8c40b4fd4e52', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1283.993264] env[63028]: DEBUG oslo.service.loopingcall [None req-4ad9a670-1b56-4273-b9a6-09c7f466272b tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1283.993467] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 37a202b5-78f6-45a0-b753-7f9747214f3a] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1283.994069] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1aa95dad-604c-4383-864c-c95894c48b38 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.013867] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1284.013867] env[63028]: value = "task-2736561" [ 1284.013867] env[63028]: _type = "Task" [ 1284.013867] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1284.021214] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2736561, 'name': CreateVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1284.275373] env[63028]: DEBUG nova.compute.manager [req-047af6d1-cb12-4062-92d8-12fe0ed12c3b req-21cbf92d-ab1c-49f2-8523-1a89d74f1a46 service nova] [instance: 37a202b5-78f6-45a0-b753-7f9747214f3a] Received event network-changed-3de00577-cb32-4425-bfe5-8c40b4fd4e52 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1284.275586] env[63028]: DEBUG nova.compute.manager [req-047af6d1-cb12-4062-92d8-12fe0ed12c3b req-21cbf92d-ab1c-49f2-8523-1a89d74f1a46 service nova] [instance: 37a202b5-78f6-45a0-b753-7f9747214f3a] Refreshing instance network info cache due to event network-changed-3de00577-cb32-4425-bfe5-8c40b4fd4e52. 
{{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1284.275812] env[63028]: DEBUG oslo_concurrency.lockutils [req-047af6d1-cb12-4062-92d8-12fe0ed12c3b req-21cbf92d-ab1c-49f2-8523-1a89d74f1a46 service nova] Acquiring lock "refresh_cache-37a202b5-78f6-45a0-b753-7f9747214f3a" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1284.275956] env[63028]: DEBUG oslo_concurrency.lockutils [req-047af6d1-cb12-4062-92d8-12fe0ed12c3b req-21cbf92d-ab1c-49f2-8523-1a89d74f1a46 service nova] Acquired lock "refresh_cache-37a202b5-78f6-45a0-b753-7f9747214f3a" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1284.276136] env[63028]: DEBUG nova.network.neutron [req-047af6d1-cb12-4062-92d8-12fe0ed12c3b req-21cbf92d-ab1c-49f2-8523-1a89d74f1a46 service nova] [instance: 37a202b5-78f6-45a0-b753-7f9747214f3a] Refreshing network info cache for port 3de00577-cb32-4425-bfe5-8c40b4fd4e52 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1284.523736] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2736561, 'name': CreateVM_Task, 'duration_secs': 0.290642} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1284.524113] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 37a202b5-78f6-45a0-b753-7f9747214f3a] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1284.524522] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4ad9a670-1b56-4273-b9a6-09c7f466272b tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1284.524693] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4ad9a670-1b56-4273-b9a6-09c7f466272b tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1284.525034] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4ad9a670-1b56-4273-b9a6-09c7f466272b tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1284.525287] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b51689b9-6564-411c-a2fa-8d1ddffa6874 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.529395] env[63028]: DEBUG oslo_vmware.api [None req-4ad9a670-1b56-4273-b9a6-09c7f466272b tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Waiting for the task: (returnval){ [ 1284.529395] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52be1762-6d1f-2ebb-691c-0f78f129fc4c" [ 1284.529395] env[63028]: _type = "Task" [ 1284.529395] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1284.538148] env[63028]: DEBUG oslo_vmware.api [None req-4ad9a670-1b56-4273-b9a6-09c7f466272b tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52be1762-6d1f-2ebb-691c-0f78f129fc4c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1284.957212] env[63028]: DEBUG nova.network.neutron [req-047af6d1-cb12-4062-92d8-12fe0ed12c3b req-21cbf92d-ab1c-49f2-8523-1a89d74f1a46 service nova] [instance: 37a202b5-78f6-45a0-b753-7f9747214f3a] Updated VIF entry in instance network info cache for port 3de00577-cb32-4425-bfe5-8c40b4fd4e52. {{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1284.957580] env[63028]: DEBUG nova.network.neutron [req-047af6d1-cb12-4062-92d8-12fe0ed12c3b req-21cbf92d-ab1c-49f2-8523-1a89d74f1a46 service nova] [instance: 37a202b5-78f6-45a0-b753-7f9747214f3a] Updating instance_info_cache with network_info: [{"id": "3de00577-cb32-4425-bfe5-8c40b4fd4e52", "address": "fa:16:3e:0e:43:68", "network": {"id": "ec7daf90-9857-4c67-8588-1d8404499409", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-574413795-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e85128c5c889438bbb1df571b7756c6a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e1a5c1-4ae7-409b-8de7-d401684ef60d", "external-id": "nsx-vlan-transportzone-740", "segmentation_id": 740, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3de00577-cb", "ovs_interfaceid": "3de00577-cb32-4425-bfe5-8c40b4fd4e52", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1285.039981] env[63028]: DEBUG oslo_vmware.api [None req-4ad9a670-1b56-4273-b9a6-09c7f466272b tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52be1762-6d1f-2ebb-691c-0f78f129fc4c, 'name': SearchDatastore_Task, 'duration_secs': 0.009671} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1285.042023] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4ad9a670-1b56-4273-b9a6-09c7f466272b tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1285.042023] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-4ad9a670-1b56-4273-b9a6-09c7f466272b tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 37a202b5-78f6-45a0-b753-7f9747214f3a] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1285.042023] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4ad9a670-1b56-4273-b9a6-09c7f466272b tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1285.042023] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4ad9a670-1b56-4273-b9a6-09c7f466272b tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1285.042023] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-4ad9a670-1b56-4273-b9a6-09c7f466272b tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1285.042023] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-15ae94bf-23d8-411e-abb9-f6de9b98ed0e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.049247] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-4ad9a670-1b56-4273-b9a6-09c7f466272b tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1285.049416] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-4ad9a670-1b56-4273-b9a6-09c7f466272b tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1285.050098] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9485c4d3-35f6-4aaf-931c-b0d2fd43dd54 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.054604] env[63028]: DEBUG oslo_vmware.api [None req-4ad9a670-1b56-4273-b9a6-09c7f466272b tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Waiting for the task: (returnval){ [ 1285.054604] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]522c266d-202b-61c6-611b-20baa82d2b4b" [ 1285.054604] env[63028]: _type = "Task" [ 1285.054604] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1285.061310] env[63028]: DEBUG oslo_vmware.api [None req-4ad9a670-1b56-4273-b9a6-09c7f466272b tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]522c266d-202b-61c6-611b-20baa82d2b4b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1285.460196] env[63028]: DEBUG oslo_concurrency.lockutils [req-047af6d1-cb12-4062-92d8-12fe0ed12c3b req-21cbf92d-ab1c-49f2-8523-1a89d74f1a46 service nova] Releasing lock "refresh_cache-37a202b5-78f6-45a0-b753-7f9747214f3a" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1285.564311] env[63028]: DEBUG oslo_vmware.api [None req-4ad9a670-1b56-4273-b9a6-09c7f466272b tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]522c266d-202b-61c6-611b-20baa82d2b4b, 'name': SearchDatastore_Task, 'duration_secs': 0.007738} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1285.565064] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dee50537-1d4d-4c70-b05d-5461a8e84e01 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.570165] env[63028]: DEBUG oslo_vmware.api [None req-4ad9a670-1b56-4273-b9a6-09c7f466272b tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Waiting for the task: (returnval){ [ 1285.570165] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]524a9ac0-9417-33bb-48bf-8dd6f37ad5ec" [ 1285.570165] env[63028]: _type = "Task" [ 1285.570165] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1285.577496] env[63028]: DEBUG oslo_vmware.api [None req-4ad9a670-1b56-4273-b9a6-09c7f466272b tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]524a9ac0-9417-33bb-48bf-8dd6f37ad5ec, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1286.079777] env[63028]: DEBUG oslo_vmware.api [None req-4ad9a670-1b56-4273-b9a6-09c7f466272b tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]524a9ac0-9417-33bb-48bf-8dd6f37ad5ec, 'name': SearchDatastore_Task, 'duration_secs': 0.010648} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1286.080045] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4ad9a670-1b56-4273-b9a6-09c7f466272b tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1286.080296] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ad9a670-1b56-4273-b9a6-09c7f466272b tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] 37a202b5-78f6-45a0-b753-7f9747214f3a/37a202b5-78f6-45a0-b753-7f9747214f3a.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1286.080548] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-14370b88-dc7e-4b19-9dc0-d3aeb8909684 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.086833] env[63028]: DEBUG oslo_vmware.api [None req-4ad9a670-1b56-4273-b9a6-09c7f466272b tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Waiting for the task: (returnval){ [ 1286.086833] env[63028]: value = "task-2736562" [ 1286.086833] env[63028]: _type = "Task" [ 1286.086833] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1286.094449] env[63028]: DEBUG oslo_vmware.api [None req-4ad9a670-1b56-4273-b9a6-09c7f466272b tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736562, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1286.596596] env[63028]: DEBUG oslo_vmware.api [None req-4ad9a670-1b56-4273-b9a6-09c7f466272b tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736562, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.431571} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1286.597050] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ad9a670-1b56-4273-b9a6-09c7f466272b tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] 37a202b5-78f6-45a0-b753-7f9747214f3a/37a202b5-78f6-45a0-b753-7f9747214f3a.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1286.597194] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-4ad9a670-1b56-4273-b9a6-09c7f466272b tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 37a202b5-78f6-45a0-b753-7f9747214f3a] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1286.597372] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3b89ec0b-1354-4e64-b1d7-e007558d2bab {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.603772] env[63028]: DEBUG oslo_vmware.api [None req-4ad9a670-1b56-4273-b9a6-09c7f466272b tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Waiting for the task: (returnval){ [ 1286.603772] env[63028]: value = "task-2736563" [ 1286.603772] env[63028]: _type = "Task" [ 1286.603772] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1286.610452] env[63028]: DEBUG oslo_vmware.api [None req-4ad9a670-1b56-4273-b9a6-09c7f466272b tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736563, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1287.113356] env[63028]: DEBUG oslo_vmware.api [None req-4ad9a670-1b56-4273-b9a6-09c7f466272b tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736563, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065397} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1287.113612] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-4ad9a670-1b56-4273-b9a6-09c7f466272b tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 37a202b5-78f6-45a0-b753-7f9747214f3a] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1287.114355] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f607769-f4f8-4aca-bfdb-ed826a66fe27 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.136047] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-4ad9a670-1b56-4273-b9a6-09c7f466272b tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 37a202b5-78f6-45a0-b753-7f9747214f3a] Reconfiguring VM instance instance-0000007a to attach disk [datastore1] 37a202b5-78f6-45a0-b753-7f9747214f3a/37a202b5-78f6-45a0-b753-7f9747214f3a.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1287.136047] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b1131623-0e31-44ef-ae9c-93e3c2f4eb5d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.154012] env[63028]: DEBUG oslo_vmware.api [None req-4ad9a670-1b56-4273-b9a6-09c7f466272b tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Waiting for the task: (returnval){ [ 1287.154012] env[63028]: value = "task-2736564" [ 1287.154012] env[63028]: _type = "Task" [ 1287.154012] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1287.161064] env[63028]: DEBUG oslo_vmware.api [None req-4ad9a670-1b56-4273-b9a6-09c7f466272b tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736564, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1287.665180] env[63028]: DEBUG oslo_vmware.api [None req-4ad9a670-1b56-4273-b9a6-09c7f466272b tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736564, 'name': ReconfigVM_Task, 'duration_secs': 0.25904} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1287.665587] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-4ad9a670-1b56-4273-b9a6-09c7f466272b tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 37a202b5-78f6-45a0-b753-7f9747214f3a] Reconfigured VM instance instance-0000007a to attach disk [datastore1] 37a202b5-78f6-45a0-b753-7f9747214f3a/37a202b5-78f6-45a0-b753-7f9747214f3a.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1287.666231] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9923cb93-99f8-4bcf-b659-d3ed185f831a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.672114] env[63028]: DEBUG oslo_vmware.api [None req-4ad9a670-1b56-4273-b9a6-09c7f466272b tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Waiting for the task: (returnval){ [ 1287.672114] env[63028]: value = "task-2736565" [ 1287.672114] env[63028]: _type = "Task" [ 1287.672114] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1287.679454] env[63028]: DEBUG oslo_vmware.api [None req-4ad9a670-1b56-4273-b9a6-09c7f466272b tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736565, 'name': Rename_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1288.181438] env[63028]: DEBUG oslo_vmware.api [None req-4ad9a670-1b56-4273-b9a6-09c7f466272b tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736565, 'name': Rename_Task, 'duration_secs': 0.136856} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1288.181905] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ad9a670-1b56-4273-b9a6-09c7f466272b tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 37a202b5-78f6-45a0-b753-7f9747214f3a] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1288.182250] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8e43700c-cf7a-490b-8ac0-b4f355000c13 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.187995] env[63028]: DEBUG oslo_vmware.api [None req-4ad9a670-1b56-4273-b9a6-09c7f466272b tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Waiting for the task: (returnval){ [ 1288.187995] env[63028]: value = "task-2736566" [ 1288.187995] env[63028]: _type = "Task" [ 1288.187995] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1288.194814] env[63028]: DEBUG oslo_vmware.api [None req-4ad9a670-1b56-4273-b9a6-09c7f466272b tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736566, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1288.697943] env[63028]: DEBUG oslo_vmware.api [None req-4ad9a670-1b56-4273-b9a6-09c7f466272b tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736566, 'name': PowerOnVM_Task, 'duration_secs': 0.424018} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1288.698324] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ad9a670-1b56-4273-b9a6-09c7f466272b tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 37a202b5-78f6-45a0-b753-7f9747214f3a] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1288.698404] env[63028]: INFO nova.compute.manager [None req-4ad9a670-1b56-4273-b9a6-09c7f466272b tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 37a202b5-78f6-45a0-b753-7f9747214f3a] Took 6.61 seconds to spawn the instance on the hypervisor. [ 1288.698584] env[63028]: DEBUG nova.compute.manager [None req-4ad9a670-1b56-4273-b9a6-09c7f466272b tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 37a202b5-78f6-45a0-b753-7f9747214f3a] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1288.699326] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e2c69d2-2fd3-493b-b02f-fb2e56cf320b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.218134] env[63028]: INFO nova.compute.manager [None req-4ad9a670-1b56-4273-b9a6-09c7f466272b tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 37a202b5-78f6-45a0-b753-7f9747214f3a] Took 11.27 seconds to build instance. [ 1289.720798] env[63028]: DEBUG oslo_concurrency.lockutils [None req-4ad9a670-1b56-4273-b9a6-09c7f466272b tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Lock "37a202b5-78f6-45a0-b753-7f9747214f3a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.780s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1289.866144] env[63028]: DEBUG nova.compute.manager [req-ebab5fed-77c2-4209-984a-b393fe595dfe req-abcd5a55-1b40-4e9e-a2a1-4e3cc28a8c43 service nova] [instance: 37a202b5-78f6-45a0-b753-7f9747214f3a] Received event network-changed-3de00577-cb32-4425-bfe5-8c40b4fd4e52 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1289.866364] env[63028]: DEBUG nova.compute.manager [req-ebab5fed-77c2-4209-984a-b393fe595dfe req-abcd5a55-1b40-4e9e-a2a1-4e3cc28a8c43 service nova] [instance: 37a202b5-78f6-45a0-b753-7f9747214f3a] Refreshing instance network info cache due to event network-changed-3de00577-cb32-4425-bfe5-8c40b4fd4e52. 
{{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1289.866591] env[63028]: DEBUG oslo_concurrency.lockutils [req-ebab5fed-77c2-4209-984a-b393fe595dfe req-abcd5a55-1b40-4e9e-a2a1-4e3cc28a8c43 service nova] Acquiring lock "refresh_cache-37a202b5-78f6-45a0-b753-7f9747214f3a" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1289.866762] env[63028]: DEBUG oslo_concurrency.lockutils [req-ebab5fed-77c2-4209-984a-b393fe595dfe req-abcd5a55-1b40-4e9e-a2a1-4e3cc28a8c43 service nova] Acquired lock "refresh_cache-37a202b5-78f6-45a0-b753-7f9747214f3a" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1289.866891] env[63028]: DEBUG nova.network.neutron [req-ebab5fed-77c2-4209-984a-b393fe595dfe req-abcd5a55-1b40-4e9e-a2a1-4e3cc28a8c43 service nova] [instance: 37a202b5-78f6-45a0-b753-7f9747214f3a] Refreshing network info cache for port 3de00577-cb32-4425-bfe5-8c40b4fd4e52 {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1290.560876] env[63028]: DEBUG nova.network.neutron [req-ebab5fed-77c2-4209-984a-b393fe595dfe req-abcd5a55-1b40-4e9e-a2a1-4e3cc28a8c43 service nova] [instance: 37a202b5-78f6-45a0-b753-7f9747214f3a] Updated VIF entry in instance network info cache for port 3de00577-cb32-4425-bfe5-8c40b4fd4e52. {{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1290.561249] env[63028]: DEBUG nova.network.neutron [req-ebab5fed-77c2-4209-984a-b393fe595dfe req-abcd5a55-1b40-4e9e-a2a1-4e3cc28a8c43 service nova] [instance: 37a202b5-78f6-45a0-b753-7f9747214f3a] Updating instance_info_cache with network_info: [{"id": "3de00577-cb32-4425-bfe5-8c40b4fd4e52", "address": "fa:16:3e:0e:43:68", "network": {"id": "ec7daf90-9857-4c67-8588-1d8404499409", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-574413795-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.163", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e85128c5c889438bbb1df571b7756c6a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e1a5c1-4ae7-409b-8de7-d401684ef60d", "external-id": "nsx-vlan-transportzone-740", "segmentation_id": 740, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3de00577-cb", "ovs_interfaceid": "3de00577-cb32-4425-bfe5-8c40b4fd4e52", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1291.064317] env[63028]: DEBUG oslo_concurrency.lockutils [req-ebab5fed-77c2-4209-984a-b393fe595dfe req-abcd5a55-1b40-4e9e-a2a1-4e3cc28a8c43 service nova] Releasing lock "refresh_cache-37a202b5-78f6-45a0-b753-7f9747214f3a" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1300.983811] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task 
ComputeManager._check_instance_build_time {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1300.984221] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1300.984221] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1300.984357] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1300.984474] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1300.984617] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1300.984767] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1300.984903] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=63028) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}} [ 1300.985066] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1301.488734] env[63028]: DEBUG oslo_concurrency.lockutils [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1301.488998] env[63028]: DEBUG oslo_concurrency.lockutils [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1301.489157] env[63028]: DEBUG oslo_concurrency.lockutils [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1301.489314] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63028) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1301.490633] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a134543-9904-4f49-bf70-bdc34b85e56d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.498607] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f415dc4e-78ec-4748-9062-3a3199d92e53 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.514008] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40616202-64ab-4f5d-b648-aeb46499fb29 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.519962] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66ff8cec-91ae-4113-8444-199fa1f33ed2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.547667] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180521MB free_disk=111GB free_vcpus=48 pci_devices=None {{(pid=63028) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1301.547807] env[63028]: DEBUG oslo_concurrency.lockutils [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
1301.547983] env[63028]: DEBUG oslo_concurrency.lockutils [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1302.570703] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance 37a202b5-78f6-45a0-b753-7f9747214f3a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1302.570983] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=63028) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1302.571081] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=63028) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1302.594645] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5eb46d16-755d-4372-b3db-6e61a2a677e4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.602258] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b911f355-21ce-4157-9d3c-7d61a237109d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.631820] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55b164be-410d-4857-a508-5706d040c9c8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.638600] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41a5950b-aa69-4055-a9f2-c5b45c23783f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.651187] env[63028]: DEBUG nova.compute.provider_tree [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1303.154709] env[63028]: DEBUG nova.scheduler.client.report [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1303.659934] env[63028]: DEBUG nova.compute.resource_tracker [None 
req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63028) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1303.660373] env[63028]: DEBUG oslo_concurrency.lockutils [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.112s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1327.274803] env[63028]: DEBUG oslo_concurrency.lockutils [None req-634697fb-09f3-45e4-a323-67cc98d6d99f tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Acquiring lock "37a202b5-78f6-45a0-b753-7f9747214f3a" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1327.275234] env[63028]: DEBUG oslo_concurrency.lockutils [None req-634697fb-09f3-45e4-a323-67cc98d6d99f tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Lock "37a202b5-78f6-45a0-b753-7f9747214f3a" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1327.275293] env[63028]: DEBUG nova.compute.manager [None req-634697fb-09f3-45e4-a323-67cc98d6d99f tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 37a202b5-78f6-45a0-b753-7f9747214f3a] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1327.276235] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60d6f7ff-4ad9-4340-8049-a1b226e65d3c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.282800] env[63028]: DEBUG nova.compute.manager [None req-634697fb-09f3-45e4-a323-67cc98d6d99f tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 37a202b5-78f6-45a0-b753-7f9747214f3a] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=63028) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1327.283374] env[63028]: DEBUG nova.objects.instance [None req-634697fb-09f3-45e4-a323-67cc98d6d99f tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Lazy-loading 'flavor' on Instance uuid 37a202b5-78f6-45a0-b753-7f9747214f3a {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1328.291140] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-634697fb-09f3-45e4-a323-67cc98d6d99f tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 37a202b5-78f6-45a0-b753-7f9747214f3a] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1328.291571] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-98094485-ff22-4623-90ef-811711571ca8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.298972] env[63028]: DEBUG 
oslo_vmware.api [None req-634697fb-09f3-45e4-a323-67cc98d6d99f tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Waiting for the task: (returnval){ [ 1328.298972] env[63028]: value = "task-2736567" [ 1328.298972] env[63028]: _type = "Task" [ 1328.298972] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1328.307033] env[63028]: DEBUG oslo_vmware.api [None req-634697fb-09f3-45e4-a323-67cc98d6d99f tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736567, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1328.808861] env[63028]: DEBUG oslo_vmware.api [None req-634697fb-09f3-45e4-a323-67cc98d6d99f tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736567, 'name': PowerOffVM_Task, 'duration_secs': 0.200609} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1328.809121] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-634697fb-09f3-45e4-a323-67cc98d6d99f tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 37a202b5-78f6-45a0-b753-7f9747214f3a] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1328.809319] env[63028]: DEBUG nova.compute.manager [None req-634697fb-09f3-45e4-a323-67cc98d6d99f tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 37a202b5-78f6-45a0-b753-7f9747214f3a] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1328.810057] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-775ef924-f530-4be8-bed7-d65e385ed05f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.321684] env[63028]: DEBUG oslo_concurrency.lockutils [None req-634697fb-09f3-45e4-a323-67cc98d6d99f tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Lock "37a202b5-78f6-45a0-b753-7f9747214f3a" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.046s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1329.683366] env[63028]: DEBUG nova.objects.instance [None req-0b09cb24-b305-4ac7-bd6a-25bc42213bca tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Lazy-loading 'flavor' on Instance uuid 37a202b5-78f6-45a0-b753-7f9747214f3a {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1330.189045] env[63028]: DEBUG oslo_concurrency.lockutils [None req-0b09cb24-b305-4ac7-bd6a-25bc42213bca tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Acquiring lock "refresh_cache-37a202b5-78f6-45a0-b753-7f9747214f3a" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1330.189302] env[63028]: DEBUG oslo_concurrency.lockutils [None req-0b09cb24-b305-4ac7-bd6a-25bc42213bca tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Acquired lock 
"refresh_cache-37a202b5-78f6-45a0-b753-7f9747214f3a" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1330.189562] env[63028]: DEBUG nova.network.neutron [None req-0b09cb24-b305-4ac7-bd6a-25bc42213bca tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 37a202b5-78f6-45a0-b753-7f9747214f3a] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1330.189830] env[63028]: DEBUG nova.objects.instance [None req-0b09cb24-b305-4ac7-bd6a-25bc42213bca tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Lazy-loading 'info_cache' on Instance uuid 37a202b5-78f6-45a0-b753-7f9747214f3a {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1330.693841] env[63028]: DEBUG nova.objects.base [None req-0b09cb24-b305-4ac7-bd6a-25bc42213bca tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Object Instance<37a202b5-78f6-45a0-b753-7f9747214f3a> lazy-loaded attributes: flavor,info_cache {{(pid=63028) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1331.395639] env[63028]: DEBUG nova.network.neutron [None req-0b09cb24-b305-4ac7-bd6a-25bc42213bca tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 37a202b5-78f6-45a0-b753-7f9747214f3a] Updating instance_info_cache with network_info: [{"id": "3de00577-cb32-4425-bfe5-8c40b4fd4e52", "address": "fa:16:3e:0e:43:68", "network": {"id": "ec7daf90-9857-4c67-8588-1d8404499409", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-574413795-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.163", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e85128c5c889438bbb1df571b7756c6a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e1a5c1-4ae7-409b-8de7-d401684ef60d", "external-id": "nsx-vlan-transportzone-740", "segmentation_id": 740, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3de00577-cb", "ovs_interfaceid": "3de00577-cb32-4425-bfe5-8c40b4fd4e52", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1331.898590] env[63028]: DEBUG oslo_concurrency.lockutils [None req-0b09cb24-b305-4ac7-bd6a-25bc42213bca tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Releasing lock "refresh_cache-37a202b5-78f6-45a0-b753-7f9747214f3a" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1332.906079] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b09cb24-b305-4ac7-bd6a-25bc42213bca tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 37a202b5-78f6-45a0-b753-7f9747214f3a] Powering on the VM {{(pid=63028) power_on_instance 
/opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1332.906486] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c811351f-dc6c-4918-b7b3-6559893507d2 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.914164] env[63028]: DEBUG oslo_vmware.api [None req-0b09cb24-b305-4ac7-bd6a-25bc42213bca tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Waiting for the task: (returnval){ [ 1332.914164] env[63028]: value = "task-2736568" [ 1332.914164] env[63028]: _type = "Task" [ 1332.914164] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1332.921587] env[63028]: DEBUG oslo_vmware.api [None req-0b09cb24-b305-4ac7-bd6a-25bc42213bca tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736568, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1333.423807] env[63028]: DEBUG oslo_vmware.api [None req-0b09cb24-b305-4ac7-bd6a-25bc42213bca tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736568, 'name': PowerOnVM_Task, 'duration_secs': 0.336252} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1333.424070] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b09cb24-b305-4ac7-bd6a-25bc42213bca tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 37a202b5-78f6-45a0-b753-7f9747214f3a] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1333.424277] env[63028]: DEBUG nova.compute.manager [None req-0b09cb24-b305-4ac7-bd6a-25bc42213bca tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 37a202b5-78f6-45a0-b753-7f9747214f3a] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1333.425029] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11de587e-67e6-49e7-8e2e-c0e1bdc24d6e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.428647] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-370c9b6d-fd4d-4c97-a1ea-22503d6016de {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.435530] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-d508054e-3c63-4619-b139-3b55ff231c96 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 37a202b5-78f6-45a0-b753-7f9747214f3a] Suspending the VM {{(pid=63028) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1334.435747] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-13f257dd-7b4d-4bf7-a2ae-d612002f75f9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.441784] env[63028]: DEBUG oslo_vmware.api [None req-d508054e-3c63-4619-b139-3b55ff231c96 tempest-ServerActionsTestJSON-83366727 
tempest-ServerActionsTestJSON-83366727-project-member] Waiting for the task: (returnval){ [ 1334.441784] env[63028]: value = "task-2736569" [ 1334.441784] env[63028]: _type = "Task" [ 1334.441784] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1334.449338] env[63028]: DEBUG oslo_vmware.api [None req-d508054e-3c63-4619-b139-3b55ff231c96 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736569, 'name': SuspendVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1334.951575] env[63028]: DEBUG oslo_vmware.api [None req-d508054e-3c63-4619-b139-3b55ff231c96 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736569, 'name': SuspendVM_Task} progress is 100%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1335.452268] env[63028]: DEBUG oslo_vmware.api [None req-d508054e-3c63-4619-b139-3b55ff231c96 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736569, 'name': SuspendVM_Task, 'duration_secs': 0.540425} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1335.452649] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-d508054e-3c63-4619-b139-3b55ff231c96 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 37a202b5-78f6-45a0-b753-7f9747214f3a] Suspended the VM {{(pid=63028) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1335.452694] env[63028]: DEBUG nova.compute.manager [None req-d508054e-3c63-4619-b139-3b55ff231c96 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 37a202b5-78f6-45a0-b753-7f9747214f3a] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1335.453442] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f96de97f-5a12-4a95-a73a-a40545424cae {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.736566] env[63028]: INFO nova.compute.manager [None req-155b4e7b-08ca-44bc-861c-d4204f472e92 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 37a202b5-78f6-45a0-b753-7f9747214f3a] Resuming [ 1336.737259] env[63028]: DEBUG nova.objects.instance [None req-155b4e7b-08ca-44bc-861c-d4204f472e92 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Lazy-loading 'flavor' on Instance uuid 37a202b5-78f6-45a0-b753-7f9747214f3a {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1338.249056] env[63028]: DEBUG oslo_concurrency.lockutils [None req-155b4e7b-08ca-44bc-861c-d4204f472e92 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Acquiring lock "refresh_cache-37a202b5-78f6-45a0-b753-7f9747214f3a" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1338.249056] env[63028]: DEBUG oslo_concurrency.lockutils [None req-155b4e7b-08ca-44bc-861c-d4204f472e92 tempest-ServerActionsTestJSON-83366727 
tempest-ServerActionsTestJSON-83366727-project-member] Acquired lock "refresh_cache-37a202b5-78f6-45a0-b753-7f9747214f3a" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1338.249056] env[63028]: DEBUG nova.network.neutron [None req-155b4e7b-08ca-44bc-861c-d4204f472e92 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 37a202b5-78f6-45a0-b753-7f9747214f3a] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1338.938031] env[63028]: DEBUG nova.network.neutron [None req-155b4e7b-08ca-44bc-861c-d4204f472e92 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 37a202b5-78f6-45a0-b753-7f9747214f3a] Updating instance_info_cache with network_info: [{"id": "3de00577-cb32-4425-bfe5-8c40b4fd4e52", "address": "fa:16:3e:0e:43:68", "network": {"id": "ec7daf90-9857-4c67-8588-1d8404499409", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-574413795-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.163", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e85128c5c889438bbb1df571b7756c6a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e1a5c1-4ae7-409b-8de7-d401684ef60d", "external-id": "nsx-vlan-transportzone-740", "segmentation_id": 740, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3de00577-cb", "ovs_interfaceid": "3de00577-cb32-4425-bfe5-8c40b4fd4e52", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1339.441837] env[63028]: DEBUG oslo_concurrency.lockutils [None req-155b4e7b-08ca-44bc-861c-d4204f472e92 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Releasing lock "refresh_cache-37a202b5-78f6-45a0-b753-7f9747214f3a" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1339.442869] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afa62d29-3aa1-4fa2-95a9-28bfff3ed05d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.449321] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-155b4e7b-08ca-44bc-861c-d4204f472e92 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 37a202b5-78f6-45a0-b753-7f9747214f3a] Resuming the VM {{(pid=63028) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1183}} [ 1339.449548] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-aed3a42c-bdf2-4dc8-b385-be7fbc0d987e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.455714] env[63028]: DEBUG oslo_vmware.api [None 
req-155b4e7b-08ca-44bc-861c-d4204f472e92 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Waiting for the task: (returnval){ [ 1339.455714] env[63028]: value = "task-2736570" [ 1339.455714] env[63028]: _type = "Task" [ 1339.455714] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1339.463093] env[63028]: DEBUG oslo_vmware.api [None req-155b4e7b-08ca-44bc-861c-d4204f472e92 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736570, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1339.970181] env[63028]: DEBUG oslo_vmware.api [None req-155b4e7b-08ca-44bc-861c-d4204f472e92 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736570, 'name': PowerOnVM_Task, 'duration_secs': 0.481388} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1339.970511] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-155b4e7b-08ca-44bc-861c-d4204f472e92 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 37a202b5-78f6-45a0-b753-7f9747214f3a] Resumed the VM {{(pid=63028) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1188}} [ 1339.970636] env[63028]: DEBUG nova.compute.manager [None req-155b4e7b-08ca-44bc-861c-d4204f472e92 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 37a202b5-78f6-45a0-b753-7f9747214f3a] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1339.971463] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08506b73-e773-426c-bb2a-8d8dc977a405 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.369239] env[63028]: DEBUG oslo_concurrency.lockutils [None req-50b93ed7-f903-4284-82a8-3f13c6f14e85 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Acquiring lock "37a202b5-78f6-45a0-b753-7f9747214f3a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1341.369616] env[63028]: DEBUG oslo_concurrency.lockutils [None req-50b93ed7-f903-4284-82a8-3f13c6f14e85 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Lock "37a202b5-78f6-45a0-b753-7f9747214f3a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1341.369683] env[63028]: DEBUG oslo_concurrency.lockutils [None req-50b93ed7-f903-4284-82a8-3f13c6f14e85 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Acquiring lock "37a202b5-78f6-45a0-b753-7f9747214f3a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1341.369848] env[63028]: DEBUG oslo_concurrency.lockutils [None 
req-50b93ed7-f903-4284-82a8-3f13c6f14e85 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Lock "37a202b5-78f6-45a0-b753-7f9747214f3a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1341.370028] env[63028]: DEBUG oslo_concurrency.lockutils [None req-50b93ed7-f903-4284-82a8-3f13c6f14e85 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Lock "37a202b5-78f6-45a0-b753-7f9747214f3a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1341.372104] env[63028]: INFO nova.compute.manager [None req-50b93ed7-f903-4284-82a8-3f13c6f14e85 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 37a202b5-78f6-45a0-b753-7f9747214f3a] Terminating instance [ 1341.875727] env[63028]: DEBUG nova.compute.manager [None req-50b93ed7-f903-4284-82a8-3f13c6f14e85 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 37a202b5-78f6-45a0-b753-7f9747214f3a] Start destroying the instance on the hypervisor. {{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1341.875965] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-50b93ed7-f903-4284-82a8-3f13c6f14e85 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 37a202b5-78f6-45a0-b753-7f9747214f3a] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1341.876889] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ceb9b69-8330-42e4-ab1e-7769e0353f2a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.884678] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-50b93ed7-f903-4284-82a8-3f13c6f14e85 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 37a202b5-78f6-45a0-b753-7f9747214f3a] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1341.884896] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6eda97dd-dfdb-4086-8090-1aabf953bca9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.894849] env[63028]: DEBUG oslo_vmware.api [None req-50b93ed7-f903-4284-82a8-3f13c6f14e85 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Waiting for the task: (returnval){ [ 1341.894849] env[63028]: value = "task-2736571" [ 1341.894849] env[63028]: _type = "Task" [ 1341.894849] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1341.902718] env[63028]: DEBUG oslo_vmware.api [None req-50b93ed7-f903-4284-82a8-3f13c6f14e85 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736571, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1342.404330] env[63028]: DEBUG oslo_vmware.api [None req-50b93ed7-f903-4284-82a8-3f13c6f14e85 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736571, 'name': PowerOffVM_Task, 'duration_secs': 0.16668} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1342.404704] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-50b93ed7-f903-4284-82a8-3f13c6f14e85 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 37a202b5-78f6-45a0-b753-7f9747214f3a] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1342.404789] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-50b93ed7-f903-4284-82a8-3f13c6f14e85 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 37a202b5-78f6-45a0-b753-7f9747214f3a] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1342.404981] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7574a6e7-b4b6-476f-bd29-6427b1dd6739 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.463573] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-50b93ed7-f903-4284-82a8-3f13c6f14e85 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 37a202b5-78f6-45a0-b753-7f9747214f3a] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1342.463780] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-50b93ed7-f903-4284-82a8-3f13c6f14e85 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 37a202b5-78f6-45a0-b753-7f9747214f3a] Deleting contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1342.463958] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-50b93ed7-f903-4284-82a8-3f13c6f14e85 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Deleting the datastore file [datastore1] 37a202b5-78f6-45a0-b753-7f9747214f3a {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1342.464225] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c034a7d2-0dfd-413d-bcd8-64705a6a91da {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.470475] env[63028]: DEBUG oslo_vmware.api [None req-50b93ed7-f903-4284-82a8-3f13c6f14e85 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Waiting for the task: (returnval){ [ 1342.470475] env[63028]: value = "task-2736573" [ 1342.470475] env[63028]: _type = "Task" [ 1342.470475] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1342.477679] env[63028]: DEBUG oslo_vmware.api [None req-50b93ed7-f903-4284-82a8-3f13c6f14e85 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736573, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1342.980159] env[63028]: DEBUG oslo_vmware.api [None req-50b93ed7-f903-4284-82a8-3f13c6f14e85 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Task: {'id': task-2736573, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.135838} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1342.980419] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-50b93ed7-f903-4284-82a8-3f13c6f14e85 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1342.980590] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-50b93ed7-f903-4284-82a8-3f13c6f14e85 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 37a202b5-78f6-45a0-b753-7f9747214f3a] Deleted contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1342.980762] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-50b93ed7-f903-4284-82a8-3f13c6f14e85 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 37a202b5-78f6-45a0-b753-7f9747214f3a] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1342.980934] env[63028]: INFO nova.compute.manager [None req-50b93ed7-f903-4284-82a8-3f13c6f14e85 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] [instance: 37a202b5-78f6-45a0-b753-7f9747214f3a] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1342.981205] env[63028]: DEBUG oslo.service.loopingcall [None req-50b93ed7-f903-4284-82a8-3f13c6f14e85 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1342.981395] env[63028]: DEBUG nova.compute.manager [-] [instance: 37a202b5-78f6-45a0-b753-7f9747214f3a] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1342.981488] env[63028]: DEBUG nova.network.neutron [-] [instance: 37a202b5-78f6-45a0-b753-7f9747214f3a] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1343.419764] env[63028]: DEBUG nova.compute.manager [req-8e873efd-3039-4931-9ed2-af144076bf2d req-8580a011-e6ca-4d02-aee7-e40b5952c1dc service nova] [instance: 37a202b5-78f6-45a0-b753-7f9747214f3a] Received event network-vif-deleted-3de00577-cb32-4425-bfe5-8c40b4fd4e52 {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1343.420100] env[63028]: INFO nova.compute.manager [req-8e873efd-3039-4931-9ed2-af144076bf2d req-8580a011-e6ca-4d02-aee7-e40b5952c1dc service nova] [instance: 37a202b5-78f6-45a0-b753-7f9747214f3a] Neutron deleted interface 3de00577-cb32-4425-bfe5-8c40b4fd4e52; detaching it from the instance and deleting it from the info cache [ 1343.420435] env[63028]: DEBUG nova.network.neutron [req-8e873efd-3039-4931-9ed2-af144076bf2d req-8580a011-e6ca-4d02-aee7-e40b5952c1dc service nova] [instance: 37a202b5-78f6-45a0-b753-7f9747214f3a] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1343.905202] env[63028]: DEBUG nova.network.neutron [-] [instance: 37a202b5-78f6-45a0-b753-7f9747214f3a] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1343.924302] env[63028]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7a9f2d03-2096-4838-8f28-6186278931ae {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.935385] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69d8dfd0-e6a4-41bb-b7ec-af1ea300542c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.958319] env[63028]: DEBUG nova.compute.manager [req-8e873efd-3039-4931-9ed2-af144076bf2d req-8580a011-e6ca-4d02-aee7-e40b5952c1dc service nova] [instance: 37a202b5-78f6-45a0-b753-7f9747214f3a] Detach interface failed, port_id=3de00577-cb32-4425-bfe5-8c40b4fd4e52, reason: Instance 37a202b5-78f6-45a0-b753-7f9747214f3a could not be found. {{(pid=63028) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1344.407771] env[63028]: INFO nova.compute.manager [-] [instance: 37a202b5-78f6-45a0-b753-7f9747214f3a] Took 1.43 seconds to deallocate network for instance. 
[ 1344.914343] env[63028]: DEBUG oslo_concurrency.lockutils [None req-50b93ed7-f903-4284-82a8-3f13c6f14e85 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1344.914716] env[63028]: DEBUG oslo_concurrency.lockutils [None req-50b93ed7-f903-4284-82a8-3f13c6f14e85 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1344.914778] env[63028]: DEBUG nova.objects.instance [None req-50b93ed7-f903-4284-82a8-3f13c6f14e85 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Lazy-loading 'resources' on Instance uuid 37a202b5-78f6-45a0-b753-7f9747214f3a {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1345.448612] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91da1d02-9cc0-458d-b7a5-2ab9acf607a6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.455873] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-445ffa3e-38c0-4661-80ea-6f86551b5c7f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.485683] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81398a97-faef-4c29-a8bd-19addb290f84 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.492043] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93f2ecef-30a2-4ff6-99ac-7599930923f3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.504345] env[63028]: DEBUG nova.compute.provider_tree [None req-50b93ed7-f903-4284-82a8-3f13c6f14e85 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1346.007553] env[63028]: DEBUG nova.scheduler.client.report [None req-50b93ed7-f903-4284-82a8-3f13c6f14e85 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1346.512480] env[63028]: DEBUG oslo_concurrency.lockutils [None req-50b93ed7-f903-4284-82a8-3f13c6f14e85 tempest-ServerActionsTestJSON-83366727 
tempest-ServerActionsTestJSON-83366727-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.598s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1346.531850] env[63028]: INFO nova.scheduler.client.report [None req-50b93ed7-f903-4284-82a8-3f13c6f14e85 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Deleted allocations for instance 37a202b5-78f6-45a0-b753-7f9747214f3a [ 1347.040285] env[63028]: DEBUG oslo_concurrency.lockutils [None req-50b93ed7-f903-4284-82a8-3f13c6f14e85 tempest-ServerActionsTestJSON-83366727 tempest-ServerActionsTestJSON-83366727-project-member] Lock "37a202b5-78f6-45a0-b753-7f9747214f3a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.671s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1351.977942] env[63028]: DEBUG oslo_concurrency.lockutils [None req-13d3f978-a524-4a7e-af2f-2fec9c4dee18 tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Acquiring lock "068fdccf-16f5-4701-a481-5fa047ab1fa7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1351.978211] env[63028]: DEBUG oslo_concurrency.lockutils [None req-13d3f978-a524-4a7e-af2f-2fec9c4dee18 tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Lock "068fdccf-16f5-4701-a481-5fa047ab1fa7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1352.480564] env[63028]: DEBUG nova.compute.manager [None req-13d3f978-a524-4a7e-af2f-2fec9c4dee18 tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] [instance: 068fdccf-16f5-4701-a481-5fa047ab1fa7] Starting instance... 
{{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1353.002233] env[63028]: DEBUG oslo_concurrency.lockutils [None req-13d3f978-a524-4a7e-af2f-2fec9c4dee18 tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1353.002526] env[63028]: DEBUG oslo_concurrency.lockutils [None req-13d3f978-a524-4a7e-af2f-2fec9c4dee18 tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1353.003897] env[63028]: INFO nova.compute.claims [None req-13d3f978-a524-4a7e-af2f-2fec9c4dee18 tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] [instance: 068fdccf-16f5-4701-a481-5fa047ab1fa7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1354.043246] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf5f18bf-9e98-4f80-973c-3a9a6ad97003 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.050637] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83abeb34-7c92-4f5c-911c-57c05f9f36ce {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.079564] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89451464-2ad4-4804-b815-0dfca9b2b9e3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.087259] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22391357-cb61-48b5-9485-c390771ce356 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.100658] env[63028]: DEBUG nova.compute.provider_tree [None req-13d3f978-a524-4a7e-af2f-2fec9c4dee18 tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1354.603924] env[63028]: DEBUG nova.scheduler.client.report [None req-13d3f978-a524-4a7e-af2f-2fec9c4dee18 tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 
1355.109652] env[63028]: DEBUG oslo_concurrency.lockutils [None req-13d3f978-a524-4a7e-af2f-2fec9c4dee18 tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.107s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1355.110207] env[63028]: DEBUG nova.compute.manager [None req-13d3f978-a524-4a7e-af2f-2fec9c4dee18 tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] [instance: 068fdccf-16f5-4701-a481-5fa047ab1fa7] Start building networks asynchronously for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1355.615194] env[63028]: DEBUG nova.compute.utils [None req-13d3f978-a524-4a7e-af2f-2fec9c4dee18 tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1355.616654] env[63028]: DEBUG nova.compute.manager [None req-13d3f978-a524-4a7e-af2f-2fec9c4dee18 tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] [instance: 068fdccf-16f5-4701-a481-5fa047ab1fa7] Allocating IP information in the background. {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1355.616822] env[63028]: DEBUG nova.network.neutron [None req-13d3f978-a524-4a7e-af2f-2fec9c4dee18 tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] [instance: 068fdccf-16f5-4701-a481-5fa047ab1fa7] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1355.658958] env[63028]: DEBUG nova.policy [None req-13d3f978-a524-4a7e-af2f-2fec9c4dee18 tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '35c4f21bfc9b40c799b7a0ab8b9efedd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2342baa8f1eb4682b1ddeb51638cdbcd', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 1355.893338] env[63028]: DEBUG nova.network.neutron [None req-13d3f978-a524-4a7e-af2f-2fec9c4dee18 tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] [instance: 068fdccf-16f5-4701-a481-5fa047ab1fa7] Successfully created port: 91f54c2c-815f-47bd-aa41-2d2ede7ba86d {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1356.119881] env[63028]: DEBUG nova.compute.manager [None req-13d3f978-a524-4a7e-af2f-2fec9c4dee18 tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] [instance: 068fdccf-16f5-4701-a481-5fa047ab1fa7] Start building block device mappings for instance. 
{{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1357.128768] env[63028]: DEBUG nova.compute.manager [None req-13d3f978-a524-4a7e-af2f-2fec9c4dee18 tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] [instance: 068fdccf-16f5-4701-a481-5fa047ab1fa7] Start spawning the instance on the hypervisor. {{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1357.160145] env[63028]: DEBUG nova.virt.hardware [None req-13d3f978-a524-4a7e-af2f-2fec9c4dee18 tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1357.160402] env[63028]: DEBUG nova.virt.hardware [None req-13d3f978-a524-4a7e-af2f-2fec9c4dee18 tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1357.160560] env[63028]: DEBUG nova.virt.hardware [None req-13d3f978-a524-4a7e-af2f-2fec9c4dee18 tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1357.160737] env[63028]: DEBUG nova.virt.hardware [None req-13d3f978-a524-4a7e-af2f-2fec9c4dee18 tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1357.160877] env[63028]: DEBUG nova.virt.hardware [None req-13d3f978-a524-4a7e-af2f-2fec9c4dee18 tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1357.161031] env[63028]: DEBUG nova.virt.hardware [None req-13d3f978-a524-4a7e-af2f-2fec9c4dee18 tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1357.161253] env[63028]: DEBUG nova.virt.hardware [None req-13d3f978-a524-4a7e-af2f-2fec9c4dee18 tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1357.161421] env[63028]: DEBUG nova.virt.hardware [None req-13d3f978-a524-4a7e-af2f-2fec9c4dee18 tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1357.161588] env[63028]: DEBUG nova.virt.hardware [None req-13d3f978-a524-4a7e-af2f-2fec9c4dee18 tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1357.161749] env[63028]: DEBUG nova.virt.hardware [None req-13d3f978-a524-4a7e-af2f-2fec9c4dee18 tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1357.161919] env[63028]: DEBUG nova.virt.hardware [None req-13d3f978-a524-4a7e-af2f-2fec9c4dee18 tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1357.162780] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a9b68ee-49e1-4a52-9750-f4acbf426b21 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.170678] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08abbaa1-88cb-4df2-9d91-06ec77024455 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.250701] env[63028]: DEBUG nova.compute.manager [req-8023a8b6-0367-46f9-89c5-cff54bb9b8ce req-6d9eb60d-6fda-4745-bb06-46b786adff74 service nova] [instance: 068fdccf-16f5-4701-a481-5fa047ab1fa7] Received event network-vif-plugged-91f54c2c-815f-47bd-aa41-2d2ede7ba86d {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1357.250903] env[63028]: DEBUG oslo_concurrency.lockutils [req-8023a8b6-0367-46f9-89c5-cff54bb9b8ce req-6d9eb60d-6fda-4745-bb06-46b786adff74 service nova] Acquiring lock "068fdccf-16f5-4701-a481-5fa047ab1fa7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1357.251129] env[63028]: DEBUG oslo_concurrency.lockutils [req-8023a8b6-0367-46f9-89c5-cff54bb9b8ce req-6d9eb60d-6fda-4745-bb06-46b786adff74 service nova] Lock "068fdccf-16f5-4701-a481-5fa047ab1fa7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1357.251300] env[63028]: DEBUG oslo_concurrency.lockutils [req-8023a8b6-0367-46f9-89c5-cff54bb9b8ce req-6d9eb60d-6fda-4745-bb06-46b786adff74 service nova] Lock "068fdccf-16f5-4701-a481-5fa047ab1fa7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 
0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1357.251467] env[63028]: DEBUG nova.compute.manager [req-8023a8b6-0367-46f9-89c5-cff54bb9b8ce req-6d9eb60d-6fda-4745-bb06-46b786adff74 service nova] [instance: 068fdccf-16f5-4701-a481-5fa047ab1fa7] No waiting events found dispatching network-vif-plugged-91f54c2c-815f-47bd-aa41-2d2ede7ba86d {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1357.251630] env[63028]: WARNING nova.compute.manager [req-8023a8b6-0367-46f9-89c5-cff54bb9b8ce req-6d9eb60d-6fda-4745-bb06-46b786adff74 service nova] [instance: 068fdccf-16f5-4701-a481-5fa047ab1fa7] Received unexpected event network-vif-plugged-91f54c2c-815f-47bd-aa41-2d2ede7ba86d for instance with vm_state building and task_state spawning. [ 1357.327944] env[63028]: DEBUG nova.network.neutron [None req-13d3f978-a524-4a7e-af2f-2fec9c4dee18 tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] [instance: 068fdccf-16f5-4701-a481-5fa047ab1fa7] Successfully updated port: 91f54c2c-815f-47bd-aa41-2d2ede7ba86d {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1357.830511] env[63028]: DEBUG oslo_concurrency.lockutils [None req-13d3f978-a524-4a7e-af2f-2fec9c4dee18 tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Acquiring lock "refresh_cache-068fdccf-16f5-4701-a481-5fa047ab1fa7" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1357.830813] env[63028]: DEBUG oslo_concurrency.lockutils [None req-13d3f978-a524-4a7e-af2f-2fec9c4dee18 tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Acquired lock "refresh_cache-068fdccf-16f5-4701-a481-5fa047ab1fa7" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1357.831111] env[63028]: DEBUG nova.network.neutron [None req-13d3f978-a524-4a7e-af2f-2fec9c4dee18 tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] [instance: 068fdccf-16f5-4701-a481-5fa047ab1fa7] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1358.361431] env[63028]: DEBUG nova.network.neutron [None req-13d3f978-a524-4a7e-af2f-2fec9c4dee18 tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] [instance: 068fdccf-16f5-4701-a481-5fa047ab1fa7] Instance cache missing network info. 
{{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1358.464663] env[63028]: DEBUG nova.network.neutron [None req-13d3f978-a524-4a7e-af2f-2fec9c4dee18 tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] [instance: 068fdccf-16f5-4701-a481-5fa047ab1fa7] Updating instance_info_cache with network_info: [{"id": "91f54c2c-815f-47bd-aa41-2d2ede7ba86d", "address": "fa:16:3e:6e:75:a7", "network": {"id": "b78cce39-4f77-4983-a56e-8966d8aaf751", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-744678156-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "2342baa8f1eb4682b1ddeb51638cdbcd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "74e6f6e0-95e6-4531-99e9-0e78350fb655", "external-id": "nsx-vlan-transportzone-896", "segmentation_id": 896, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap91f54c2c-81", "ovs_interfaceid": "91f54c2c-815f-47bd-aa41-2d2ede7ba86d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1358.967371] env[63028]: DEBUG oslo_concurrency.lockutils [None req-13d3f978-a524-4a7e-af2f-2fec9c4dee18 tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Releasing lock "refresh_cache-068fdccf-16f5-4701-a481-5fa047ab1fa7" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1358.967694] env[63028]: DEBUG nova.compute.manager [None req-13d3f978-a524-4a7e-af2f-2fec9c4dee18 tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] [instance: 068fdccf-16f5-4701-a481-5fa047ab1fa7] Instance network_info: |[{"id": "91f54c2c-815f-47bd-aa41-2d2ede7ba86d", "address": "fa:16:3e:6e:75:a7", "network": {"id": "b78cce39-4f77-4983-a56e-8966d8aaf751", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-744678156-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "2342baa8f1eb4682b1ddeb51638cdbcd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "74e6f6e0-95e6-4531-99e9-0e78350fb655", "external-id": "nsx-vlan-transportzone-896", "segmentation_id": 896, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap91f54c2c-81", "ovs_interfaceid": "91f54c2c-815f-47bd-aa41-2d2ede7ba86d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2003}} [ 1358.968120] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-13d3f978-a524-4a7e-af2f-2fec9c4dee18 tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] [instance: 068fdccf-16f5-4701-a481-5fa047ab1fa7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6e:75:a7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '74e6f6e0-95e6-4531-99e9-0e78350fb655', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '91f54c2c-815f-47bd-aa41-2d2ede7ba86d', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1358.975347] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-13d3f978-a524-4a7e-af2f-2fec9c4dee18 tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Creating folder: Project (2342baa8f1eb4682b1ddeb51638cdbcd). Parent ref: group-v550570. {{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1358.975962] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-db42bf3a-3a42-4602-a84c-c606fe1e3fc9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.987922] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-13d3f978-a524-4a7e-af2f-2fec9c4dee18 tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Created folder: Project (2342baa8f1eb4682b1ddeb51638cdbcd) in parent group-v550570. [ 1358.988111] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-13d3f978-a524-4a7e-af2f-2fec9c4dee18 tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Creating folder: Instances. Parent ref: group-v550903. {{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1358.988319] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4ae2cbe6-7d78-4522-b7a4-406859b01ce9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.996883] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-13d3f978-a524-4a7e-af2f-2fec9c4dee18 tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Created folder: Instances in parent group-v550903. [ 1358.997112] env[63028]: DEBUG oslo.service.loopingcall [None req-13d3f978-a524-4a7e-af2f-2fec9c4dee18 tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1358.997307] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 068fdccf-16f5-4701-a481-5fa047ab1fa7] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1358.997468] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9bbdc6fa-b40a-4df9-9179-278f5c2fe066 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.014842] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1359.014842] env[63028]: value = "task-2736576" [ 1359.014842] env[63028]: _type = "Task" [ 1359.014842] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1359.021565] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2736576, 'name': CreateVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1359.275923] env[63028]: DEBUG nova.compute.manager [req-74a8ac9b-d9ba-4807-b6be-ef9718e4c8f9 req-f64a4287-a6e3-4718-93cd-1a04e74a0e6d service nova] [instance: 068fdccf-16f5-4701-a481-5fa047ab1fa7] Received event network-changed-91f54c2c-815f-47bd-aa41-2d2ede7ba86d {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1359.276022] env[63028]: DEBUG nova.compute.manager [req-74a8ac9b-d9ba-4807-b6be-ef9718e4c8f9 req-f64a4287-a6e3-4718-93cd-1a04e74a0e6d service nova] [instance: 068fdccf-16f5-4701-a481-5fa047ab1fa7] Refreshing instance network info cache due to event network-changed-91f54c2c-815f-47bd-aa41-2d2ede7ba86d. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1359.276293] env[63028]: DEBUG oslo_concurrency.lockutils [req-74a8ac9b-d9ba-4807-b6be-ef9718e4c8f9 req-f64a4287-a6e3-4718-93cd-1a04e74a0e6d service nova] Acquiring lock "refresh_cache-068fdccf-16f5-4701-a481-5fa047ab1fa7" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1359.276445] env[63028]: DEBUG oslo_concurrency.lockutils [req-74a8ac9b-d9ba-4807-b6be-ef9718e4c8f9 req-f64a4287-a6e3-4718-93cd-1a04e74a0e6d service nova] Acquired lock "refresh_cache-068fdccf-16f5-4701-a481-5fa047ab1fa7" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1359.276611] env[63028]: DEBUG nova.network.neutron [req-74a8ac9b-d9ba-4807-b6be-ef9718e4c8f9 req-f64a4287-a6e3-4718-93cd-1a04e74a0e6d service nova] [instance: 068fdccf-16f5-4701-a481-5fa047ab1fa7] Refreshing network info cache for port 91f54c2c-815f-47bd-aa41-2d2ede7ba86d {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1359.524289] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2736576, 'name': CreateVM_Task, 'duration_secs': 0.278279} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1359.524638] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 068fdccf-16f5-4701-a481-5fa047ab1fa7] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1359.525092] env[63028]: DEBUG oslo_concurrency.lockutils [None req-13d3f978-a524-4a7e-af2f-2fec9c4dee18 tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1359.525263] env[63028]: DEBUG oslo_concurrency.lockutils [None req-13d3f978-a524-4a7e-af2f-2fec9c4dee18 tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1359.525583] env[63028]: DEBUG oslo_concurrency.lockutils [None req-13d3f978-a524-4a7e-af2f-2fec9c4dee18 tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1359.525828] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e43ffa02-5d43-4823-95aa-b22c2dca6e76 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.530091] env[63028]: DEBUG oslo_vmware.api [None req-13d3f978-a524-4a7e-af2f-2fec9c4dee18 tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Waiting for the task: (returnval){ [ 1359.530091] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52a331af-f0f5-5abc-3747-19fc60d125b2" [ 1359.530091] env[63028]: _type = "Task" [ 1359.530091] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1359.538354] env[63028]: DEBUG oslo_vmware.api [None req-13d3f978-a524-4a7e-af2f-2fec9c4dee18 tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52a331af-f0f5-5abc-3747-19fc60d125b2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1359.939449] env[63028]: DEBUG nova.network.neutron [req-74a8ac9b-d9ba-4807-b6be-ef9718e4c8f9 req-f64a4287-a6e3-4718-93cd-1a04e74a0e6d service nova] [instance: 068fdccf-16f5-4701-a481-5fa047ab1fa7] Updated VIF entry in instance network info cache for port 91f54c2c-815f-47bd-aa41-2d2ede7ba86d. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1359.939829] env[63028]: DEBUG nova.network.neutron [req-74a8ac9b-d9ba-4807-b6be-ef9718e4c8f9 req-f64a4287-a6e3-4718-93cd-1a04e74a0e6d service nova] [instance: 068fdccf-16f5-4701-a481-5fa047ab1fa7] Updating instance_info_cache with network_info: [{"id": "91f54c2c-815f-47bd-aa41-2d2ede7ba86d", "address": "fa:16:3e:6e:75:a7", "network": {"id": "b78cce39-4f77-4983-a56e-8966d8aaf751", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-744678156-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "2342baa8f1eb4682b1ddeb51638cdbcd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "74e6f6e0-95e6-4531-99e9-0e78350fb655", "external-id": "nsx-vlan-transportzone-896", "segmentation_id": 896, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap91f54c2c-81", "ovs_interfaceid": "91f54c2c-815f-47bd-aa41-2d2ede7ba86d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1360.041102] env[63028]: DEBUG oslo_vmware.api [None req-13d3f978-a524-4a7e-af2f-2fec9c4dee18 tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52a331af-f0f5-5abc-3747-19fc60d125b2, 'name': SearchDatastore_Task, 'duration_secs': 0.009961} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1360.041416] env[63028]: DEBUG oslo_concurrency.lockutils [None req-13d3f978-a524-4a7e-af2f-2fec9c4dee18 tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1360.041654] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-13d3f978-a524-4a7e-af2f-2fec9c4dee18 tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] [instance: 068fdccf-16f5-4701-a481-5fa047ab1fa7] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1360.041885] env[63028]: DEBUG oslo_concurrency.lockutils [None req-13d3f978-a524-4a7e-af2f-2fec9c4dee18 tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1360.042038] env[63028]: DEBUG oslo_concurrency.lockutils [None req-13d3f978-a524-4a7e-af2f-2fec9c4dee18 tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1360.042220] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-13d3f978-a524-4a7e-af2f-2fec9c4dee18 tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1360.042488] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-77a84021-cf86-46f4-b935-4505120d0a61 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.052508] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-13d3f978-a524-4a7e-af2f-2fec9c4dee18 tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1360.052689] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-13d3f978-a524-4a7e-af2f-2fec9c4dee18 tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1360.053428] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6939a1b7-eec4-4206-9e64-37d093b97a9d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.058607] env[63028]: DEBUG oslo_vmware.api [None req-13d3f978-a524-4a7e-af2f-2fec9c4dee18 tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Waiting for the task: (returnval){ [ 1360.058607] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52689fb2-eff2-039f-4f6b-5a3d707543aa" [ 1360.058607] env[63028]: _type = "Task" [ 1360.058607] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1360.065779] env[63028]: DEBUG oslo_vmware.api [None req-13d3f978-a524-4a7e-af2f-2fec9c4dee18 tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52689fb2-eff2-039f-4f6b-5a3d707543aa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1360.442554] env[63028]: DEBUG oslo_concurrency.lockutils [req-74a8ac9b-d9ba-4807-b6be-ef9718e4c8f9 req-f64a4287-a6e3-4718-93cd-1a04e74a0e6d service nova] Releasing lock "refresh_cache-068fdccf-16f5-4701-a481-5fa047ab1fa7" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1360.568665] env[63028]: DEBUG oslo_vmware.api [None req-13d3f978-a524-4a7e-af2f-2fec9c4dee18 tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52689fb2-eff2-039f-4f6b-5a3d707543aa, 'name': SearchDatastore_Task, 'duration_secs': 0.007897} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1360.569457] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-da47d161-1ac5-4e4e-9c6d-a891b546abf7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.574119] env[63028]: DEBUG oslo_vmware.api [None req-13d3f978-a524-4a7e-af2f-2fec9c4dee18 tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Waiting for the task: (returnval){ [ 1360.574119] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]526dcc0c-fb2e-011e-5411-b188693b6215" [ 1360.574119] env[63028]: _type = "Task" [ 1360.574119] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1360.580908] env[63028]: DEBUG oslo_vmware.api [None req-13d3f978-a524-4a7e-af2f-2fec9c4dee18 tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]526dcc0c-fb2e-011e-5411-b188693b6215, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1361.084729] env[63028]: DEBUG oslo_vmware.api [None req-13d3f978-a524-4a7e-af2f-2fec9c4dee18 tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]526dcc0c-fb2e-011e-5411-b188693b6215, 'name': SearchDatastore_Task, 'duration_secs': 0.00881} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1361.084982] env[63028]: DEBUG oslo_concurrency.lockutils [None req-13d3f978-a524-4a7e-af2f-2fec9c4dee18 tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1361.085248] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-13d3f978-a524-4a7e-af2f-2fec9c4dee18 tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] 068fdccf-16f5-4701-a481-5fa047ab1fa7/068fdccf-16f5-4701-a481-5fa047ab1fa7.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1361.085572] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-59ccbf5b-c7d3-44ab-9a58-99b5f014533b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.091909] env[63028]: DEBUG oslo_vmware.api [None req-13d3f978-a524-4a7e-af2f-2fec9c4dee18 tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Waiting for the task: (returnval){ [ 1361.091909] env[63028]: value = "task-2736577" [ 1361.091909] env[63028]: _type = "Task" [ 1361.091909] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1361.099035] env[63028]: DEBUG oslo_vmware.api [None req-13d3f978-a524-4a7e-af2f-2fec9c4dee18 tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Task: {'id': task-2736577, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1361.602170] env[63028]: DEBUG oslo_vmware.api [None req-13d3f978-a524-4a7e-af2f-2fec9c4dee18 tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Task: {'id': task-2736577, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.442434} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1361.602516] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-13d3f978-a524-4a7e-af2f-2fec9c4dee18 tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] 068fdccf-16f5-4701-a481-5fa047ab1fa7/068fdccf-16f5-4701-a481-5fa047ab1fa7.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1361.602582] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-13d3f978-a524-4a7e-af2f-2fec9c4dee18 tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] [instance: 068fdccf-16f5-4701-a481-5fa047ab1fa7] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1361.602822] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-87bb258c-5a3e-4ef9-a064-54c7c759b1de {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.608937] env[63028]: DEBUG oslo_vmware.api [None req-13d3f978-a524-4a7e-af2f-2fec9c4dee18 tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Waiting for the task: (returnval){ [ 1361.608937] env[63028]: value = "task-2736578" [ 1361.608937] env[63028]: _type = "Task" [ 1361.608937] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1361.615873] env[63028]: DEBUG oslo_vmware.api [None req-13d3f978-a524-4a7e-af2f-2fec9c4dee18 tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Task: {'id': task-2736578, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1362.118306] env[63028]: DEBUG oslo_vmware.api [None req-13d3f978-a524-4a7e-af2f-2fec9c4dee18 tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Task: {'id': task-2736578, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068927} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1362.118560] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-13d3f978-a524-4a7e-af2f-2fec9c4dee18 tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] [instance: 068fdccf-16f5-4701-a481-5fa047ab1fa7] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1362.119293] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a11f85d2-90b4-486b-b353-2b0b53718a7b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.140453] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-13d3f978-a524-4a7e-af2f-2fec9c4dee18 tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] [instance: 068fdccf-16f5-4701-a481-5fa047ab1fa7] Reconfiguring VM instance instance-0000007b to attach disk [datastore1] 068fdccf-16f5-4701-a481-5fa047ab1fa7/068fdccf-16f5-4701-a481-5fa047ab1fa7.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1362.140663] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e0c932fb-0b60-4873-86fc-8eaecb6f7552 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.159948] env[63028]: DEBUG oslo_vmware.api [None req-13d3f978-a524-4a7e-af2f-2fec9c4dee18 tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Waiting for the task: (returnval){ [ 1362.159948] env[63028]: value = "task-2736579" [ 1362.159948] env[63028]: _type = "Task" [ 1362.159948] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1362.167281] env[63028]: DEBUG oslo_vmware.api [None req-13d3f978-a524-4a7e-af2f-2fec9c4dee18 tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Task: {'id': task-2736579, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1362.670799] env[63028]: DEBUG oslo_vmware.api [None req-13d3f978-a524-4a7e-af2f-2fec9c4dee18 tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Task: {'id': task-2736579, 'name': ReconfigVM_Task, 'duration_secs': 0.267676} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1362.672132] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-13d3f978-a524-4a7e-af2f-2fec9c4dee18 tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] [instance: 068fdccf-16f5-4701-a481-5fa047ab1fa7] Reconfigured VM instance instance-0000007b to attach disk [datastore1] 068fdccf-16f5-4701-a481-5fa047ab1fa7/068fdccf-16f5-4701-a481-5fa047ab1fa7.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1362.672132] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-03d2937b-0251-4bcc-b779-61790a763152 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.678441] env[63028]: DEBUG oslo_vmware.api [None req-13d3f978-a524-4a7e-af2f-2fec9c4dee18 tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Waiting for the task: (returnval){ [ 1362.678441] env[63028]: value = "task-2736580" [ 1362.678441] env[63028]: _type = "Task" [ 1362.678441] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1362.685336] env[63028]: DEBUG oslo_vmware.api [None req-13d3f978-a524-4a7e-af2f-2fec9c4dee18 tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Task: {'id': task-2736580, 'name': Rename_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1363.187563] env[63028]: DEBUG oslo_vmware.api [None req-13d3f978-a524-4a7e-af2f-2fec9c4dee18 tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Task: {'id': task-2736580, 'name': Rename_Task, 'duration_secs': 0.138237} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1363.187811] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-13d3f978-a524-4a7e-af2f-2fec9c4dee18 tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] [instance: 068fdccf-16f5-4701-a481-5fa047ab1fa7] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1363.188055] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-dc3b82f4-18b0-4a96-abe8-7d876016cca5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.193840] env[63028]: DEBUG oslo_vmware.api [None req-13d3f978-a524-4a7e-af2f-2fec9c4dee18 tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Waiting for the task: (returnval){ [ 1363.193840] env[63028]: value = "task-2736581" [ 1363.193840] env[63028]: _type = "Task" [ 1363.193840] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1363.201284] env[63028]: DEBUG oslo_vmware.api [None req-13d3f978-a524-4a7e-af2f-2fec9c4dee18 tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Task: {'id': task-2736581, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1363.661430] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1363.661743] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1363.703488] env[63028]: DEBUG oslo_vmware.api [None req-13d3f978-a524-4a7e-af2f-2fec9c4dee18 tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Task: {'id': task-2736581, 'name': PowerOnVM_Task, 'duration_secs': 0.443159} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1363.703766] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-13d3f978-a524-4a7e-af2f-2fec9c4dee18 tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] [instance: 068fdccf-16f5-4701-a481-5fa047ab1fa7] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1363.703938] env[63028]: INFO nova.compute.manager [None req-13d3f978-a524-4a7e-af2f-2fec9c4dee18 tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] [instance: 068fdccf-16f5-4701-a481-5fa047ab1fa7] Took 6.57 seconds to spawn the instance on the hypervisor. 
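The spawn sequence above (CreateVM_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task) is driven each time by the same wait-and-poll pattern: submit the vCenter task, then poll it until it reports success, logging progress along the way. A minimal sketch of such a polling loop follows; the helper name get_task_info and the state strings are assumptions for illustration, not the actual oslo.vmware API.

    # Illustrative sketch of the task-polling pattern behind the
    # "Waiting for the task ... progress is N% ... completed successfully"
    # lines above. Not the real oslo.vmware implementation.
    import time

    class TaskFailed(Exception):
        pass

    def wait_for_task(session, task_ref, poll_interval=0.5):
        while True:
            info = session.get_task_info(task_ref)  # assumed helper on the session object
            if info.state in ("queued", "running"):
                print("Task: %s progress is %s%%." % (task_ref, info.progress or 0))
                time.sleep(poll_interval)
                continue
            if info.state == "success":
                print("Task: %s completed successfully." % task_ref)
                return info.result
            raise TaskFailed(info.error or "task failed")

Each of the task-2736576 through task-2736581 waits in this trace corresponds to one pass through a loop of roughly this shape.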
[ 1363.704176] env[63028]: DEBUG nova.compute.manager [None req-13d3f978-a524-4a7e-af2f-2fec9c4dee18 tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] [instance: 068fdccf-16f5-4701-a481-5fa047ab1fa7] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1363.705062] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2b3f022-246d-4c92-8550-bf7a257a8959 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.166398] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1364.166696] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1364.166928] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1364.167200] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1364.167797] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1364.167797] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1364.167948] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63028) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}} [ 1364.168184] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1364.220058] env[63028]: INFO nova.compute.manager [None req-13d3f978-a524-4a7e-af2f-2fec9c4dee18 tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] [instance: 068fdccf-16f5-4701-a481-5fa047ab1fa7] Took 11.23 seconds to build instance. 
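Throughout this trace every "compute_resources" and per-instance lock emits a matched Acquiring / acquired (waited N s) / released (held N s) triple. A minimal sketch of a context manager that produces that kind of accounting, assuming a plain threading.Lock rather than oslo.concurrency's fair/external-lock machinery:

    # Sketch only: real Nova uses oslo_concurrency.lockutils; this just shows
    # the waited/held bookkeeping seen in the surrounding log lines.
    import threading
    import time
    from contextlib import contextmanager

    _locks = {}

    @contextmanager
    def timed_lock(name, owner):
        lock = _locks.setdefault(name, threading.Lock())
        print('Acquiring lock "%s" by "%s"' % (name, owner))
        t0 = time.monotonic()
        lock.acquire()
        print('Lock "%s" acquired by "%s" :: waited %.3fs' % (name, owner, time.monotonic() - t0))
        t1 = time.monotonic()
        try:
            yield
        finally:
            lock.release()
            print('Lock "%s" "released" by "%s" :: held %.3fs' % (name, owner, time.monotonic() - t1))

Used as, for example, with timed_lock("compute_resources", "ResourceTracker.instance_claim"): ... around a claim like the one recorded earlier in this trace.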
[ 1364.672333] env[63028]: DEBUG oslo_concurrency.lockutils [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1364.672333] env[63028]: DEBUG oslo_concurrency.lockutils [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1364.672333] env[63028]: DEBUG oslo_concurrency.lockutils [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1364.672489] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63028) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1364.673389] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76a26861-0645-4e02-aa6c-4023396cae13 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.681846] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e4459b4-05b9-40e2-bd5f-d3ca0f2adcae {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.695523] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-712de25e-d737-476c-8451-e540a853a509 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.701995] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bbe0217-88c0-4ebf-9b31-df7abaf1e7fc {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.731677] env[63028]: DEBUG oslo_concurrency.lockutils [None req-13d3f978-a524-4a7e-af2f-2fec9c4dee18 tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Lock "068fdccf-16f5-4701-a481-5fa047ab1fa7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.753s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1364.731997] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180741MB free_disk=111GB free_vcpus=48 pci_devices=None {{(pid=63028) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1364.732145] env[63028]: DEBUG oslo_concurrency.lockutils [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" 
{{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1364.732336] env[63028]: DEBUG oslo_concurrency.lockutils [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1365.319020] env[63028]: INFO nova.compute.manager [None req-5610274a-f8e6-4bf2-a18c-455fad19571e tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] [instance: 068fdccf-16f5-4701-a481-5fa047ab1fa7] Rescuing [ 1365.319317] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5610274a-f8e6-4bf2-a18c-455fad19571e tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Acquiring lock "refresh_cache-068fdccf-16f5-4701-a481-5fa047ab1fa7" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1365.319472] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5610274a-f8e6-4bf2-a18c-455fad19571e tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Acquired lock "refresh_cache-068fdccf-16f5-4701-a481-5fa047ab1fa7" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1365.319636] env[63028]: DEBUG nova.network.neutron [None req-5610274a-f8e6-4bf2-a18c-455fad19571e tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] [instance: 068fdccf-16f5-4701-a481-5fa047ab1fa7] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1365.756790] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Instance 068fdccf-16f5-4701-a481-5fa047ab1fa7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63028) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1365.757032] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=63028) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1365.757146] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=63028) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1365.780717] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bce527f4-fbe7-4f5e-acee-4c5466363bce {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.787778] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7921cdb2-e631-4af3-b67e-0f2897a0eb81 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.816178] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cda577ca-3f89-483f-9b2c-38ae51843fa5 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.824586] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ebf31bb-3789-47a8-8ee1-f1aa7fa03eb0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.838433] env[63028]: DEBUG nova.compute.provider_tree [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1365.997600] env[63028]: DEBUG nova.network.neutron [None req-5610274a-f8e6-4bf2-a18c-455fad19571e tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] [instance: 068fdccf-16f5-4701-a481-5fa047ab1fa7] Updating instance_info_cache with network_info: [{"id": "91f54c2c-815f-47bd-aa41-2d2ede7ba86d", "address": "fa:16:3e:6e:75:a7", "network": {"id": "b78cce39-4f77-4983-a56e-8966d8aaf751", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-744678156-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "2342baa8f1eb4682b1ddeb51638cdbcd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "74e6f6e0-95e6-4531-99e9-0e78350fb655", "external-id": "nsx-vlan-transportzone-896", "segmentation_id": 896, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap91f54c2c-81", "ovs_interfaceid": "91f54c2c-815f-47bd-aa41-2d2ede7ba86d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": 
{}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1366.341562] env[63028]: DEBUG nova.scheduler.client.report [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1366.500161] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5610274a-f8e6-4bf2-a18c-455fad19571e tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Releasing lock "refresh_cache-068fdccf-16f5-4701-a481-5fa047ab1fa7" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1366.846678] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63028) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1366.847041] env[63028]: DEBUG oslo_concurrency.lockutils [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.115s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1368.030201] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-5610274a-f8e6-4bf2-a18c-455fad19571e tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] [instance: 068fdccf-16f5-4701-a481-5fa047ab1fa7] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1368.030544] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a3fd720d-f082-41e0-ae5c-4837e240249f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.039681] env[63028]: DEBUG oslo_vmware.api [None req-5610274a-f8e6-4bf2-a18c-455fad19571e tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Waiting for the task: (returnval){ [ 1368.039681] env[63028]: value = "task-2736582" [ 1368.039681] env[63028]: _type = "Task" [ 1368.039681] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1368.047136] env[63028]: DEBUG oslo_vmware.api [None req-5610274a-f8e6-4bf2-a18c-455fad19571e tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Task: {'id': task-2736582, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1368.549063] env[63028]: DEBUG oslo_vmware.api [None req-5610274a-f8e6-4bf2-a18c-455fad19571e tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Task: {'id': task-2736582, 'name': PowerOffVM_Task, 'duration_secs': 0.179108} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1368.549329] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-5610274a-f8e6-4bf2-a18c-455fad19571e tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] [instance: 068fdccf-16f5-4701-a481-5fa047ab1fa7] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1368.550099] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-808bf18c-3277-4210-ba0c-69d0c176c184 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.567674] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79a443ca-a7dd-425f-8217-42480dddd103 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.592583] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-5610274a-f8e6-4bf2-a18c-455fad19571e tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] [instance: 068fdccf-16f5-4701-a481-5fa047ab1fa7] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1368.592826] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-db8efe67-186d-4d0c-b00c-b31fdbf41590 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.598257] env[63028]: DEBUG oslo_vmware.api [None req-5610274a-f8e6-4bf2-a18c-455fad19571e tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Waiting for the task: (returnval){ [ 1368.598257] env[63028]: value = "task-2736583" [ 1368.598257] env[63028]: _type = "Task" [ 1368.598257] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1368.605389] env[63028]: DEBUG oslo_vmware.api [None req-5610274a-f8e6-4bf2-a18c-455fad19571e tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Task: {'id': task-2736583, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1369.110182] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-5610274a-f8e6-4bf2-a18c-455fad19571e tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] [instance: 068fdccf-16f5-4701-a481-5fa047ab1fa7] VM already powered off {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1369.110540] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-5610274a-f8e6-4bf2-a18c-455fad19571e tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] [instance: 068fdccf-16f5-4701-a481-5fa047ab1fa7] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1369.110627] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5610274a-f8e6-4bf2-a18c-455fad19571e tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1369.110770] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5610274a-f8e6-4bf2-a18c-455fad19571e tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1369.110941] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-5610274a-f8e6-4bf2-a18c-455fad19571e tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1369.111195] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-acc84a4e-9d7a-4bb2-beaa-f994a77a3d90 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.120063] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-5610274a-f8e6-4bf2-a18c-455fad19571e tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1369.120230] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-5610274a-f8e6-4bf2-a18c-455fad19571e tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1369.120875] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-828c65d3-76eb-41a8-9de0-a59097520112 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.125290] env[63028]: DEBUG oslo_vmware.api [None req-5610274a-f8e6-4bf2-a18c-455fad19571e tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Waiting for the task: (returnval){ [ 1369.125290] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]520c947c-0d05-06d3-f25f-bfd9731b6744" [ 1369.125290] env[63028]: _type = "Task" [ 1369.125290] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1369.132590] env[63028]: DEBUG oslo_vmware.api [None req-5610274a-f8e6-4bf2-a18c-455fad19571e tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]520c947c-0d05-06d3-f25f-bfd9731b6744, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1369.635349] env[63028]: DEBUG oslo_vmware.api [None req-5610274a-f8e6-4bf2-a18c-455fad19571e tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]520c947c-0d05-06d3-f25f-bfd9731b6744, 'name': SearchDatastore_Task, 'duration_secs': 0.008331} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1369.636118] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-48dbd936-0fef-48ca-8700-b58e7feae85d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.641583] env[63028]: DEBUG oslo_vmware.api [None req-5610274a-f8e6-4bf2-a18c-455fad19571e tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Waiting for the task: (returnval){ [ 1369.641583] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52760299-262e-d4dc-39a8-f941144e69e4" [ 1369.641583] env[63028]: _type = "Task" [ 1369.641583] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1369.648569] env[63028]: DEBUG oslo_vmware.api [None req-5610274a-f8e6-4bf2-a18c-455fad19571e tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52760299-262e-d4dc-39a8-f941144e69e4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1370.152316] env[63028]: DEBUG oslo_vmware.api [None req-5610274a-f8e6-4bf2-a18c-455fad19571e tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]52760299-262e-d4dc-39a8-f941144e69e4, 'name': SearchDatastore_Task, 'duration_secs': 0.009184} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1370.152675] env[63028]: DEBUG oslo_concurrency.lockutils [None req-5610274a-f8e6-4bf2-a18c-455fad19571e tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1370.152807] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-5610274a-f8e6-4bf2-a18c-455fad19571e tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] 068fdccf-16f5-4701-a481-5fa047ab1fa7/f2ba2026-3f4b-431c-97c1-c4ba582a9907-rescue.vmdk. {{(pid=63028) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1370.153066] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bdb82c53-e81c-4b5d-a363-179e01be3a8a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.160127] env[63028]: DEBUG oslo_vmware.api [None req-5610274a-f8e6-4bf2-a18c-455fad19571e tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Waiting for the task: (returnval){ [ 1370.160127] env[63028]: value = "task-2736584" [ 1370.160127] env[63028]: _type = "Task" [ 1370.160127] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1370.167772] env[63028]: DEBUG oslo_vmware.api [None req-5610274a-f8e6-4bf2-a18c-455fad19571e tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Task: {'id': task-2736584, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1370.670407] env[63028]: DEBUG oslo_vmware.api [None req-5610274a-f8e6-4bf2-a18c-455fad19571e tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Task: {'id': task-2736584, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.43656} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1370.670605] env[63028]: INFO nova.virt.vmwareapi.ds_util [None req-5610274a-f8e6-4bf2-a18c-455fad19571e tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] 068fdccf-16f5-4701-a481-5fa047ab1fa7/f2ba2026-3f4b-431c-97c1-c4ba582a9907-rescue.vmdk. 
[ 1370.671352] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce63402d-e0c7-4346-80aa-58ce2b50e881 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.695075] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-5610274a-f8e6-4bf2-a18c-455fad19571e tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] [instance: 068fdccf-16f5-4701-a481-5fa047ab1fa7] Reconfiguring VM instance instance-0000007b to attach disk [datastore1] 068fdccf-16f5-4701-a481-5fa047ab1fa7/f2ba2026-3f4b-431c-97c1-c4ba582a9907-rescue.vmdk or device None with type thin {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1370.695310] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6646366f-cba1-40ee-956d-c79f4e851201 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.712630] env[63028]: DEBUG oslo_vmware.api [None req-5610274a-f8e6-4bf2-a18c-455fad19571e tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Waiting for the task: (returnval){ [ 1370.712630] env[63028]: value = "task-2736585" [ 1370.712630] env[63028]: _type = "Task" [ 1370.712630] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1370.719716] env[63028]: DEBUG oslo_vmware.api [None req-5610274a-f8e6-4bf2-a18c-455fad19571e tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Task: {'id': task-2736585, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1371.222814] env[63028]: DEBUG oslo_vmware.api [None req-5610274a-f8e6-4bf2-a18c-455fad19571e tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Task: {'id': task-2736585, 'name': ReconfigVM_Task, 'duration_secs': 0.272748} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1371.223182] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-5610274a-f8e6-4bf2-a18c-455fad19571e tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] [instance: 068fdccf-16f5-4701-a481-5fa047ab1fa7] Reconfigured VM instance instance-0000007b to attach disk [datastore1] 068fdccf-16f5-4701-a481-5fa047ab1fa7/f2ba2026-3f4b-431c-97c1-c4ba582a9907-rescue.vmdk or device None with type thin {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1371.223942] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ac12160-e1f2-4de2-9952-db185f633fa4 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.247629] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1146008c-cb11-47da-81a7-90b8450176c1 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.261623] env[63028]: DEBUG oslo_vmware.api [None req-5610274a-f8e6-4bf2-a18c-455fad19571e tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Waiting for the task: (returnval){ [ 1371.261623] env[63028]: value = "task-2736586" [ 1371.261623] env[63028]: _type = "Task" [ 1371.261623] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1371.268776] env[63028]: DEBUG oslo_vmware.api [None req-5610274a-f8e6-4bf2-a18c-455fad19571e tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Task: {'id': task-2736586, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1371.770694] env[63028]: DEBUG oslo_vmware.api [None req-5610274a-f8e6-4bf2-a18c-455fad19571e tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Task: {'id': task-2736586, 'name': ReconfigVM_Task, 'duration_secs': 0.148062} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1371.770927] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-5610274a-f8e6-4bf2-a18c-455fad19571e tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] [instance: 068fdccf-16f5-4701-a481-5fa047ab1fa7] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1371.771153] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c9773781-243e-473f-b0ae-7fd4029ad378 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.776738] env[63028]: DEBUG oslo_vmware.api [None req-5610274a-f8e6-4bf2-a18c-455fad19571e tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Waiting for the task: (returnval){ [ 1371.776738] env[63028]: value = "task-2736587" [ 1371.776738] env[63028]: _type = "Task" [ 1371.776738] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1371.784747] env[63028]: DEBUG oslo_vmware.api [None req-5610274a-f8e6-4bf2-a18c-455fad19571e tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Task: {'id': task-2736587, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1372.286408] env[63028]: DEBUG oslo_vmware.api [None req-5610274a-f8e6-4bf2-a18c-455fad19571e tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Task: {'id': task-2736587, 'name': PowerOnVM_Task, 'duration_secs': 0.411452} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1372.286760] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-5610274a-f8e6-4bf2-a18c-455fad19571e tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] [instance: 068fdccf-16f5-4701-a481-5fa047ab1fa7] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1372.289320] env[63028]: DEBUG nova.compute.manager [None req-5610274a-f8e6-4bf2-a18c-455fad19571e tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] [instance: 068fdccf-16f5-4701-a481-5fa047ab1fa7] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1372.290054] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc8d08b0-029b-47b8-8d38-f61bd59dac58 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.031785] env[63028]: DEBUG nova.compute.manager [req-f2596a49-1fa7-4603-9d67-42e082dbe700 req-5f060961-9ec9-4bd9-b6b2-30066ba4f1fe service nova] [instance: 068fdccf-16f5-4701-a481-5fa047ab1fa7] Received event network-changed-91f54c2c-815f-47bd-aa41-2d2ede7ba86d {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1373.031996] env[63028]: DEBUG nova.compute.manager [req-f2596a49-1fa7-4603-9d67-42e082dbe700 req-5f060961-9ec9-4bd9-b6b2-30066ba4f1fe service nova] [instance: 068fdccf-16f5-4701-a481-5fa047ab1fa7] Refreshing instance network info cache due to event network-changed-91f54c2c-815f-47bd-aa41-2d2ede7ba86d. 
{{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1373.032232] env[63028]: DEBUG oslo_concurrency.lockutils [req-f2596a49-1fa7-4603-9d67-42e082dbe700 req-5f060961-9ec9-4bd9-b6b2-30066ba4f1fe service nova] Acquiring lock "refresh_cache-068fdccf-16f5-4701-a481-5fa047ab1fa7" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1373.032380] env[63028]: DEBUG oslo_concurrency.lockutils [req-f2596a49-1fa7-4603-9d67-42e082dbe700 req-5f060961-9ec9-4bd9-b6b2-30066ba4f1fe service nova] Acquired lock "refresh_cache-068fdccf-16f5-4701-a481-5fa047ab1fa7" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1373.032553] env[63028]: DEBUG nova.network.neutron [req-f2596a49-1fa7-4603-9d67-42e082dbe700 req-5f060961-9ec9-4bd9-b6b2-30066ba4f1fe service nova] [instance: 068fdccf-16f5-4701-a481-5fa047ab1fa7] Refreshing network info cache for port 91f54c2c-815f-47bd-aa41-2d2ede7ba86d {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1374.021983] env[63028]: DEBUG nova.network.neutron [req-f2596a49-1fa7-4603-9d67-42e082dbe700 req-5f060961-9ec9-4bd9-b6b2-30066ba4f1fe service nova] [instance: 068fdccf-16f5-4701-a481-5fa047ab1fa7] Updated VIF entry in instance network info cache for port 91f54c2c-815f-47bd-aa41-2d2ede7ba86d. {{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1374.022369] env[63028]: DEBUG nova.network.neutron [req-f2596a49-1fa7-4603-9d67-42e082dbe700 req-5f060961-9ec9-4bd9-b6b2-30066ba4f1fe service nova] [instance: 068fdccf-16f5-4701-a481-5fa047ab1fa7] Updating instance_info_cache with network_info: [{"id": "91f54c2c-815f-47bd-aa41-2d2ede7ba86d", "address": "fa:16:3e:6e:75:a7", "network": {"id": "b78cce39-4f77-4983-a56e-8966d8aaf751", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-744678156-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "2342baa8f1eb4682b1ddeb51638cdbcd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "74e6f6e0-95e6-4531-99e9-0e78350fb655", "external-id": "nsx-vlan-transportzone-896", "segmentation_id": 896, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap91f54c2c-81", "ovs_interfaceid": "91f54c2c-815f-47bd-aa41-2d2ede7ba86d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1374.525416] env[63028]: DEBUG oslo_concurrency.lockutils [req-f2596a49-1fa7-4603-9d67-42e082dbe700 req-5f060961-9ec9-4bd9-b6b2-30066ba4f1fe service nova] Releasing lock "refresh_cache-068fdccf-16f5-4701-a481-5fa047ab1fa7" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1375.059113] env[63028]: DEBUG nova.compute.manager [req-11dbc42a-2fa7-4ffc-9684-0c742c8ab4ca req-87936ada-eb62-4f82-a732-c376bcea6077 service nova] [instance: 068fdccf-16f5-4701-a481-5fa047ab1fa7] Received event 
network-changed-91f54c2c-815f-47bd-aa41-2d2ede7ba86d {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1375.059113] env[63028]: DEBUG nova.compute.manager [req-11dbc42a-2fa7-4ffc-9684-0c742c8ab4ca req-87936ada-eb62-4f82-a732-c376bcea6077 service nova] [instance: 068fdccf-16f5-4701-a481-5fa047ab1fa7] Refreshing instance network info cache due to event network-changed-91f54c2c-815f-47bd-aa41-2d2ede7ba86d. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1375.059113] env[63028]: DEBUG oslo_concurrency.lockutils [req-11dbc42a-2fa7-4ffc-9684-0c742c8ab4ca req-87936ada-eb62-4f82-a732-c376bcea6077 service nova] Acquiring lock "refresh_cache-068fdccf-16f5-4701-a481-5fa047ab1fa7" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1375.059113] env[63028]: DEBUG oslo_concurrency.lockutils [req-11dbc42a-2fa7-4ffc-9684-0c742c8ab4ca req-87936ada-eb62-4f82-a732-c376bcea6077 service nova] Acquired lock "refresh_cache-068fdccf-16f5-4701-a481-5fa047ab1fa7" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1375.059113] env[63028]: DEBUG nova.network.neutron [req-11dbc42a-2fa7-4ffc-9684-0c742c8ab4ca req-87936ada-eb62-4f82-a732-c376bcea6077 service nova] [instance: 068fdccf-16f5-4701-a481-5fa047ab1fa7] Refreshing network info cache for port 91f54c2c-815f-47bd-aa41-2d2ede7ba86d {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1375.144257] env[63028]: DEBUG nova.compute.manager [req-27d8cff7-9285-4727-aa8e-a58df89747ff req-a59cbb01-6dbd-457c-86fe-bbe0ecb072d5 service nova] [instance: 068fdccf-16f5-4701-a481-5fa047ab1fa7] Received event network-changed-91f54c2c-815f-47bd-aa41-2d2ede7ba86d {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1375.144461] env[63028]: DEBUG nova.compute.manager [req-27d8cff7-9285-4727-aa8e-a58df89747ff req-a59cbb01-6dbd-457c-86fe-bbe0ecb072d5 service nova] [instance: 068fdccf-16f5-4701-a481-5fa047ab1fa7] Refreshing instance network info cache due to event network-changed-91f54c2c-815f-47bd-aa41-2d2ede7ba86d. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1375.144649] env[63028]: DEBUG oslo_concurrency.lockutils [req-27d8cff7-9285-4727-aa8e-a58df89747ff req-a59cbb01-6dbd-457c-86fe-bbe0ecb072d5 service nova] Acquiring lock "refresh_cache-068fdccf-16f5-4701-a481-5fa047ab1fa7" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1375.781877] env[63028]: DEBUG nova.network.neutron [req-11dbc42a-2fa7-4ffc-9684-0c742c8ab4ca req-87936ada-eb62-4f82-a732-c376bcea6077 service nova] [instance: 068fdccf-16f5-4701-a481-5fa047ab1fa7] Updated VIF entry in instance network info cache for port 91f54c2c-815f-47bd-aa41-2d2ede7ba86d. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1375.782243] env[63028]: DEBUG nova.network.neutron [req-11dbc42a-2fa7-4ffc-9684-0c742c8ab4ca req-87936ada-eb62-4f82-a732-c376bcea6077 service nova] [instance: 068fdccf-16f5-4701-a481-5fa047ab1fa7] Updating instance_info_cache with network_info: [{"id": "91f54c2c-815f-47bd-aa41-2d2ede7ba86d", "address": "fa:16:3e:6e:75:a7", "network": {"id": "b78cce39-4f77-4983-a56e-8966d8aaf751", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-744678156-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "2342baa8f1eb4682b1ddeb51638cdbcd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "74e6f6e0-95e6-4531-99e9-0e78350fb655", "external-id": "nsx-vlan-transportzone-896", "segmentation_id": 896, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap91f54c2c-81", "ovs_interfaceid": "91f54c2c-815f-47bd-aa41-2d2ede7ba86d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1376.269443] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9bdb36f9-04cb-4637-99d4-acf80939bd8d tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Acquiring lock "068fdccf-16f5-4701-a481-5fa047ab1fa7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1376.269745] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9bdb36f9-04cb-4637-99d4-acf80939bd8d tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Lock "068fdccf-16f5-4701-a481-5fa047ab1fa7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1376.269935] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9bdb36f9-04cb-4637-99d4-acf80939bd8d tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Acquiring lock "068fdccf-16f5-4701-a481-5fa047ab1fa7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1376.270135] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9bdb36f9-04cb-4637-99d4-acf80939bd8d tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Lock "068fdccf-16f5-4701-a481-5fa047ab1fa7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1376.270303] env[63028]: DEBUG oslo_concurrency.lockutils [None 
req-9bdb36f9-04cb-4637-99d4-acf80939bd8d tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Lock "068fdccf-16f5-4701-a481-5fa047ab1fa7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1376.272291] env[63028]: INFO nova.compute.manager [None req-9bdb36f9-04cb-4637-99d4-acf80939bd8d tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] [instance: 068fdccf-16f5-4701-a481-5fa047ab1fa7] Terminating instance [ 1376.284785] env[63028]: DEBUG oslo_concurrency.lockutils [req-11dbc42a-2fa7-4ffc-9684-0c742c8ab4ca req-87936ada-eb62-4f82-a732-c376bcea6077 service nova] Releasing lock "refresh_cache-068fdccf-16f5-4701-a481-5fa047ab1fa7" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1376.285010] env[63028]: DEBUG nova.compute.manager [req-11dbc42a-2fa7-4ffc-9684-0c742c8ab4ca req-87936ada-eb62-4f82-a732-c376bcea6077 service nova] [instance: 068fdccf-16f5-4701-a481-5fa047ab1fa7] Received event network-changed-91f54c2c-815f-47bd-aa41-2d2ede7ba86d {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1376.285187] env[63028]: DEBUG nova.compute.manager [req-11dbc42a-2fa7-4ffc-9684-0c742c8ab4ca req-87936ada-eb62-4f82-a732-c376bcea6077 service nova] [instance: 068fdccf-16f5-4701-a481-5fa047ab1fa7] Refreshing instance network info cache due to event network-changed-91f54c2c-815f-47bd-aa41-2d2ede7ba86d. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1376.285359] env[63028]: DEBUG oslo_concurrency.lockutils [req-11dbc42a-2fa7-4ffc-9684-0c742c8ab4ca req-87936ada-eb62-4f82-a732-c376bcea6077 service nova] Acquiring lock "refresh_cache-068fdccf-16f5-4701-a481-5fa047ab1fa7" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1376.285498] env[63028]: DEBUG oslo_concurrency.lockutils [req-11dbc42a-2fa7-4ffc-9684-0c742c8ab4ca req-87936ada-eb62-4f82-a732-c376bcea6077 service nova] Acquired lock "refresh_cache-068fdccf-16f5-4701-a481-5fa047ab1fa7" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1376.285654] env[63028]: DEBUG nova.network.neutron [req-11dbc42a-2fa7-4ffc-9684-0c742c8ab4ca req-87936ada-eb62-4f82-a732-c376bcea6077 service nova] [instance: 068fdccf-16f5-4701-a481-5fa047ab1fa7] Refreshing network info cache for port 91f54c2c-815f-47bd-aa41-2d2ede7ba86d {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1376.777553] env[63028]: DEBUG nova.compute.manager [None req-9bdb36f9-04cb-4637-99d4-acf80939bd8d tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] [instance: 068fdccf-16f5-4701-a481-5fa047ab1fa7] Start destroying the instance on the hypervisor. 
{{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1376.777769] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-9bdb36f9-04cb-4637-99d4-acf80939bd8d tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] [instance: 068fdccf-16f5-4701-a481-5fa047ab1fa7] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1376.778673] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91b7bee0-714c-47c5-9629-e17b21d1de79 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.786212] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-9bdb36f9-04cb-4637-99d4-acf80939bd8d tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] [instance: 068fdccf-16f5-4701-a481-5fa047ab1fa7] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1376.786447] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b31bc3e9-7bca-4a60-838c-6e3b240b8ea3 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.794304] env[63028]: DEBUG oslo_vmware.api [None req-9bdb36f9-04cb-4637-99d4-acf80939bd8d tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Waiting for the task: (returnval){ [ 1376.794304] env[63028]: value = "task-2736588" [ 1376.794304] env[63028]: _type = "Task" [ 1376.794304] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1376.801738] env[63028]: DEBUG oslo_vmware.api [None req-9bdb36f9-04cb-4637-99d4-acf80939bd8d tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Task: {'id': task-2736588, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1376.969351] env[63028]: DEBUG nova.network.neutron [req-11dbc42a-2fa7-4ffc-9684-0c742c8ab4ca req-87936ada-eb62-4f82-a732-c376bcea6077 service nova] [instance: 068fdccf-16f5-4701-a481-5fa047ab1fa7] Updated VIF entry in instance network info cache for port 91f54c2c-815f-47bd-aa41-2d2ede7ba86d. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1376.969717] env[63028]: DEBUG nova.network.neutron [req-11dbc42a-2fa7-4ffc-9684-0c742c8ab4ca req-87936ada-eb62-4f82-a732-c376bcea6077 service nova] [instance: 068fdccf-16f5-4701-a481-5fa047ab1fa7] Updating instance_info_cache with network_info: [{"id": "91f54c2c-815f-47bd-aa41-2d2ede7ba86d", "address": "fa:16:3e:6e:75:a7", "network": {"id": "b78cce39-4f77-4983-a56e-8966d8aaf751", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-744678156-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "2342baa8f1eb4682b1ddeb51638cdbcd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "74e6f6e0-95e6-4531-99e9-0e78350fb655", "external-id": "nsx-vlan-transportzone-896", "segmentation_id": 896, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap91f54c2c-81", "ovs_interfaceid": "91f54c2c-815f-47bd-aa41-2d2ede7ba86d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1377.304287] env[63028]: DEBUG oslo_vmware.api [None req-9bdb36f9-04cb-4637-99d4-acf80939bd8d tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Task: {'id': task-2736588, 'name': PowerOffVM_Task, 'duration_secs': 0.253601} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1377.304677] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-9bdb36f9-04cb-4637-99d4-acf80939bd8d tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] [instance: 068fdccf-16f5-4701-a481-5fa047ab1fa7] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1377.304677] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-9bdb36f9-04cb-4637-99d4-acf80939bd8d tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] [instance: 068fdccf-16f5-4701-a481-5fa047ab1fa7] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1377.304911] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f5049e35-8ae1-4caa-a3d6-6ca5a2a142cf {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.472376] env[63028]: DEBUG oslo_concurrency.lockutils [req-11dbc42a-2fa7-4ffc-9684-0c742c8ab4ca req-87936ada-eb62-4f82-a732-c376bcea6077 service nova] Releasing lock "refresh_cache-068fdccf-16f5-4701-a481-5fa047ab1fa7" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1377.472824] env[63028]: DEBUG oslo_concurrency.lockutils [req-27d8cff7-9285-4727-aa8e-a58df89747ff req-a59cbb01-6dbd-457c-86fe-bbe0ecb072d5 service nova] Acquired lock "refresh_cache-068fdccf-16f5-4701-a481-5fa047ab1fa7" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1377.473032] env[63028]: DEBUG nova.network.neutron [req-27d8cff7-9285-4727-aa8e-a58df89747ff req-a59cbb01-6dbd-457c-86fe-bbe0ecb072d5 service nova] [instance: 068fdccf-16f5-4701-a481-5fa047ab1fa7] Refreshing network info cache for port 91f54c2c-815f-47bd-aa41-2d2ede7ba86d {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1377.504845] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-9bdb36f9-04cb-4637-99d4-acf80939bd8d tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] [instance: 068fdccf-16f5-4701-a481-5fa047ab1fa7] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1377.505065] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-9bdb36f9-04cb-4637-99d4-acf80939bd8d tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] [instance: 068fdccf-16f5-4701-a481-5fa047ab1fa7] Deleting contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1377.505246] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-9bdb36f9-04cb-4637-99d4-acf80939bd8d tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Deleting the datastore file [datastore1] 068fdccf-16f5-4701-a481-5fa047ab1fa7 {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1377.505498] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4c37dc2c-0208-4ee9-9d06-31b1a274cca9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1377.512736] env[63028]: DEBUG oslo_vmware.api [None req-9bdb36f9-04cb-4637-99d4-acf80939bd8d tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Waiting for the task: (returnval){ [ 1377.512736] env[63028]: value = "task-2736590" [ 1377.512736] env[63028]: _type = "Task" [ 1377.512736] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1377.521107] env[63028]: DEBUG oslo_vmware.api [None req-9bdb36f9-04cb-4637-99d4-acf80939bd8d tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Task: {'id': task-2736590, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1378.022335] env[63028]: DEBUG oslo_vmware.api [None req-9bdb36f9-04cb-4637-99d4-acf80939bd8d tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Task: {'id': task-2736590, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.174765} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1378.022631] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-9bdb36f9-04cb-4637-99d4-acf80939bd8d tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1378.022759] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-9bdb36f9-04cb-4637-99d4-acf80939bd8d tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] [instance: 068fdccf-16f5-4701-a481-5fa047ab1fa7] Deleted contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1378.022930] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-9bdb36f9-04cb-4637-99d4-acf80939bd8d tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] [instance: 068fdccf-16f5-4701-a481-5fa047ab1fa7] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1378.023122] env[63028]: INFO nova.compute.manager [None req-9bdb36f9-04cb-4637-99d4-acf80939bd8d tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] [instance: 068fdccf-16f5-4701-a481-5fa047ab1fa7] Took 1.25 seconds to destroy the instance on the hypervisor. [ 1378.023365] env[63028]: DEBUG oslo.service.loopingcall [None req-9bdb36f9-04cb-4637-99d4-acf80939bd8d tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1378.023547] env[63028]: DEBUG nova.compute.manager [-] [instance: 068fdccf-16f5-4701-a481-5fa047ab1fa7] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1378.023639] env[63028]: DEBUG nova.network.neutron [-] [instance: 068fdccf-16f5-4701-a481-5fa047ab1fa7] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1378.249510] env[63028]: DEBUG nova.network.neutron [req-27d8cff7-9285-4727-aa8e-a58df89747ff req-a59cbb01-6dbd-457c-86fe-bbe0ecb072d5 service nova] [instance: 068fdccf-16f5-4701-a481-5fa047ab1fa7] Updated VIF entry in instance network info cache for port 91f54c2c-815f-47bd-aa41-2d2ede7ba86d. {{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1378.249817] env[63028]: DEBUG nova.network.neutron [req-27d8cff7-9285-4727-aa8e-a58df89747ff req-a59cbb01-6dbd-457c-86fe-bbe0ecb072d5 service nova] [instance: 068fdccf-16f5-4701-a481-5fa047ab1fa7] Updating instance_info_cache with network_info: [{"id": "91f54c2c-815f-47bd-aa41-2d2ede7ba86d", "address": "fa:16:3e:6e:75:a7", "network": {"id": "b78cce39-4f77-4983-a56e-8966d8aaf751", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-744678156-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "2342baa8f1eb4682b1ddeb51638cdbcd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "74e6f6e0-95e6-4531-99e9-0e78350fb655", "external-id": "nsx-vlan-transportzone-896", "segmentation_id": 896, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap91f54c2c-81", "ovs_interfaceid": "91f54c2c-815f-47bd-aa41-2d2ede7ba86d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1378.362311] env[63028]: DEBUG nova.compute.manager [req-ebe78272-96d7-452f-bd43-0bd25fc73820 req-031f4c79-35e5-4a08-8558-220a3dfd6e14 service nova] [instance: 068fdccf-16f5-4701-a481-5fa047ab1fa7] Received event network-vif-deleted-91f54c2c-815f-47bd-aa41-2d2ede7ba86d {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1378.362639] env[63028]: INFO nova.compute.manager [req-ebe78272-96d7-452f-bd43-0bd25fc73820 req-031f4c79-35e5-4a08-8558-220a3dfd6e14 service nova] [instance: 068fdccf-16f5-4701-a481-5fa047ab1fa7] Neutron deleted interface 91f54c2c-815f-47bd-aa41-2d2ede7ba86d; detaching it from the instance and deleting it from the info cache [ 1378.362669] env[63028]: DEBUG nova.network.neutron [req-ebe78272-96d7-452f-bd43-0bd25fc73820 req-031f4c79-35e5-4a08-8558-220a3dfd6e14 service nova] [instance: 068fdccf-16f5-4701-a481-5fa047ab1fa7] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1378.752179] env[63028]: DEBUG oslo_concurrency.lockutils 
[req-27d8cff7-9285-4727-aa8e-a58df89747ff req-a59cbb01-6dbd-457c-86fe-bbe0ecb072d5 service nova] Releasing lock "refresh_cache-068fdccf-16f5-4701-a481-5fa047ab1fa7" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1378.845942] env[63028]: DEBUG nova.network.neutron [-] [instance: 068fdccf-16f5-4701-a481-5fa047ab1fa7] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1378.865036] env[63028]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-aabcda5d-8f6b-4fd0-830f-ac9f73feb9ff {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.874780] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2533163a-4e65-4482-978f-ffb52daab60a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.898415] env[63028]: DEBUG nova.compute.manager [req-ebe78272-96d7-452f-bd43-0bd25fc73820 req-031f4c79-35e5-4a08-8558-220a3dfd6e14 service nova] [instance: 068fdccf-16f5-4701-a481-5fa047ab1fa7] Detach interface failed, port_id=91f54c2c-815f-47bd-aa41-2d2ede7ba86d, reason: Instance 068fdccf-16f5-4701-a481-5fa047ab1fa7 could not be found. {{(pid=63028) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1379.348637] env[63028]: INFO nova.compute.manager [-] [instance: 068fdccf-16f5-4701-a481-5fa047ab1fa7] Took 1.32 seconds to deallocate network for instance. [ 1379.854876] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9bdb36f9-04cb-4637-99d4-acf80939bd8d tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1379.855254] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9bdb36f9-04cb-4637-99d4-acf80939bd8d tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1379.855368] env[63028]: DEBUG nova.objects.instance [None req-9bdb36f9-04cb-4637-99d4-acf80939bd8d tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Lazy-loading 'resources' on Instance uuid 068fdccf-16f5-4701-a481-5fa047ab1fa7 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1380.389397] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-321f9339-8d5b-43fd-947b-a426b45a244c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.396715] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd427907-1777-4b6f-bf83-55091dd4847a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.426272] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-752412db-129a-4769-856a-8a1aefe47b6d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.433365] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33aa0964-5024-4a7e-8edc-b97a3e01d050 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.446167] env[63028]: DEBUG nova.compute.provider_tree [None req-9bdb36f9-04cb-4637-99d4-acf80939bd8d tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1380.949346] env[63028]: DEBUG nova.scheduler.client.report [None req-9bdb36f9-04cb-4637-99d4-acf80939bd8d tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1381.454666] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9bdb36f9-04cb-4637-99d4-acf80939bd8d tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.599s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1381.475052] env[63028]: INFO nova.scheduler.client.report [None req-9bdb36f9-04cb-4637-99d4-acf80939bd8d tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Deleted allocations for instance 068fdccf-16f5-4701-a481-5fa047ab1fa7 [ 1381.982642] env[63028]: DEBUG oslo_concurrency.lockutils [None req-9bdb36f9-04cb-4637-99d4-acf80939bd8d tempest-ServerRescueTestJSONUnderV235-845350109 tempest-ServerRescueTestJSONUnderV235-845350109-project-member] Lock "068fdccf-16f5-4701-a481-5fa047ab1fa7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.713s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1386.609064] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c2b9361a-bb98-4f46-98ad-b235e39f21c6 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] Acquiring lock "f68e0910-2d7c-4381-84cc-0f44a41d4fa9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1386.609342] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c2b9361a-bb98-4f46-98ad-b235e39f21c6 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] Lock "f68e0910-2d7c-4381-84cc-0f44a41d4fa9" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1387.113123] env[63028]: DEBUG nova.compute.manager [None req-c2b9361a-bb98-4f46-98ad-b235e39f21c6 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] [instance: f68e0910-2d7c-4381-84cc-0f44a41d4fa9] Starting instance... {{(pid=63028) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1387.632706] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c2b9361a-bb98-4f46-98ad-b235e39f21c6 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1387.632958] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c2b9361a-bb98-4f46-98ad-b235e39f21c6 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1387.634490] env[63028]: INFO nova.compute.claims [None req-c2b9361a-bb98-4f46-98ad-b235e39f21c6 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] [instance: f68e0910-2d7c-4381-84cc-0f44a41d4fa9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1388.670922] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23708cfd-c69e-4f74-80d7-add5830e3341 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.678771] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2af56d1-daee-497c-9602-0f6582a8a4f0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.708577] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7be8336d-adae-4941-a4ac-6a37d680db90 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.715310] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8130d88-4c1d-47be-84b0-33487c44d76c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.727936] env[63028]: DEBUG nova.compute.provider_tree [None req-c2b9361a-bb98-4f46-98ad-b235e39f21c6 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1389.231577] env[63028]: DEBUG nova.scheduler.client.report [None req-c2b9361a-bb98-4f46-98ad-b235e39f21c6 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 
'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1389.736547] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c2b9361a-bb98-4f46-98ad-b235e39f21c6 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.103s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1389.737081] env[63028]: DEBUG nova.compute.manager [None req-c2b9361a-bb98-4f46-98ad-b235e39f21c6 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] [instance: f68e0910-2d7c-4381-84cc-0f44a41d4fa9] Start building networks asynchronously for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1390.242312] env[63028]: DEBUG nova.compute.utils [None req-c2b9361a-bb98-4f46-98ad-b235e39f21c6 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] Using /dev/sd instead of None {{(pid=63028) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1390.244426] env[63028]: DEBUG nova.compute.manager [None req-c2b9361a-bb98-4f46-98ad-b235e39f21c6 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] [instance: f68e0910-2d7c-4381-84cc-0f44a41d4fa9] Allocating IP information in the background. {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1390.244752] env[63028]: DEBUG nova.network.neutron [None req-c2b9361a-bb98-4f46-98ad-b235e39f21c6 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] [instance: f68e0910-2d7c-4381-84cc-0f44a41d4fa9] allocate_for_instance() {{(pid=63028) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1390.282733] env[63028]: DEBUG nova.policy [None req-c2b9361a-bb98-4f46-98ad-b235e39f21c6 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '03dc08a2f9ef4f19953628f9f1d64d2d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cbe7f96592454994a0465a923155f328', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63028) authorize /opt/stack/nova/nova/policy.py:192}} [ 1390.511048] env[63028]: DEBUG nova.network.neutron [None req-c2b9361a-bb98-4f46-98ad-b235e39f21c6 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] [instance: f68e0910-2d7c-4381-84cc-0f44a41d4fa9] Successfully created port: ffb1392b-9625-4c1e-a6b2-e091f8f097aa {{(pid=63028) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1390.749761] env[63028]: DEBUG nova.compute.manager [None req-c2b9361a-bb98-4f46-98ad-b235e39f21c6 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] [instance: 
f68e0910-2d7c-4381-84cc-0f44a41d4fa9] Start building block device mappings for instance. {{(pid=63028) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1391.759517] env[63028]: DEBUG nova.compute.manager [None req-c2b9361a-bb98-4f46-98ad-b235e39f21c6 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] [instance: f68e0910-2d7c-4381-84cc-0f44a41d4fa9] Start spawning the instance on the hypervisor. {{(pid=63028) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1391.785654] env[63028]: DEBUG nova.virt.hardware [None req-c2b9361a-bb98-4f46-98ad-b235e39f21c6 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-20T17:52:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-20T17:52:17Z,direct_url=,disk_format='vmdk',id=f2ba2026-3f4b-431c-97c1-c4ba582a9907,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='96661ac8d4f04d6e97eea4809b444133',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-20T17:52:17Z,virtual_size=,visibility=), allow threads: False {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1391.785907] env[63028]: DEBUG nova.virt.hardware [None req-c2b9361a-bb98-4f46-98ad-b235e39f21c6 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] Flavor limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1391.786086] env[63028]: DEBUG nova.virt.hardware [None req-c2b9361a-bb98-4f46-98ad-b235e39f21c6 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] Image limits 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1391.786285] env[63028]: DEBUG nova.virt.hardware [None req-c2b9361a-bb98-4f46-98ad-b235e39f21c6 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] Flavor pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1391.786432] env[63028]: DEBUG nova.virt.hardware [None req-c2b9361a-bb98-4f46-98ad-b235e39f21c6 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] Image pref 0:0:0 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1391.786579] env[63028]: DEBUG nova.virt.hardware [None req-c2b9361a-bb98-4f46-98ad-b235e39f21c6 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63028) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1391.786812] env[63028]: DEBUG nova.virt.hardware [None req-c2b9361a-bb98-4f46-98ad-b235e39f21c6 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63028) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:573}} [ 1391.786976] env[63028]: DEBUG nova.virt.hardware [None req-c2b9361a-bb98-4f46-98ad-b235e39f21c6 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1391.787169] env[63028]: DEBUG nova.virt.hardware [None req-c2b9361a-bb98-4f46-98ad-b235e39f21c6 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] Got 1 possible topologies {{(pid=63028) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1391.787334] env[63028]: DEBUG nova.virt.hardware [None req-c2b9361a-bb98-4f46-98ad-b235e39f21c6 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1391.787501] env[63028]: DEBUG nova.virt.hardware [None req-c2b9361a-bb98-4f46-98ad-b235e39f21c6 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63028) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1391.788452] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e9f8778-f408-4342-9790-fc00a45e4149 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.796181] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8601a8c1-7c1c-4fbb-9f3f-3d7ef6341228 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.854448] env[63028]: DEBUG nova.compute.manager [req-87508616-35e7-404a-ad79-9c414ca33054 req-37b36bb8-de64-48fa-ae13-f07a8bd3d4ac service nova] [instance: f68e0910-2d7c-4381-84cc-0f44a41d4fa9] Received event network-vif-plugged-ffb1392b-9625-4c1e-a6b2-e091f8f097aa {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1391.854672] env[63028]: DEBUG oslo_concurrency.lockutils [req-87508616-35e7-404a-ad79-9c414ca33054 req-37b36bb8-de64-48fa-ae13-f07a8bd3d4ac service nova] Acquiring lock "f68e0910-2d7c-4381-84cc-0f44a41d4fa9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1391.854895] env[63028]: DEBUG oslo_concurrency.lockutils [req-87508616-35e7-404a-ad79-9c414ca33054 req-37b36bb8-de64-48fa-ae13-f07a8bd3d4ac service nova] Lock "f68e0910-2d7c-4381-84cc-0f44a41d4fa9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1391.855059] env[63028]: DEBUG oslo_concurrency.lockutils [req-87508616-35e7-404a-ad79-9c414ca33054 req-37b36bb8-de64-48fa-ae13-f07a8bd3d4ac service nova] Lock "f68e0910-2d7c-4381-84cc-0f44a41d4fa9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1391.855233] env[63028]: DEBUG nova.compute.manager 
[req-87508616-35e7-404a-ad79-9c414ca33054 req-37b36bb8-de64-48fa-ae13-f07a8bd3d4ac service nova] [instance: f68e0910-2d7c-4381-84cc-0f44a41d4fa9] No waiting events found dispatching network-vif-plugged-ffb1392b-9625-4c1e-a6b2-e091f8f097aa {{(pid=63028) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1391.855399] env[63028]: WARNING nova.compute.manager [req-87508616-35e7-404a-ad79-9c414ca33054 req-37b36bb8-de64-48fa-ae13-f07a8bd3d4ac service nova] [instance: f68e0910-2d7c-4381-84cc-0f44a41d4fa9] Received unexpected event network-vif-plugged-ffb1392b-9625-4c1e-a6b2-e091f8f097aa for instance with vm_state building and task_state spawning. [ 1391.937636] env[63028]: DEBUG nova.network.neutron [None req-c2b9361a-bb98-4f46-98ad-b235e39f21c6 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] [instance: f68e0910-2d7c-4381-84cc-0f44a41d4fa9] Successfully updated port: ffb1392b-9625-4c1e-a6b2-e091f8f097aa {{(pid=63028) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1392.440468] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c2b9361a-bb98-4f46-98ad-b235e39f21c6 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] Acquiring lock "refresh_cache-f68e0910-2d7c-4381-84cc-0f44a41d4fa9" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1392.440628] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c2b9361a-bb98-4f46-98ad-b235e39f21c6 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] Acquired lock "refresh_cache-f68e0910-2d7c-4381-84cc-0f44a41d4fa9" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1392.440801] env[63028]: DEBUG nova.network.neutron [None req-c2b9361a-bb98-4f46-98ad-b235e39f21c6 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] [instance: f68e0910-2d7c-4381-84cc-0f44a41d4fa9] Building network info cache for instance {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1392.972702] env[63028]: DEBUG nova.network.neutron [None req-c2b9361a-bb98-4f46-98ad-b235e39f21c6 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] [instance: f68e0910-2d7c-4381-84cc-0f44a41d4fa9] Instance cache missing network info. 
{{(pid=63028) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1393.087323] env[63028]: DEBUG nova.network.neutron [None req-c2b9361a-bb98-4f46-98ad-b235e39f21c6 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] [instance: f68e0910-2d7c-4381-84cc-0f44a41d4fa9] Updating instance_info_cache with network_info: [{"id": "ffb1392b-9625-4c1e-a6b2-e091f8f097aa", "address": "fa:16:3e:be:cd:e9", "network": {"id": "3b24ba38-5d7d-43c9-93b1-3afbf918d701", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1803633307-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cbe7f96592454994a0465a923155f328", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e365f3b9-706b-4fa2-8f95-ae51b35ab011", "external-id": "nsx-vlan-transportzone-154", "segmentation_id": 154, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapffb1392b-96", "ovs_interfaceid": "ffb1392b-9625-4c1e-a6b2-e091f8f097aa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1393.590300] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c2b9361a-bb98-4f46-98ad-b235e39f21c6 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] Releasing lock "refresh_cache-f68e0910-2d7c-4381-84cc-0f44a41d4fa9" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1393.590614] env[63028]: DEBUG nova.compute.manager [None req-c2b9361a-bb98-4f46-98ad-b235e39f21c6 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] [instance: f68e0910-2d7c-4381-84cc-0f44a41d4fa9] Instance network_info: |[{"id": "ffb1392b-9625-4c1e-a6b2-e091f8f097aa", "address": "fa:16:3e:be:cd:e9", "network": {"id": "3b24ba38-5d7d-43c9-93b1-3afbf918d701", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1803633307-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cbe7f96592454994a0465a923155f328", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e365f3b9-706b-4fa2-8f95-ae51b35ab011", "external-id": "nsx-vlan-transportzone-154", "segmentation_id": 154, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapffb1392b-96", "ovs_interfaceid": "ffb1392b-9625-4c1e-a6b2-e091f8f097aa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63028) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 
1393.591114] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c2b9361a-bb98-4f46-98ad-b235e39f21c6 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] [instance: f68e0910-2d7c-4381-84cc-0f44a41d4fa9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:be:cd:e9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e365f3b9-706b-4fa2-8f95-ae51b35ab011', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ffb1392b-9625-4c1e-a6b2-e091f8f097aa', 'vif_model': 'vmxnet3'}] {{(pid=63028) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1393.598391] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2b9361a-bb98-4f46-98ad-b235e39f21c6 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] Creating folder: Project (cbe7f96592454994a0465a923155f328). Parent ref: group-v550570. {{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1393.598656] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-493a332b-3b25-45a5-a590-6a0dbc1d9c94 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.611885] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-c2b9361a-bb98-4f46-98ad-b235e39f21c6 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] Created folder: Project (cbe7f96592454994a0465a923155f328) in parent group-v550570. [ 1393.611885] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2b9361a-bb98-4f46-98ad-b235e39f21c6 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] Creating folder: Instances. Parent ref: group-v550906. {{(pid=63028) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1393.612029] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7063e685-1159-4b0e-810b-7ede2e6c1a7c {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.621159] env[63028]: INFO nova.virt.vmwareapi.vm_util [None req-c2b9361a-bb98-4f46-98ad-b235e39f21c6 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] Created folder: Instances in parent group-v550906. [ 1393.621389] env[63028]: DEBUG oslo.service.loopingcall [None req-c2b9361a-bb98-4f46-98ad-b235e39f21c6 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1393.621569] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f68e0910-2d7c-4381-84cc-0f44a41d4fa9] Creating VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1393.621758] env[63028]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1840a25d-a411-4e30-a03b-25b96ad02bc0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.639936] env[63028]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1393.639936] env[63028]: value = "task-2736593" [ 1393.639936] env[63028]: _type = "Task" [ 1393.639936] env[63028]: } to complete. 
{{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1393.646916] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2736593, 'name': CreateVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1393.880533] env[63028]: DEBUG nova.compute.manager [req-816deea0-30b6-4778-bf8e-a469f45e08bf req-af8e7b2b-7417-4afb-be85-906864ea2ca5 service nova] [instance: f68e0910-2d7c-4381-84cc-0f44a41d4fa9] Received event network-changed-ffb1392b-9625-4c1e-a6b2-e091f8f097aa {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1393.880733] env[63028]: DEBUG nova.compute.manager [req-816deea0-30b6-4778-bf8e-a469f45e08bf req-af8e7b2b-7417-4afb-be85-906864ea2ca5 service nova] [instance: f68e0910-2d7c-4381-84cc-0f44a41d4fa9] Refreshing instance network info cache due to event network-changed-ffb1392b-9625-4c1e-a6b2-e091f8f097aa. {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1393.880952] env[63028]: DEBUG oslo_concurrency.lockutils [req-816deea0-30b6-4778-bf8e-a469f45e08bf req-af8e7b2b-7417-4afb-be85-906864ea2ca5 service nova] Acquiring lock "refresh_cache-f68e0910-2d7c-4381-84cc-0f44a41d4fa9" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1393.881109] env[63028]: DEBUG oslo_concurrency.lockutils [req-816deea0-30b6-4778-bf8e-a469f45e08bf req-af8e7b2b-7417-4afb-be85-906864ea2ca5 service nova] Acquired lock "refresh_cache-f68e0910-2d7c-4381-84cc-0f44a41d4fa9" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1393.881284] env[63028]: DEBUG nova.network.neutron [req-816deea0-30b6-4778-bf8e-a469f45e08bf req-af8e7b2b-7417-4afb-be85-906864ea2ca5 service nova] [instance: f68e0910-2d7c-4381-84cc-0f44a41d4fa9] Refreshing network info cache for port ffb1392b-9625-4c1e-a6b2-e091f8f097aa {{(pid=63028) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1394.150126] env[63028]: DEBUG oslo_vmware.api [-] Task: {'id': task-2736593, 'name': CreateVM_Task, 'duration_secs': 0.280736} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1394.150411] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f68e0910-2d7c-4381-84cc-0f44a41d4fa9] Created VM on the ESX host {{(pid=63028) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1394.150904] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c2b9361a-bb98-4f46-98ad-b235e39f21c6 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1394.151083] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c2b9361a-bb98-4f46-98ad-b235e39f21c6 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1394.151398] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c2b9361a-bb98-4f46-98ad-b235e39f21c6 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1394.151734] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-13520b7f-f0cf-4773-bd5e-40367664014f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.155870] env[63028]: DEBUG oslo_vmware.api [None req-c2b9361a-bb98-4f46-98ad-b235e39f21c6 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] Waiting for the task: (returnval){ [ 1394.155870] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5225b3a8-fd10-6b55-e706-62fdf3b6d928" [ 1394.155870] env[63028]: _type = "Task" [ 1394.155870] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1394.162930] env[63028]: DEBUG oslo_vmware.api [None req-c2b9361a-bb98-4f46-98ad-b235e39f21c6 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5225b3a8-fd10-6b55-e706-62fdf3b6d928, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1394.551443] env[63028]: DEBUG nova.network.neutron [req-816deea0-30b6-4778-bf8e-a469f45e08bf req-af8e7b2b-7417-4afb-be85-906864ea2ca5 service nova] [instance: f68e0910-2d7c-4381-84cc-0f44a41d4fa9] Updated VIF entry in instance network info cache for port ffb1392b-9625-4c1e-a6b2-e091f8f097aa. 
{{(pid=63028) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1394.551787] env[63028]: DEBUG nova.network.neutron [req-816deea0-30b6-4778-bf8e-a469f45e08bf req-af8e7b2b-7417-4afb-be85-906864ea2ca5 service nova] [instance: f68e0910-2d7c-4381-84cc-0f44a41d4fa9] Updating instance_info_cache with network_info: [{"id": "ffb1392b-9625-4c1e-a6b2-e091f8f097aa", "address": "fa:16:3e:be:cd:e9", "network": {"id": "3b24ba38-5d7d-43c9-93b1-3afbf918d701", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1803633307-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cbe7f96592454994a0465a923155f328", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e365f3b9-706b-4fa2-8f95-ae51b35ab011", "external-id": "nsx-vlan-transportzone-154", "segmentation_id": 154, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapffb1392b-96", "ovs_interfaceid": "ffb1392b-9625-4c1e-a6b2-e091f8f097aa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1394.665861] env[63028]: DEBUG oslo_vmware.api [None req-c2b9361a-bb98-4f46-98ad-b235e39f21c6 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5225b3a8-fd10-6b55-e706-62fdf3b6d928, 'name': SearchDatastore_Task, 'duration_secs': 0.009399} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1394.666139] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c2b9361a-bb98-4f46-98ad-b235e39f21c6 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1394.666375] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c2b9361a-bb98-4f46-98ad-b235e39f21c6 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] [instance: f68e0910-2d7c-4381-84cc-0f44a41d4fa9] Processing image f2ba2026-3f4b-431c-97c1-c4ba582a9907 {{(pid=63028) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1394.666607] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c2b9361a-bb98-4f46-98ad-b235e39f21c6 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1394.666776] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c2b9361a-bb98-4f46-98ad-b235e39f21c6 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1394.666963] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-c2b9361a-bb98-4f46-98ad-b235e39f21c6 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1394.667221] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-403d1a0f-05ef-4457-a355-f99abfea32ad {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.675017] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-c2b9361a-bb98-4f46-98ad-b235e39f21c6 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63028) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1394.675206] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c2b9361a-bb98-4f46-98ad-b235e39f21c6 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63028) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1394.675865] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-40ba7138-a3fe-419a-b40e-fb0494530598 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.680510] env[63028]: DEBUG oslo_vmware.api [None req-c2b9361a-bb98-4f46-98ad-b235e39f21c6 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] Waiting for the task: (returnval){ [ 1394.680510] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]521e279b-7aca-3df8-d4c4-9ac9db660bdc" [ 1394.680510] env[63028]: _type = "Task" [ 1394.680510] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1394.687399] env[63028]: DEBUG oslo_vmware.api [None req-c2b9361a-bb98-4f46-98ad-b235e39f21c6 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]521e279b-7aca-3df8-d4c4-9ac9db660bdc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1395.054878] env[63028]: DEBUG oslo_concurrency.lockutils [req-816deea0-30b6-4778-bf8e-a469f45e08bf req-af8e7b2b-7417-4afb-be85-906864ea2ca5 service nova] Releasing lock "refresh_cache-f68e0910-2d7c-4381-84cc-0f44a41d4fa9" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1395.191376] env[63028]: DEBUG oslo_vmware.api [None req-c2b9361a-bb98-4f46-98ad-b235e39f21c6 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]521e279b-7aca-3df8-d4c4-9ac9db660bdc, 'name': SearchDatastore_Task, 'duration_secs': 0.008259} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1395.192114] env[63028]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f6ad04ee-6bb3-42e9-ac85-dc493ff95c09 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.197307] env[63028]: DEBUG oslo_vmware.api [None req-c2b9361a-bb98-4f46-98ad-b235e39f21c6 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] Waiting for the task: (returnval){ [ 1395.197307] env[63028]: value = "session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5234d19d-2947-7387-f29f-5909da357990" [ 1395.197307] env[63028]: _type = "Task" [ 1395.197307] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1395.204467] env[63028]: DEBUG oslo_vmware.api [None req-c2b9361a-bb98-4f46-98ad-b235e39f21c6 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5234d19d-2947-7387-f29f-5909da357990, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1395.708505] env[63028]: DEBUG oslo_vmware.api [None req-c2b9361a-bb98-4f46-98ad-b235e39f21c6 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] Task: {'id': session[52a24e3e-72b2-90b6-2fd0-d6cd18cf73c8]5234d19d-2947-7387-f29f-5909da357990, 'name': SearchDatastore_Task, 'duration_secs': 0.009152} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1395.708754] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c2b9361a-bb98-4f46-98ad-b235e39f21c6 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk" {{(pid=63028) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1395.709017] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2b9361a-bb98-4f46-98ad-b235e39f21c6 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] f68e0910-2d7c-4381-84cc-0f44a41d4fa9/f68e0910-2d7c-4381-84cc-0f44a41d4fa9.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1395.709266] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4de95476-c714-4a88-8077-3c08e0fadf00 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.715883] env[63028]: DEBUG oslo_vmware.api [None req-c2b9361a-bb98-4f46-98ad-b235e39f21c6 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] Waiting for the task: (returnval){ [ 1395.715883] env[63028]: value = "task-2736594" [ 1395.715883] env[63028]: _type = "Task" [ 1395.715883] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1395.722777] env[63028]: DEBUG oslo_vmware.api [None req-c2b9361a-bb98-4f46-98ad-b235e39f21c6 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] Task: {'id': task-2736594, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1396.226051] env[63028]: DEBUG oslo_vmware.api [None req-c2b9361a-bb98-4f46-98ad-b235e39f21c6 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] Task: {'id': task-2736594, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.442864} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1396.226411] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2b9361a-bb98-4f46-98ad-b235e39f21c6 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f2ba2026-3f4b-431c-97c1-c4ba582a9907/f2ba2026-3f4b-431c-97c1-c4ba582a9907.vmdk to [datastore1] f68e0910-2d7c-4381-84cc-0f44a41d4fa9/f68e0910-2d7c-4381-84cc-0f44a41d4fa9.vmdk {{(pid=63028) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1396.226566] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c2b9361a-bb98-4f46-98ad-b235e39f21c6 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] [instance: f68e0910-2d7c-4381-84cc-0f44a41d4fa9] Extending root virtual disk to 1048576 {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1396.226776] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a4bb7286-8550-427d-ac3e-84af2d22415d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.232459] env[63028]: DEBUG oslo_vmware.api [None req-c2b9361a-bb98-4f46-98ad-b235e39f21c6 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] Waiting for the task: (returnval){ [ 1396.232459] env[63028]: value = "task-2736595" [ 1396.232459] env[63028]: _type = "Task" [ 1396.232459] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1396.239705] env[63028]: DEBUG oslo_vmware.api [None req-c2b9361a-bb98-4f46-98ad-b235e39f21c6 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] Task: {'id': task-2736595, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1396.741687] env[63028]: DEBUG oslo_vmware.api [None req-c2b9361a-bb98-4f46-98ad-b235e39f21c6 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] Task: {'id': task-2736595, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.061216} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1396.741906] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-c2b9361a-bb98-4f46-98ad-b235e39f21c6 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] [instance: f68e0910-2d7c-4381-84cc-0f44a41d4fa9] Extended root virtual disk {{(pid=63028) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1396.742653] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0311bdd-d9a7-4646-8c98-1b3232c5098e {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.763918] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-c2b9361a-bb98-4f46-98ad-b235e39f21c6 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] [instance: f68e0910-2d7c-4381-84cc-0f44a41d4fa9] Reconfiguring VM instance instance-0000007c to attach disk [datastore1] f68e0910-2d7c-4381-84cc-0f44a41d4fa9/f68e0910-2d7c-4381-84cc-0f44a41d4fa9.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1396.764151] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f4dfb31e-c9aa-488c-9f35-926de1462b93 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.781751] env[63028]: DEBUG oslo_vmware.api [None req-c2b9361a-bb98-4f46-98ad-b235e39f21c6 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] Waiting for the task: (returnval){ [ 1396.781751] env[63028]: value = "task-2736596" [ 1396.781751] env[63028]: _type = "Task" [ 1396.781751] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1396.788748] env[63028]: DEBUG oslo_vmware.api [None req-c2b9361a-bb98-4f46-98ad-b235e39f21c6 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] Task: {'id': task-2736596, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1397.291915] env[63028]: DEBUG oslo_vmware.api [None req-c2b9361a-bb98-4f46-98ad-b235e39f21c6 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] Task: {'id': task-2736596, 'name': ReconfigVM_Task, 'duration_secs': 0.301165} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1397.292322] env[63028]: DEBUG nova.virt.vmwareapi.volumeops [None req-c2b9361a-bb98-4f46-98ad-b235e39f21c6 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] [instance: f68e0910-2d7c-4381-84cc-0f44a41d4fa9] Reconfigured VM instance instance-0000007c to attach disk [datastore1] f68e0910-2d7c-4381-84cc-0f44a41d4fa9/f68e0910-2d7c-4381-84cc-0f44a41d4fa9.vmdk or device None with type sparse {{(pid=63028) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1397.292800] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d43d4abe-2179-45ef-9f62-e4e0efe9d7d7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.298752] env[63028]: DEBUG oslo_vmware.api [None req-c2b9361a-bb98-4f46-98ad-b235e39f21c6 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] Waiting for the task: (returnval){ [ 1397.298752] env[63028]: value = "task-2736597" [ 1397.298752] env[63028]: _type = "Task" [ 1397.298752] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1397.305691] env[63028]: DEBUG oslo_vmware.api [None req-c2b9361a-bb98-4f46-98ad-b235e39f21c6 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] Task: {'id': task-2736597, 'name': Rename_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1397.807980] env[63028]: DEBUG oslo_vmware.api [None req-c2b9361a-bb98-4f46-98ad-b235e39f21c6 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] Task: {'id': task-2736597, 'name': Rename_Task, 'duration_secs': 0.125074} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1397.808283] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2b9361a-bb98-4f46-98ad-b235e39f21c6 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] [instance: f68e0910-2d7c-4381-84cc-0f44a41d4fa9] Powering on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1397.808520] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-476c559f-1abc-427c-bab3-7411b767c2d7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.815126] env[63028]: DEBUG oslo_vmware.api [None req-c2b9361a-bb98-4f46-98ad-b235e39f21c6 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] Waiting for the task: (returnval){ [ 1397.815126] env[63028]: value = "task-2736598" [ 1397.815126] env[63028]: _type = "Task" [ 1397.815126] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1397.822130] env[63028]: DEBUG oslo_vmware.api [None req-c2b9361a-bb98-4f46-98ad-b235e39f21c6 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] Task: {'id': task-2736598, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1398.324186] env[63028]: DEBUG oslo_vmware.api [None req-c2b9361a-bb98-4f46-98ad-b235e39f21c6 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] Task: {'id': task-2736598, 'name': PowerOnVM_Task, 'duration_secs': 0.41328} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1398.324555] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2b9361a-bb98-4f46-98ad-b235e39f21c6 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] [instance: f68e0910-2d7c-4381-84cc-0f44a41d4fa9] Powered on the VM {{(pid=63028) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1398.324628] env[63028]: INFO nova.compute.manager [None req-c2b9361a-bb98-4f46-98ad-b235e39f21c6 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] [instance: f68e0910-2d7c-4381-84cc-0f44a41d4fa9] Took 6.56 seconds to spawn the instance on the hypervisor. [ 1398.324829] env[63028]: DEBUG nova.compute.manager [None req-c2b9361a-bb98-4f46-98ad-b235e39f21c6 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] [instance: f68e0910-2d7c-4381-84cc-0f44a41d4fa9] Checking state {{(pid=63028) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1398.325590] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be478f81-cc3f-47ec-b5e5-a2dfefbc3d64 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.842249] env[63028]: INFO nova.compute.manager [None req-c2b9361a-bb98-4f46-98ad-b235e39f21c6 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] [instance: f68e0910-2d7c-4381-84cc-0f44a41d4fa9] Took 11.22 seconds to build instance. 
[ 1399.344587] env[63028]: DEBUG oslo_concurrency.lockutils [None req-c2b9361a-bb98-4f46-98ad-b235e39f21c6 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] Lock "f68e0910-2d7c-4381-84cc-0f44a41d4fa9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.735s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1400.435835] env[63028]: DEBUG oslo_concurrency.lockutils [None req-6b2eada4-e6c5-4dac-b254-927abd3a02b9 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] Acquiring lock "f68e0910-2d7c-4381-84cc-0f44a41d4fa9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1400.436237] env[63028]: DEBUG oslo_concurrency.lockutils [None req-6b2eada4-e6c5-4dac-b254-927abd3a02b9 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] Lock "f68e0910-2d7c-4381-84cc-0f44a41d4fa9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1400.436237] env[63028]: DEBUG oslo_concurrency.lockutils [None req-6b2eada4-e6c5-4dac-b254-927abd3a02b9 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] Acquiring lock "f68e0910-2d7c-4381-84cc-0f44a41d4fa9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1400.436398] env[63028]: DEBUG oslo_concurrency.lockutils [None req-6b2eada4-e6c5-4dac-b254-927abd3a02b9 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] Lock "f68e0910-2d7c-4381-84cc-0f44a41d4fa9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1400.436570] env[63028]: DEBUG oslo_concurrency.lockutils [None req-6b2eada4-e6c5-4dac-b254-927abd3a02b9 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] Lock "f68e0910-2d7c-4381-84cc-0f44a41d4fa9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1400.438773] env[63028]: INFO nova.compute.manager [None req-6b2eada4-e6c5-4dac-b254-927abd3a02b9 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] [instance: f68e0910-2d7c-4381-84cc-0f44a41d4fa9] Terminating instance [ 1400.942335] env[63028]: DEBUG nova.compute.manager [None req-6b2eada4-e6c5-4dac-b254-927abd3a02b9 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] [instance: f68e0910-2d7c-4381-84cc-0f44a41d4fa9] Start destroying the instance on the hypervisor. 
{{(pid=63028) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1400.942593] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-6b2eada4-e6c5-4dac-b254-927abd3a02b9 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] [instance: f68e0910-2d7c-4381-84cc-0f44a41d4fa9] Destroying instance {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1400.943687] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bce0b323-35ab-425d-80a3-0fa08e8c73a7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.951350] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b2eada4-e6c5-4dac-b254-927abd3a02b9 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] [instance: f68e0910-2d7c-4381-84cc-0f44a41d4fa9] Powering off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1400.951592] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-119ef56c-78ee-46fa-a42e-b39be36fc8f9 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.958053] env[63028]: DEBUG oslo_vmware.api [None req-6b2eada4-e6c5-4dac-b254-927abd3a02b9 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] Waiting for the task: (returnval){ [ 1400.958053] env[63028]: value = "task-2736599" [ 1400.958053] env[63028]: _type = "Task" [ 1400.958053] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1400.966565] env[63028]: DEBUG oslo_vmware.api [None req-6b2eada4-e6c5-4dac-b254-927abd3a02b9 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] Task: {'id': task-2736599, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1401.467468] env[63028]: DEBUG oslo_vmware.api [None req-6b2eada4-e6c5-4dac-b254-927abd3a02b9 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] Task: {'id': task-2736599, 'name': PowerOffVM_Task, 'duration_secs': 0.196933} completed successfully. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1401.467807] env[63028]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b2eada4-e6c5-4dac-b254-927abd3a02b9 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] [instance: f68e0910-2d7c-4381-84cc-0f44a41d4fa9] Powered off the VM {{(pid=63028) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1401.467908] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-6b2eada4-e6c5-4dac-b254-927abd3a02b9 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] [instance: f68e0910-2d7c-4381-84cc-0f44a41d4fa9] Unregistering the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1401.468247] env[63028]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c9fb8b79-3c76-45e0-8d7f-337027c4bf1d {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.525989] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-6b2eada4-e6c5-4dac-b254-927abd3a02b9 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] [instance: f68e0910-2d7c-4381-84cc-0f44a41d4fa9] Unregistered the VM {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1401.526229] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-6b2eada4-e6c5-4dac-b254-927abd3a02b9 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] [instance: f68e0910-2d7c-4381-84cc-0f44a41d4fa9] Deleting contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1401.526415] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b2eada4-e6c5-4dac-b254-927abd3a02b9 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] Deleting the datastore file [datastore1] f68e0910-2d7c-4381-84cc-0f44a41d4fa9 {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1401.526673] env[63028]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8a12a82d-94bf-44be-a7bf-35eed9f91ee8 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.533041] env[63028]: DEBUG oslo_vmware.api [None req-6b2eada4-e6c5-4dac-b254-927abd3a02b9 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] Waiting for the task: (returnval){ [ 1401.533041] env[63028]: value = "task-2736601" [ 1401.533041] env[63028]: _type = "Task" [ 1401.533041] env[63028]: } to complete. {{(pid=63028) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1401.541039] env[63028]: DEBUG oslo_vmware.api [None req-6b2eada4-e6c5-4dac-b254-927abd3a02b9 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] Task: {'id': task-2736601, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1402.041936] env[63028]: DEBUG oslo_vmware.api [None req-6b2eada4-e6c5-4dac-b254-927abd3a02b9 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] Task: {'id': task-2736601, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.132132} completed successfully. {{(pid=63028) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1402.042201] env[63028]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b2eada4-e6c5-4dac-b254-927abd3a02b9 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] Deleted the datastore file {{(pid=63028) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1402.042389] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-6b2eada4-e6c5-4dac-b254-927abd3a02b9 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] [instance: f68e0910-2d7c-4381-84cc-0f44a41d4fa9] Deleted contents of the VM from datastore datastore1 {{(pid=63028) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1402.042566] env[63028]: DEBUG nova.virt.vmwareapi.vmops [None req-6b2eada4-e6c5-4dac-b254-927abd3a02b9 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] [instance: f68e0910-2d7c-4381-84cc-0f44a41d4fa9] Instance destroyed {{(pid=63028) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1402.042741] env[63028]: INFO nova.compute.manager [None req-6b2eada4-e6c5-4dac-b254-927abd3a02b9 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] [instance: f68e0910-2d7c-4381-84cc-0f44a41d4fa9] Took 1.10 seconds to destroy the instance on the hypervisor. [ 1402.042981] env[63028]: DEBUG oslo.service.loopingcall [None req-6b2eada4-e6c5-4dac-b254-927abd3a02b9 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63028) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1402.043190] env[63028]: DEBUG nova.compute.manager [-] [instance: f68e0910-2d7c-4381-84cc-0f44a41d4fa9] Deallocating network for instance {{(pid=63028) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1402.043285] env[63028]: DEBUG nova.network.neutron [-] [instance: f68e0910-2d7c-4381-84cc-0f44a41d4fa9] deallocate_for_instance() {{(pid=63028) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1402.299945] env[63028]: DEBUG nova.compute.manager [req-2ae3f1b0-5e29-466f-b333-0e3afbe1f030 req-17339ed5-fa4a-4e58-87f0-320193ff0b51 service nova] [instance: f68e0910-2d7c-4381-84cc-0f44a41d4fa9] Received event network-vif-deleted-ffb1392b-9625-4c1e-a6b2-e091f8f097aa {{(pid=63028) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1402.300380] env[63028]: INFO nova.compute.manager [req-2ae3f1b0-5e29-466f-b333-0e3afbe1f030 req-17339ed5-fa4a-4e58-87f0-320193ff0b51 service nova] [instance: f68e0910-2d7c-4381-84cc-0f44a41d4fa9] Neutron deleted interface ffb1392b-9625-4c1e-a6b2-e091f8f097aa; detaching it from the instance and deleting it from the info cache [ 1402.300380] env[63028]: DEBUG nova.network.neutron [req-2ae3f1b0-5e29-466f-b333-0e3afbe1f030 req-17339ed5-fa4a-4e58-87f0-320193ff0b51 service nova] [instance: f68e0910-2d7c-4381-84cc-0f44a41d4fa9] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1402.782265] env[63028]: DEBUG nova.network.neutron [-] [instance: f68e0910-2d7c-4381-84cc-0f44a41d4fa9] Updating instance_info_cache with network_info: [] {{(pid=63028) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1402.802534] env[63028]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ad56e0af-b2c7-4d95-8ec2-78e2dd04dd6b {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.812850] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-471fedc3-9878-410d-92c2-b9d42f2ab78a {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.836586] env[63028]: DEBUG nova.compute.manager [req-2ae3f1b0-5e29-466f-b333-0e3afbe1f030 req-17339ed5-fa4a-4e58-87f0-320193ff0b51 service nova] [instance: f68e0910-2d7c-4381-84cc-0f44a41d4fa9] Detach interface failed, port_id=ffb1392b-9625-4c1e-a6b2-e091f8f097aa, reason: Instance f68e0910-2d7c-4381-84cc-0f44a41d4fa9 could not be found. {{(pid=63028) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1403.285104] env[63028]: INFO nova.compute.manager [-] [instance: f68e0910-2d7c-4381-84cc-0f44a41d4fa9] Took 1.24 seconds to deallocate network for instance. 
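The entries above trace the usual oslo.vmware task pattern the driver follows while tearing down the guest: each vSphere call that returns a Task object (PowerOffVM_Task, DeleteDatastoreFile_Task) is polled by wait_for_task until it reports completion (the "progress is 0%" and "completed successfully" lines), while UnregisterVM is a plain synchronous call. The sketch below is only an illustration of that call shape, assuming an oslo.vmware VMwareAPISession-like object named `session` and pre-resolved managed object references `vm_ref` and `dc_ref`; the helper name `destroy_vm_backing` and the datastore path argument are hypothetical, and this is not Nova's actual vmops code.

```python
# Sketch of the power-off / unregister / delete-files sequence seen above.
# Assumes an oslo.vmware VMwareAPISession-like `session`; vm_ref, dc_ref and
# ds_path (e.g. "[datastore1] <instance uuid>") are supplied by the caller.

def destroy_vm_backing(session, vm_ref, dc_ref, ds_path):
    # PowerOffVM_Task returns a Task; wait_for_task polls it until it is done
    # (the _poll_task "progress is 0%" lines in the log above).
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task)

    # UnregisterVM is a synchronous call, so there is no task to poll.
    session.invoke_api(session.vim, 'UnregisterVM', vm_ref)

    # Removing the instance directory from the datastore is again a Task,
    # issued through the FileManager managed object.
    file_manager = session.vim.service_content.fileManager
    task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                              file_manager, name=ds_path, datacenter=dc_ref)
    session.wait_for_task(task)
```

After this sequence the compute manager still has to deallocate the Neutron port and release the placement allocation, which is what the remaining entries in this run show.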
[ 1403.791276] env[63028]: DEBUG oslo_concurrency.lockutils [None req-6b2eada4-e6c5-4dac-b254-927abd3a02b9 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1403.791647] env[63028]: DEBUG oslo_concurrency.lockutils [None req-6b2eada4-e6c5-4dac-b254-927abd3a02b9 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1403.791753] env[63028]: DEBUG nova.objects.instance [None req-6b2eada4-e6c5-4dac-b254-927abd3a02b9 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] Lazy-loading 'resources' on Instance uuid f68e0910-2d7c-4381-84cc-0f44a41d4fa9 {{(pid=63028) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1404.327192] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bdd0c7f-5bb9-4065-8483-aa3195854033 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.334686] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-554d9aae-74f0-4d2a-a5d2-4d4408270eeb {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.364274] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6503d621-2c58-435d-a6db-0656789a89e7 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.371498] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55b12668-1ff2-4457-be61-bd60bada9898 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.384086] env[63028]: DEBUG nova.compute.provider_tree [None req-6b2eada4-e6c5-4dac-b254-927abd3a02b9 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1404.886971] env[63028]: DEBUG nova.scheduler.client.report [None req-6b2eada4-e6c5-4dac-b254-927abd3a02b9 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1405.393049] env[63028]: DEBUG oslo_concurrency.lockutils [None req-6b2eada4-e6c5-4dac-b254-927abd3a02b9 tempest-ServerTagsTestJSON-2042373095 
tempest-ServerTagsTestJSON-2042373095-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.601s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1405.413309] env[63028]: INFO nova.scheduler.client.report [None req-6b2eada4-e6c5-4dac-b254-927abd3a02b9 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] Deleted allocations for instance f68e0910-2d7c-4381-84cc-0f44a41d4fa9 [ 1405.923700] env[63028]: DEBUG oslo_concurrency.lockutils [None req-6b2eada4-e6c5-4dac-b254-927abd3a02b9 tempest-ServerTagsTestJSON-2042373095 tempest-ServerTagsTestJSON-2042373095-project-member] Lock "f68e0910-2d7c-4381-84cc-0f44a41d4fa9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.488s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1418.657456] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1418.657883] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1418.657883] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1418.657985] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1418.658162] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1418.658318] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1418.658464] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1418.658605] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=63028) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}} [ 1418.658748] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1419.161715] env[63028]: DEBUG oslo_concurrency.lockutils [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1419.161925] env[63028]: DEBUG oslo_concurrency.lockutils [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1419.162136] env[63028]: DEBUG oslo_concurrency.lockutils [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1419.162297] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63028) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1419.163184] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16f27bf2-0ecd-4a5d-8f05-a65944a275df {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.171418] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13f7c74e-d8a7-4495-a3ec-af6aa028e718 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.184805] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23694399-f999-40e8-8eb8-426b3b690ebf {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.190753] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db4454cb-05a0-4045-8fe5-fbd5470a01cb {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.218233] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180938MB free_disk=111GB free_vcpus=48 pci_devices=None {{(pid=63028) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1419.218374] env[63028]: DEBUG oslo_concurrency.lockutils [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
1419.218557] env[63028]: DEBUG oslo_concurrency.lockutils [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1420.350401] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63028) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1420.350654] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63028) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1420.363584] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc4ff21b-973f-424e-bcbc-b093b1765d6f {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.372356] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48523ff8-12c3-4108-8f84-e70a2e671e88 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.400850] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d843b675-71d3-4bc6-840f-4939413ef6d0 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.407364] env[63028]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ca7f111-93c8-466f-8e91-791c7104e8a6 {{(pid=63028) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.419842] env[63028]: DEBUG nova.compute.provider_tree [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Inventory has not changed in ProviderTree for provider: 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 {{(pid=63028) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1420.923149] env[63028]: DEBUG nova.scheduler.client.report [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Inventory has not changed for provider 399bf4d0-2419-4e0e-a8f1-8aa71a7e64a2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 111, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63028) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1421.428586] env[63028]: DEBUG nova.compute.resource_tracker [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63028) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1421.428951] env[63028]: DEBUG oslo_concurrency.lockutils [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.210s {{(pid=63028) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1421.428998] env[63028]: DEBUG oslo_service.periodic_task [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=63028) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1421.429148] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] Cleaning up deleted instances {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11745}} [ 1421.937489] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] There are 22 instances to clean {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11754}} [ 1421.937713] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: f68e0910-2d7c-4381-84cc-0f44a41d4fa9] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1422.441078] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: 068fdccf-16f5-4701-a481-5fa047ab1fa7] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1422.944131] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: 37a202b5-78f6-45a0-b753-7f9747214f3a] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1423.447948] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: 670b71ba-9ef2-4c3b-b9e3-46e2ccfef2c4] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1423.950805] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: 9e3ac23e-16ef-4626-817b-24683fd89b1d] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1424.454793] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: 5c63c2bb-4725-4722-98e2-cd5c71944c4e] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1424.958665] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: e3e121c9-4cf4-4f79-b4de-fe9696ba7dbc] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1425.463092] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: feb8d206-718d-423a-afff-76c6975934e6] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1425.965443] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: b438b12e-874a-4883-b606-c28258e5a01a] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1426.469150] env[63028]: DEBUG 
nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: f96d4bcd-a032-4e4d-94e4-12d7013d5e3f] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1426.972899] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: 629a3b6f-a74b-4193-bcf4-fc67a1752d5b] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1427.476461] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: 092c7673-97fb-4085-852c-04a7c19a73e7] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1427.980414] env[63028]: DEBUG nova.compute.manager [None req-62d9dc37-a0b6-4b77-abf6-91a5fdeadbc1 None None] [instance: e5767896-8203-4b18-826f-dcb2fe02268e] Instance has had 0 of 5 cleanup attempts {{(pid=63028) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}}
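The final block of entries is the _run_pending_deletes periodic task walking a backlog of soft-deleted instances ("There are 22 instances to clean") and recording how many cleanup attempts each one has had out of an allowed budget of five. The snippet below is a self-contained, simplified stand-in for that bounded-retry loop; the names (PendingInstance, cleanup_files, MAX_CLEANUP_ATTEMPTS) are hypothetical and it is not Nova's implementation, just the shape of the behaviour the log reports.

```python
# Illustrative bounded-retry cleanup loop in the spirit of the
# "_run_pending_deletes" entries above (hypothetical names throughout).
from dataclasses import dataclass

MAX_CLEANUP_ATTEMPTS = 5  # mirrors "0 of 5 cleanup attempts" in the log


@dataclass
class PendingInstance:
    uuid: str
    cleanup_attempts: int = 0
    cleaned: bool = False


def cleanup_files(instance: PendingInstance) -> bool:
    """Hypothetical stand-in for removing leftover files/allocations."""
    return True  # pretend the cleanup succeeded


def run_pending_deletes(instances: list[PendingInstance]) -> None:
    print(f"There are {len(instances)} instances to clean")
    for inst in instances:
        if inst.cleaned or inst.cleanup_attempts >= MAX_CLEANUP_ATTEMPTS:
            continue  # skip finished instances or exhausted attempt budgets
        print(f"[instance: {inst.uuid}] Instance has had "
              f"{inst.cleanup_attempts} of {MAX_CLEANUP_ATTEMPTS} cleanup attempts")
        inst.cleanup_attempts += 1
        inst.cleaned = cleanup_files(inst)


if __name__ == "__main__":
    run_pending_deletes([PendingInstance("f68e0910-2d7c-4381-84cc-0f44a41d4fa9")])
```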